1 // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use core::ptr::RawPtr;
13 // FIXME: #13996: mark the `allocate` and `reallocate` return value as `noalias`
15 /// Return a pointer to `size` bytes of memory aligned to `align`.
17 /// On failure, return a null pointer.
19 /// Behavior is undefined if the requested size is 0 or the alignment is not a
20 /// power of 2. The alignment must be no larger than the largest supported page
21 /// size on the platform.
23 pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
24 imp::allocate(size, align)
27 /// Resize the allocation referenced by `ptr` to `size` bytes.
29 /// On failure, return a null pointer and leave the original allocation intact.
31 /// Behavior is undefined if the requested size is 0 or the alignment is not a
32 /// power of 2. The alignment must be no larger than the largest supported page
33 /// size on the platform.
35 /// The `old_size` and `align` parameters are the parameters that were used to
36 /// create the allocation referenced by `ptr`. The `old_size` parameter may be
37 /// any value in range_inclusive(requested_size, usable_size).
39 pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8 {
40 imp::reallocate(ptr, old_size, size, align)
43 /// Resize the allocation referenced by `ptr` to `size` bytes.
45 /// If the operation succeeds, it returns `usable_size(size, align)` and if it
46 /// fails (or is a no-op) it returns `usable_size(old_size, align)`.
48 /// Behavior is undefined if the requested size is 0 or the alignment is not a
49 /// power of 2. The alignment must be no larger than the largest supported page
50 /// size on the platform.
52 /// The `old_size` and `align` parameters are the parameters that were used to
53 /// create the allocation referenced by `ptr`. The `old_size` parameter may be
54 /// any value in range_inclusive(requested_size, usable_size).
56 pub unsafe fn reallocate_inplace(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> uint {
57 imp::reallocate_inplace(ptr, old_size, size, align)
60 /// Deallocates the memory referenced by `ptr`.
62 /// The `ptr` parameter must not be null.
64 /// The `old_size` and `align` parameters are the parameters that were used to
65 /// create the allocation referenced by `ptr`. The `old_size` parameter may be
66 /// any value in range_inclusive(requested_size, usable_size).
68 pub unsafe fn deallocate(ptr: *mut u8, old_size: uint, align: uint) {
69 imp::deallocate(ptr, old_size, align)
72 /// Returns the usable size of an allocation created with the specified the
73 /// `size` and `align`.
75 pub fn usable_size(size: uint, align: uint) -> uint {
76 imp::usable_size(size, align)
79 /// Prints implementation-defined allocator statistics.
81 /// These statistics may be inconsistent if other threads use the allocator
84 pub fn stats_print() {
/// An arbitrary non-null address to represent zero-size allocations.
///
/// This preserves the non-null invariant for types like `Box<T>`, which
/// never store a null pointer even when zero-sized. The address may overlap
/// with non-zero-size memory allocations, so it must never be dereferenced
/// or freed.
pub const EMPTY: *mut () = 0x1 as *mut ();
/// The allocator for unique pointers.
// Lang item: the compiler emits calls to this function for `box` /
// exchange-heap allocations.
#[lang="exchange_malloc"]
unsafe fn exchange_malloc(size: uint, align: uint) -> *mut u8 {
// Forward to the general allocator; aborts the process via `oom` when the
// allocation fails, so callers never observe a null pointer.
// NOTE(review): the zero-size fast path (returning `EMPTY`) and the final
// return of `ptr` are not visible in this view — confirm against the full
// file.
let ptr = allocate(size, align);
if ptr.is_null() { ::oom() }
// Lang item: the compiler emits calls to this function when a `box`
// allocation is dropped.
#[lang="exchange_free"]
unsafe fn exchange_free(ptr: *mut u8, old_size: uint, align: uint) {
// `old_size` and `align` must match the original allocation request; the
// sized-deallocation backends (e.g. jemalloc's sdallocx) rely on them.
deallocate(ptr, old_size, align);
// The minimum alignment guaranteed by the architecture. This value is used to
// add fast paths for low alignment values. In practice, the alignment is a
// constant at the call site and the branch will be optimized out.
//
// Requests with `align <= MIN_ALIGN` can be served by plain malloc/realloc
// without any alignment flags or aligned-allocation entry points.
#[cfg(any(target_arch = "arm",
target_arch = "mips",
target_arch = "mipsel"))]
const MIN_ALIGN: uint = 8;
// x86 and x86_64 ABIs guarantee 16-byte alignment from the system allocator.
#[cfg(any(target_arch = "x86",
target_arch = "x86_64"))]
const MIN_ALIGN: uint = 16;
128 use core::option::{None, Option};
129 use core::ptr::{null_mut, null};
131 use libc::{c_char, c_int, c_void, size_t};
132 use super::MIN_ALIGN;
#[link(name = "jemalloc", kind = "static")]
// Raw bindings to jemalloc's non-standard `*allocx` API, which takes a
// flags word encoding (among other things) the requested alignment.
// malloc-like allocation.
fn je_mallocx(size: size_t, flags: c_int) -> *mut c_void;
// realloc-like resize that may move the allocation.
fn je_rallocx(ptr: *mut c_void, size: size_t, flags: c_int) -> *mut c_void;
// In-place resize; returns the resulting usable size.
fn je_xallocx(ptr: *mut c_void, size: size_t, extra: size_t, flags: c_int) -> size_t;
// Sized deallocation.
fn je_sdallocx(ptr: *mut c_void, size: size_t, flags: c_int);
// Computes the usable size of a hypothetical allocation without performing it.
fn je_nallocx(size: size_t, flags: c_int) -> size_t;
// Dumps allocator statistics through `write_cb` (or to stderr when None).
// NOTE(review): the callback's remaining parameter line is not visible in
// this view — confirm the full declaration against the file.
fn je_malloc_stats_print(write_cb: Option<extern "C" fn(cbopaque: *mut c_void,
cbopaque: *mut c_void,
opts: *const c_char);
150 // -lpthread needs to occur after -ljemalloc, the earlier argument isn't enough
151 #[cfg(all(not(windows), not(target_os = "android")))]
152 #[link(name = "pthread")]
155 // MALLOCX_ALIGN(a) macro
157 fn mallocx_align(a: uint) -> c_int { a.trailing_zeros() as c_int }
160 fn align_to_flags(align: uint) -> c_int {
161 if align <= MIN_ALIGN { 0 } else { mallocx_align(align) }
165 pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
166 let flags = align_to_flags(align);
167 je_mallocx(size as size_t, flags) as *mut u8
171 pub unsafe fn reallocate(ptr: *mut u8, _old_size: uint, size: uint, align: uint) -> *mut u8 {
172 let flags = align_to_flags(align);
173 je_rallocx(ptr as *mut c_void, size as size_t, flags) as *mut u8
177 pub unsafe fn reallocate_inplace(ptr: *mut u8, _old_size: uint, size: uint,
178 align: uint) -> uint {
179 let flags = align_to_flags(align);
180 je_xallocx(ptr as *mut c_void, size as size_t, 0, flags) as uint
184 pub unsafe fn deallocate(ptr: *mut u8, old_size: uint, align: uint) {
185 let flags = align_to_flags(align);
186 je_sdallocx(ptr as *mut c_void, old_size as size_t, flags)
190 pub fn usable_size(size: uint, align: uint) -> uint {
191 let flags = align_to_flags(align);
192 unsafe { je_nallocx(size as size_t, flags) as uint }
195 pub fn stats_print() {
197 je_malloc_stats_print(None, null_mut(), null())
202 #[cfg(all(not(jemalloc), unix))]
207 use super::MIN_ALIGN;
// POSIX aligned allocation: stores the new pointer through `memptr` and
// returns 0 on success (a positive errno value otherwise).
// NOTE(review): the `align` parameter line of this declaration is not
// visible in this view — confirm against the full file.
fn posix_memalign(memptr: *mut *mut libc::c_void,
size: libc::size_t) -> libc::c_int;
// Allocates `size` bytes aligned to `align` using the system allocator.
pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
// malloc already guarantees MIN_ALIGN, so small alignments take the cheap path.
if align <= MIN_ALIGN {
libc::malloc(size as libc::size_t) as *mut u8
// Larger alignments go through posix_memalign, which writes the result
// into `out` and reports failure via its return code.
// NOTE(review): the handling of `ret` (mapping nonzero to null and
// returning `out` otherwise) is not visible in this view — confirm
// against the full file.
let mut out = 0 as *mut libc::c_void;
let ret = posix_memalign(&mut out,
align as libc::size_t,
size as libc::size_t);
// Resizes the allocation at `ptr`, preserving its `align` requirement.
pub unsafe fn reallocate(ptr: *mut u8, old_size: uint, size: uint, align: uint) -> *mut u8 {
// realloc preserves malloc's MIN_ALIGN guarantee, so it is safe for small
// alignments.
if align <= MIN_ALIGN {
libc::realloc(ptr as *mut libc::c_void, size as libc::size_t) as *mut u8
// Over-aligned case: realloc cannot preserve the alignment, so fall back
// to allocate + copy + free.
// NOTE(review): `new_ptr` is not checked for null before copy_memory —
// on allocation failure this copies into a null pointer. Confirm whether
// the non-visible tail of this function guards against that.
let new_ptr = allocate(size, align);
ptr::copy_memory(new_ptr, ptr as *const u8, cmp::min(size, old_size));
deallocate(ptr, old_size, align);
245 pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: uint, _size: uint,
246 _align: uint) -> uint {
251 pub unsafe fn deallocate(ptr: *mut u8, _old_size: uint, _align: uint) {
252 libc::free(ptr as *mut libc::c_void)
256 pub fn usable_size(size: uint, _align: uint) -> uint {
260 pub fn stats_print() {}
263 #[cfg(all(not(jemalloc), windows))]
265 use libc::{c_void, size_t};
267 use super::MIN_ALIGN;
// CRT aligned-allocation entry points. Memory from _aligned_malloc /
// _aligned_realloc must be released with _aligned_free, never plain free.
fn _aligned_malloc(size: size_t, align: size_t) -> *mut c_void;
fn _aligned_realloc(block: *mut c_void, size: size_t,
align: size_t) -> *mut c_void;
fn _aligned_free(ptr: *mut c_void);
277 pub unsafe fn allocate(size: uint, align: uint) -> *mut u8 {
278 if align <= MIN_ALIGN {
279 libc::malloc(size as size_t) as *mut u8
281 _aligned_malloc(size as size_t, align as size_t) as *mut u8
286 pub unsafe fn reallocate(ptr: *mut u8, _old_size: uint, size: uint, align: uint) -> *mut u8 {
287 if align <= MIN_ALIGN {
288 libc::realloc(ptr as *mut c_void, size as size_t) as *mut u8
290 _aligned_realloc(ptr as *mut c_void, size as size_t, align as size_t) as *mut u8
295 pub unsafe fn reallocate_inplace(_ptr: *mut u8, old_size: uint, _size: uint,
296 _align: uint) -> uint {
301 pub unsafe fn deallocate(ptr: *mut u8, _old_size: uint, align: uint) {
302 if align <= MIN_ALIGN {
303 libc::free(ptr as *mut libc::c_void)
305 _aligned_free(ptr as *mut c_void)
310 pub fn usable_size(size: uint, _align: uint) -> uint {
314 pub fn stats_print() {}
320 use self::test::Bencher;
321 use core::ptr::RawPtr;
// Verifies the reallocate_inplace contract for the no-op case: resizing to
// the same size must report usable_size(size, align).
// NOTE(review): the `#[test]` attribute, the enclosing `unsafe` block, and
// the `let size = ...` binding are not visible in this view — confirm
// against the full file.
fn basic_reallocate_inplace_noop() {
let ptr = heap::allocate(size, 8);
if ptr.is_null() { ::oom() }
let ret = heap::reallocate_inplace(ptr, size, size, 8);
heap::deallocate(ptr, size, 8);
// Same-size in-place "resize" must report the usable size unchanged.
assert_eq!(ret, heap::usable_size(size, 8));
337 fn alloc_owned_small(b: &mut Bencher) {