1 //! Memory allocation APIs.
3 //! In a given program, the standard library has one “global” memory allocator
4 //! that is used for example by `Box<T>` and `Vec<T>`.
6 //! Currently the default global allocator is unspecified. Libraries, however,
//! like `cdylib`s and `staticlib`s are guaranteed to use the [`System`] by
//! default.
10 //! # The `#[global_allocator]` attribute
12 //! This attribute allows configuring the choice of global allocator.
13 //! You can use this to implement a completely custom global allocator
14 //! to route all default allocation requests to a custom object.
17 //! use std::alloc::{GlobalAlloc, System, Layout};
19 //! struct MyAllocator;
21 //! unsafe impl GlobalAlloc for MyAllocator {
22 //! unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
23 //! System.alloc(layout)
26 //! unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
27 //! System.dealloc(ptr, layout)
31 //! #[global_allocator]
32 //! static GLOBAL: MyAllocator = MyAllocator;
35 //! // This `Vec` will allocate memory through `GLOBAL` above
36 //! let mut v = Vec::new();
41 //! The attribute is used on a `static` item whose type implements the
42 //! [`GlobalAlloc`] trait. This type can be provided by an external library:
44 //! ```rust,ignore (demonstrates crates.io usage)
45 //! extern crate jemallocator;
47 //! use jemallocator::Jemalloc;
49 //! #[global_allocator]
50 //! static GLOBAL: Jemalloc = Jemalloc;
55 //! The `#[global_allocator]` can only be used once in a crate
56 //! or its recursive dependencies.
58 #![deny(unsafe_op_in_unsafe_fn)]
59 #![stable(feature = "alloc_module", since = "1.28.0")]
62 use core::ptr::NonNull;
63 use core::sync::atomic::{AtomicPtr, Ordering};
66 use crate::sys_common::util::dumb_print;
68 #[stable(feature = "alloc_module", since = "1.28.0")]
70 pub use alloc_crate::alloc::*;
72 /// The default memory allocator provided by the operating system.
74 /// This is based on `malloc` on Unix platforms and `HeapAlloc` on Windows,
75 /// plus related functions.
77 /// This type implements the `GlobalAlloc` trait and Rust programs by default
78 /// work as if they had this definition:
81 /// use std::alloc::System;
83 /// #[global_allocator]
84 /// static A: System = System;
87 /// let a = Box::new(4); // Allocates from the system allocator.
88 /// println!("{}", a);
92 /// You can also define your own wrapper around `System` if you'd like, such as
93 /// keeping track of the number of all bytes allocated:
96 /// use std::alloc::{System, GlobalAlloc, Layout};
97 /// use std::sync::atomic::{AtomicUsize, Ordering::SeqCst};
101 /// static ALLOCATED: AtomicUsize = AtomicUsize::new(0);
103 /// unsafe impl GlobalAlloc for Counter {
104 /// unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
105 /// let ret = System.alloc(layout);
106 /// if !ret.is_null() {
107 /// ALLOCATED.fetch_add(layout.size(), SeqCst);
112 /// unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {
113 /// System.dealloc(ptr, layout);
114 /// ALLOCATED.fetch_sub(layout.size(), SeqCst);
118 /// #[global_allocator]
119 /// static A: Counter = Counter;
122 /// println!("allocated bytes before main: {}", ALLOCATED.load(SeqCst));
126 /// It can also be used directly to allocate memory independently of whatever
127 /// global allocator has been selected for a Rust program. For example if a Rust
128 /// program opts in to using jemalloc as the global allocator, `System` will
129 /// still allocate memory using `malloc` and `HeapAlloc`.
130 #[stable(feature = "alloc_system_type", since = "1.28.0")]
131 #[derive(Debug, Default, Copy, Clone)]
136 fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
137 match layout.size() {
138 0 => Ok(NonNull::slice_from_raw_parts(layout.dangling(), 0)),
139 // SAFETY: `layout` is non-zero in size,
141 let raw_ptr = if zeroed {
142 GlobalAlloc::alloc_zeroed(self, layout)
144 GlobalAlloc::alloc(self, layout)
146 let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
147 Ok(NonNull::slice_from_raw_parts(ptr, size))
152 // SAFETY: Same as `Allocator::grow`
160 ) -> Result<NonNull<[u8]>, AllocError> {
162 new_layout.size() >= old_layout.size(),
163 "`new_layout.size()` must be greater than or equal to `old_layout.size()`"
166 match old_layout.size() {
167 0 => self.alloc_impl(new_layout, zeroed),
169 // SAFETY: `new_size` is non-zero as `new_size` is greater than or equal to `old_size`
170 // as required by safety conditions and the `old_size == 0` case was handled in the
171 // previous match arm. Other conditions must be upheld by the caller
172 old_size if old_layout.align() == new_layout.align() => unsafe {
173 let new_size = new_layout.size();
175 // `realloc` probably checks for `new_size >= old_layout.size()` or something similar.
176 intrinsics::assume(new_size >= old_layout.size());
178 let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size);
179 let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
181 raw_ptr.add(old_size).write_bytes(0, new_size - old_size);
183 Ok(NonNull::slice_from_raw_parts(ptr, new_size))
186 // SAFETY: because `new_layout.size()` must be greater than or equal to `old_size`,
187 // both the old and new memory allocation are valid for reads and writes for `old_size`
188 // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
189 // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
190 // for `dealloc` must be upheld by the caller.
192 let new_ptr = self.alloc_impl(new_layout, zeroed)?;
193 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), old_size);
194 Allocator::deallocate(&self, ptr, old_layout);
201 // The Allocator impl checks the layout size to be non-zero and forwards to the GlobalAlloc impl,
202 // which is in `std::sys::*::alloc`.
203 #[unstable(feature = "allocator_api", issue = "32838")]
204 unsafe impl Allocator for System {
206 fn allocate(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
207 self.alloc_impl(layout, false)
211 fn allocate_zeroed(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
212 self.alloc_impl(layout, true)
216 unsafe fn deallocate(&self, ptr: NonNull<u8>, layout: Layout) {
217 if layout.size() != 0 {
218 // SAFETY: `layout` is non-zero in size,
219 // other conditions must be upheld by the caller
220 unsafe { GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) }
230 ) -> Result<NonNull<[u8]>, AllocError> {
231 // SAFETY: all conditions must be upheld by the caller
232 unsafe { self.grow_impl(ptr, old_layout, new_layout, false) }
236 unsafe fn grow_zeroed(
241 ) -> Result<NonNull<[u8]>, AllocError> {
242 // SAFETY: all conditions must be upheld by the caller
243 unsafe { self.grow_impl(ptr, old_layout, new_layout, true) }
252 ) -> Result<NonNull<[u8]>, AllocError> {
254 new_layout.size() <= old_layout.size(),
255 "`new_layout.size()` must be smaller than or equal to `old_layout.size()`"
258 match new_layout.size() {
259 // SAFETY: conditions must be upheld by the caller
261 Allocator::deallocate(&self, ptr, old_layout);
262 Ok(NonNull::slice_from_raw_parts(new_layout.dangling(), 0))
265 // SAFETY: `new_size` is non-zero. Other conditions must be upheld by the caller
266 new_size if old_layout.align() == new_layout.align() => unsafe {
267 // `realloc` probably checks for `new_size <= old_layout.size()` or something similar.
268 intrinsics::assume(new_size <= old_layout.size());
270 let raw_ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), old_layout, new_size);
271 let ptr = NonNull::new(raw_ptr).ok_or(AllocError)?;
272 Ok(NonNull::slice_from_raw_parts(ptr, new_size))
275 // SAFETY: because `new_size` must be smaller than or equal to `old_layout.size()`,
276 // both the old and new memory allocation are valid for reads and writes for `new_size`
277 // bytes. Also, because the old allocation wasn't yet deallocated, it cannot overlap
278 // `new_ptr`. Thus, the call to `copy_nonoverlapping` is safe. The safety contract
279 // for `dealloc` must be upheld by the caller.
281 let new_ptr = Allocator::allocate(&self, new_layout)?;
282 ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_mut_ptr(), new_size);
283 Allocator::deallocate(&self, ptr, old_layout);
// The registered allocation-error hook, stored type-erased as a raw pointer
// (`fn(Layout)` cast to `*mut ()`). Null means "no custom hook installed" —
// readers fall back to `default_alloc_error_hook` in that case.
static HOOK: AtomicPtr<()> = AtomicPtr::new(ptr::null_mut());
292 /// Registers a custom allocation error hook, replacing any that was previously registered.
294 /// The allocation error hook is invoked when an infallible memory allocation fails, before
295 /// the runtime aborts. The default hook prints a message to standard error,
296 /// but this behavior can be customized with the [`set_alloc_error_hook`] and
297 /// [`take_alloc_error_hook`] functions.
299 /// The hook is provided with a `Layout` struct which contains information
300 /// about the allocation that failed.
302 /// The allocation error hook is a global resource.
303 #[unstable(feature = "alloc_error_hook", issue = "51245")]
304 pub fn set_alloc_error_hook(hook: fn(Layout)) {
305 HOOK.store(hook as *mut (), Ordering::SeqCst);
308 /// Unregisters the current allocation error hook, returning it.
310 /// *See also the function [`set_alloc_error_hook`].*
312 /// If no custom hook is registered, the default hook will be returned.
313 #[unstable(feature = "alloc_error_hook", issue = "51245")]
314 pub fn take_alloc_error_hook() -> fn(Layout) {
315 let hook = HOOK.swap(ptr::null_mut(), Ordering::SeqCst);
316 if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } }
319 fn default_alloc_error_hook(layout: Layout) {
320 dumb_print(format_args!("memory allocation of {} bytes failed\n", layout.size()));
325 #[alloc_error_handler]
326 #[unstable(feature = "alloc_internals", issue = "none")]
327 pub fn rust_oom(layout: Layout) -> ! {
328 let hook = HOOK.load(Ordering::SeqCst);
329 let hook: fn(Layout) =
330 if hook.is_null() { default_alloc_error_hook } else { unsafe { mem::transmute(hook) } };
332 crate::process::abort()
337 #[allow(unused_attributes)]
338 #[unstable(feature = "alloc_internals", issue = "none")]
339 pub mod __default_lib_allocator {
340 use super::{GlobalAlloc, Layout, System};
341 // These magic symbol names are used as a fallback for implementing the
342 // `__rust_alloc` etc symbols (see `src/liballoc/alloc.rs`) when there is
343 // no `#[global_allocator]` attribute.
345 // for symbol names src/librustc_ast/expand/allocator.rs
346 // for signatures src/librustc_allocator/lib.rs
348 // linkage directives are provided as part of the current compiler allocator
351 #[rustc_std_internal_symbol]
352 pub unsafe extern "C" fn __rdl_alloc(size: usize, align: usize) -> *mut u8 {
353 // SAFETY: see the guarantees expected by `Layout::from_size_align` and
354 // `GlobalAlloc::alloc`.
356 let layout = Layout::from_size_align_unchecked(size, align);
361 #[rustc_std_internal_symbol]
362 pub unsafe extern "C" fn __rdl_dealloc(ptr: *mut u8, size: usize, align: usize) {
363 // SAFETY: see the guarantees expected by `Layout::from_size_align` and
364 // `GlobalAlloc::dealloc`.
365 unsafe { System.dealloc(ptr, Layout::from_size_align_unchecked(size, align)) }
368 #[rustc_std_internal_symbol]
369 pub unsafe extern "C" fn __rdl_realloc(
375 // SAFETY: see the guarantees expected by `Layout::from_size_align` and
376 // `GlobalAlloc::realloc`.
378 let old_layout = Layout::from_size_align_unchecked(old_size, align);
379 System.realloc(ptr, old_layout, new_size)
383 #[rustc_std_internal_symbol]
384 pub unsafe extern "C" fn __rdl_alloc_zeroed(size: usize, align: usize) -> *mut u8 {
385 // SAFETY: see the guarantees expected by `Layout::from_size_align` and
386 // `GlobalAlloc::alloc_zeroed`.
388 let layout = Layout::from_size_align_unchecked(size, align);
389 System.alloc_zeroed(layout)