use crate::cmp;
use crate::fmt;
use crate::mem;
use crate::num::NonZeroUsize;
use crate::ptr::{self, NonNull};
use crate::usize;
/// Represents the combination of a starting address and
/// a total capacity of the returned block.
// Field 0 is the starting address of the allocated block; field 1 is the
// block's usable capacity in bytes (which may exceed the size originally
// requested — see how `realloc_excess` pairs the pointer with
// `usable_size().1`).
#[derive(Debug)]
pub struct Excess(pub NonNull<u8>, pub usize);
/// Returns the `(size, align)` pair for `T`, as reported by
/// `mem::size_of` / `mem::align_of`.
const fn size_align<T>() -> (usize, usize) {
    (mem::size_of::<T>(), mem::align_of::<T>())
}
/// must not overflow (i.e., the rounded value must be less than
/// `usize::MAX`).
#[stable(feature = "alloc_layout", since = "1.28.0")]
+ #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[inline]
- pub fn from_size_align(size: usize, align: usize) -> Result<Self, LayoutErr> {
+ pub const fn from_size_align(size: usize, align: usize) -> Result<Self, LayoutErr> {
if !align.is_power_of_two() {
return Err(LayoutErr { private: () });
}
return Err(LayoutErr { private: () });
}
- unsafe {
- Ok(Layout::from_size_align_unchecked(size, align))
- }
+ unsafe { Ok(Layout::from_size_align_unchecked(size, align)) }
}
/// Creates a layout, bypassing all checks.
/// The minimum size in bytes for a memory block of this layout.
#[stable(feature = "alloc_layout", since = "1.28.0")]
+ #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[inline]
- pub fn size(&self) -> usize { self.size_ }
+ pub const fn size(&self) -> usize {
+ self.size_
+ }
/// The minimum byte alignment for a memory block of this layout.
#[stable(feature = "alloc_layout", since = "1.28.0")]
+ #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[inline]
- pub fn align(&self) -> usize { self.align_.get() }
+ pub const fn align(&self) -> usize {
+ self.align_.get()
+ }
/// Constructs a `Layout` suitable for holding a value of type `T`.
#[stable(feature = "alloc_layout", since = "1.28.0")]
+ #[rustc_const_stable(feature = "alloc_layout_const_new", since = "1.42.0")]
#[inline]
- pub fn new<T>() -> Self {
+ pub const fn new<T>() -> Self {
let (size, align) = size_align::<T>();
// Note that the align is guaranteed by rustc to be a power of two and
// the size+align combo is guaranteed to fit in our address space. As a
// result use the unchecked constructor here to avoid inserting code
// that panics if it isn't optimized well enough.
- debug_assert!(Layout::from_size_align(size, align).is_ok());
- unsafe {
- Layout::from_size_align_unchecked(size, align)
- }
+ unsafe { Layout::from_size_align_unchecked(size, align) }
}
/// Produces layout describing a record that could be used to
let (size, align) = (mem::size_of_val(t), mem::align_of_val(t));
// See rationale in `new` for why this is using an unsafe variant below
debug_assert!(Layout::from_size_align(size, align).is_ok());
- unsafe {
- Layout::from_size_align_unchecked(size, align)
- }
+ unsafe { Layout::from_size_align_unchecked(size, align) }
}
/// Creates a layout describing the record that can hold a value
/// address for the whole allocated block of memory. One way to
/// satisfy this constraint is to ensure `align <= self.align()`.
#[unstable(feature = "alloc_layout_extra", issue = "55724")]
+ #[rustc_const_unstable(feature = "const_alloc_layout", issue = "67521")]
#[inline]
- pub fn padding_needed_for(&self, align: usize) -> usize {
+ pub const fn padding_needed_for(&self, align: usize) -> usize {
let len = self.size();
// Rounded up value is:
// size and padding overflow in the above manner should cause
// the allocator to yield an error anyway.)
- let len_rounded_up = len.wrapping_add(align).wrapping_sub(1)
- & !align.wrapping_sub(1);
+ let len_rounded_up = len.wrapping_add(align).wrapping_sub(1) & !align.wrapping_sub(1);
len_rounded_up.wrapping_sub(len)
}
// > must not overflow (i.e., the rounded value must be less than
// > `usize::MAX`)
let padded_size = self.size() + self.padding_needed_for(self.align());
- let alloc_size = padded_size.checked_mul(n)
- .ok_or(LayoutErr { private: () })?;
+ let alloc_size = padded_size.checked_mul(n).ok_or(LayoutErr { private: () })?;
unsafe {
// self.align is already known to be valid and alloc_size has been
let new_align = cmp::max(self.align(), next.align());
let pad = self.padding_needed_for(next.align());
- let offset = self.size().checked_add(pad)
- .ok_or(LayoutErr { private: () })?;
- let new_size = offset.checked_add(next.size())
- .ok_or(LayoutErr { private: () })?;
+ let offset = self.size().checked_add(pad).ok_or(LayoutErr { private: () })?;
+ let new_size = offset.checked_add(next.size()).ok_or(LayoutErr { private: () })?;
let layout = Layout::from_size_align(new_size, new_align)?;
Ok((layout, offset))
#[unstable(feature = "alloc_layout_extra", issue = "55724")]
#[inline]
pub fn extend_packed(&self, next: Self) -> Result<Self, LayoutErr> {
- let new_size = self.size().checked_add(next.size())
- .ok_or(LayoutErr { private: () })?;
+ let new_size = self.size().checked_add(next.size()).ok_or(LayoutErr { private: () })?;
Layout::from_size_align(new_size, self.align())
}
#[unstable(feature = "alloc_layout_extra", issue = "55724")]
#[inline]
pub fn array<T>(n: usize) -> Result<Self, LayoutErr> {
- Layout::new::<T>()
- .repeat(n)
- .map(|(k, offs)| {
- debug_assert!(offs == mem::size_of::<T>());
- k
- })
+ Layout::new::<T>().repeat(n).map(|(k, offs)| {
+ debug_assert!(offs == mem::size_of::<T>());
+ k
+ })
}
}
#[stable(feature = "alloc_layout", since = "1.28.0")]
#[derive(Clone, PartialEq, Eq, Debug)]
pub struct LayoutErr {
- private: ()
+ private: (),
}
// (we need this for downstream impl of trait Error)
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let new_ptr = self.alloc(new_layout);
if !new_ptr.is_null() {
- ptr::copy_nonoverlapping(
- ptr,
- new_ptr,
- cmp::min(layout.size(), new_size),
- );
+ ptr::copy_nonoverlapping(ptr, new_ptr, cmp::min(layout.size(), new_size));
self.dealloc(ptr, layout);
}
new_ptr
/// the future.
#[unstable(feature = "allocator_api", issue = "32838")]
pub unsafe trait Alloc {
-
// (Note: some existing allocators have unspecified but well-defined
// behavior in response to a zero size allocation request ;
// e.g., in C, `malloc` of 0 will either return a null pointer or a
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
- unsafe fn realloc(&mut self,
- ptr: NonNull<u8>,
- layout: Layout,
- new_size: usize) -> Result<NonNull<u8>, AllocErr> {
+ unsafe fn realloc(
+ &mut self,
+ ptr: NonNull<u8>,
+ layout: Layout,
+ new_size: usize,
+ ) -> Result<NonNull<u8>, AllocErr> {
let old_size = layout.size();
if new_size >= old_size {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let result = self.alloc(new_layout);
if let Ok(new_ptr) = result {
- ptr::copy_nonoverlapping(ptr.as_ptr(),
- new_ptr.as_ptr(),
- cmp::min(old_size, new_size));
+ ptr::copy_nonoverlapping(ptr.as_ptr(), new_ptr.as_ptr(), cmp::min(old_size, new_size));
self.dealloc(ptr, layout);
}
result
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
- unsafe fn realloc_excess(&mut self,
- ptr: NonNull<u8>,
- layout: Layout,
- new_size: usize) -> Result<Excess, AllocErr> {
+ unsafe fn realloc_excess(
+ &mut self,
+ ptr: NonNull<u8>,
+ layout: Layout,
+ new_size: usize,
+ ) -> Result<Excess, AllocErr> {
let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
let usable_size = self.usable_size(&new_layout);
- self.realloc(ptr, layout, new_size)
- .map(|p| Excess(p, usable_size.1))
+ self.realloc(ptr, layout, new_size).map(|p| Excess(p, usable_size.1))
}
/// Attempts to extend the allocation referenced by `ptr` to fit `new_size`.
/// function; clients are expected either to be able to recover from
/// `grow_in_place` failures without aborting, or to fall back on
/// another reallocation method before resorting to an abort.
- unsafe fn grow_in_place(&mut self,
- ptr: NonNull<u8>,
- layout: Layout,
- new_size: usize) -> Result<(), CannotReallocInPlace> {
+ unsafe fn grow_in_place(
+ &mut self,
+ ptr: NonNull<u8>,
+ layout: Layout,
+ new_size: usize,
+ ) -> Result<(), CannotReallocInPlace> {
let _ = ptr; // this default implementation doesn't care about the actual address.
debug_assert!(new_size >= layout.size());
let (_l, u) = self.usable_size(&layout);
// _l <= layout.size() [guaranteed by usable_size()]
// layout.size() <= new_layout.size() [required by this method]
- if new_size <= u {
- Ok(())
- } else {
- Err(CannotReallocInPlace)
- }
+ if new_size <= u { Ok(()) } else { Err(CannotReallocInPlace) }
}
/// Attempts to shrink the allocation referenced by `ptr` to fit `new_size`.
/// function; clients are expected either to be able to recover from
/// `shrink_in_place` failures without aborting, or to fall back
/// on another reallocation method before resorting to an abort.
- unsafe fn shrink_in_place(&mut self,
- ptr: NonNull<u8>,
- layout: Layout,
- new_size: usize) -> Result<(), CannotReallocInPlace> {
+ unsafe fn shrink_in_place(
+ &mut self,
+ ptr: NonNull<u8>,
+ layout: Layout,
+ new_size: usize,
+ ) -> Result<(), CannotReallocInPlace> {
let _ = ptr; // this default implementation doesn't care about the actual address.
debug_assert!(new_size <= layout.size());
let (l, _u) = self.usable_size(&layout);
// layout.size() <= _u [guaranteed by usable_size()]
// new_layout.size() <= layout.size() [required by this method]
- if l <= new_size {
- Ok(())
- } else {
- Err(CannotReallocInPlace)
- }
+ if l <= new_size { Ok(()) } else { Err(CannotReallocInPlace) }
}
-
// == COMMON USAGE PATTERNS ==
// alloc_one, dealloc_one, alloc_array, realloc_array. dealloc_array
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
fn alloc_one<T>(&mut self) -> Result<NonNull<T>, AllocErr>
- where Self: Sized
+ where
+ Self: Sized,
{
let k = Layout::new::<T>();
- if k.size() > 0 {
- unsafe { self.alloc(k).map(|p| p.cast()) }
- } else {
- Err(AllocErr)
- }
+ if k.size() > 0 { unsafe { self.alloc(k).map(|p| p.cast()) } } else { Err(AllocErr) }
}
/// Deallocates a block suitable for holding an instance of `T`.
///
/// * the layout of `T` must *fit* that block of memory.
unsafe fn dealloc_one<T>(&mut self, ptr: NonNull<T>)
- where Self: Sized
+ where
+ Self: Sized,
{
let k = Layout::new::<T>();
if k.size() > 0 {
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
fn alloc_array<T>(&mut self, n: usize) -> Result<NonNull<T>, AllocErr>
- where Self: Sized
+ where
+ Self: Sized,
{
match Layout::array::<T>(n) {
- Ok(layout) if layout.size() > 0 => {
- unsafe {
- self.alloc(layout).map(|p| p.cast())
- }
- }
+ Ok(layout) if layout.size() > 0 => unsafe { self.alloc(layout).map(|p| p.cast()) },
_ => Err(AllocErr),
}
}
/// rather than directly invoking `panic!` or similar.
///
/// [`handle_alloc_error`]: ../../alloc/alloc/fn.handle_alloc_error.html
- unsafe fn realloc_array<T>(&mut self,
- ptr: NonNull<T>,
- n_old: usize,
- n_new: usize) -> Result<NonNull<T>, AllocErr>
- where Self: Sized
+ unsafe fn realloc_array<T>(
+ &mut self,
+ ptr: NonNull<T>,
+ n_old: usize,
+ n_new: usize,
+ ) -> Result<NonNull<T>, AllocErr>
+ where
+ Self: Sized,
{
match (Layout::array::<T>(n_old), Layout::array::<T>(n_new)) {
(Ok(k_old), Ok(k_new)) if k_old.size() > 0 && k_new.size() > 0 => {
debug_assert!(k_old.align() == k_new.align());
self.realloc(ptr.cast(), k_old, k_new.size()).map(NonNull::cast)
}
- _ => {
- Err(AllocErr)
- }
+ _ => Err(AllocErr),
}
}
///
/// Always returns `Err` on arithmetic overflow.
unsafe fn dealloc_array<T>(&mut self, ptr: NonNull<T>, n: usize) -> Result<(), AllocErr>
- where Self: Sized
+ where
+ Self: Sized,
{
match Layout::array::<T>(n) {
- Ok(k) if k.size() > 0 => {
- Ok(self.dealloc(ptr.cast(), k))
- }
- _ => {
- Err(AllocErr)
- }
+ Ok(k) if k.size() > 0 => Ok(self.dealloc(ptr.cast(), k)),
+ _ => Err(AllocErr),
}
}
}