#![stable(feature = "alloc_module", since = "1.28.0")]
-use core::intrinsics::{self, min_align_of_val, size_of_val};
-use core::ptr::{self, NonNull, Unique};
+#[cfg(not(test))]
+use core::intrinsics;
+use core::intrinsics::{min_align_of_val, size_of_val};
+
+use core::ptr::Unique;
+#[cfg(not(test))]
+use core::ptr::{self, NonNull};
#[stable(feature = "alloc_module", since = "1.28.0")]
#[doc(inline)]
/// accessed through the [free functions in `alloc`](self#functions).
#[unstable(feature = "allocator_api", issue = "32838")]
#[derive(Copy, Clone, Default, Debug)]
+#[cfg(not(test))]
pub struct Global;
+#[cfg(test)]
+pub use std::alloc::Global;
+
/// Allocate memory with the global allocator.
///
/// This function forwards calls to the [`GlobalAlloc::alloc`] method
unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
}
+#[cfg(not(test))]
impl Global {
#[inline]
fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
}
#[unstable(feature = "allocator_api", issue = "32838")]
+#[cfg(not(test))]
unsafe impl AllocRef for Global {
#[inline]
fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
// well.
// For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
// this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
-pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
+pub(crate) unsafe fn box_free<T: ?Sized, A: AllocRef>(ptr: Unique<T>, alloc: A) {
unsafe {
let size = size_of_val(ptr.as_ref());
let align = min_align_of_val(ptr.as_ref());
let layout = Layout::from_size_align_unchecked(size, align);
- Global.dealloc(ptr.cast().into(), layout)
+ alloc.dealloc(ptr.cast().into(), layout)
}
}
unsafe { oom_impl(layout) }
}
- #[cfg(not(any(test, bootstrap)))]
+ #[cfg(not(any(target_os = "hermit", test, bootstrap)))]
#[doc(hidden)]
#[allow(unused_attributes)]
#[unstable(feature = "alloc_internals", issue = "none")]
use crate::boxed::Box;
const B: usize = 6;
- pub const MIN_LEN: usize = B - 1;
pub const CAPACITY: usize = 2 * B - 1;
+ pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
const KV_IDX_CENTER: usize = B - 1;
const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
#[repr(C)]
// gdb_providers.py uses this type name for introspection.
struct InternalNode<K, V> {
- // gdb_providers.py uses this field name for introspection.
data: LeafNode<K, V>,
/// The pointers to the children of this node. `len + 1` of these are considered
impl<K, V> BoxedNode<K, V> {
fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
- BoxedNode { ptr: Box::into_unique(node) }
+ BoxedNode { ptr: Box::into_unique(node).0 }
}
/// Wraps a heap-allocated internal node as a `BoxedNode`.
///
/// `Box::leak` relinquishes ownership of the allocation; the resulting
/// `BoxedNode` holds a raw `Unique` pointer to the node's leading
/// `data` field. Since `InternalNode` is `#[repr(C)]` with `data`
/// first, that pointer also identifies the whole internal node.
fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
    let node = Box::leak(node);
    BoxedNode { ptr: Unique::from(&mut node.data) }
}
- unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
- BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } }
- }
-
/// Returns the node pointer as a `NonNull<LeafNode<K, V>>`.
///
/// This is a non-owning view (`&self`): the `BoxedNode` retains
/// logical ownership of the allocation behind `self.ptr`.
fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
NonNull::from(self.ptr)
}
/// and is the opposite of `pop_internal_level`.
pub fn push_internal_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
let mut new_node = Box::new(unsafe { InternalNode::new() });
- new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
+ new_node.edges[0].write(unsafe { ptr::read(&mut self.node) });
self.node = BoxedNode::from_internal(new_node);
self.height += 1;
let top = self.node.ptr;
- let internal_node = unsafe { self.internal_node_as_mut() };
- self.node = unsafe { BoxedNode::from_ptr(internal_node.first_edge().descend().node) };
+ let mut internal_node = unsafe { self.internal_node_as_mut() };
+ self.node = unsafe { internal_node.as_internal_mut().edges[0].assume_init_read() };
self.height -= 1;
self.node_as_mut().as_leaf_mut().parent = None;
/// Adds a key/value pair to the beginning of the node.
fn push_front(&mut self, key: K, val: V) {
- debug_assert!(self.len() < CAPACITY);
+ assert!(self.len() < CAPACITY);
unsafe {
slice_insert(self.keys_mut(), 0, key);
unsafe {
slice_insert(self.keys_mut(), 0, key);
slice_insert(self.vals_mut(), 0, val);
- slice_insert(
- slice::from_raw_parts_mut(
- MaybeUninit::slice_as_mut_ptr(&mut self.as_internal_mut().edges),
- self.len() + 1,
- ),
- 0,
- edge.node,
- );
+ slice_insert(self.edges_mut(), 0, edge.node);
}
self.as_leaf_mut().len += 1;
}
}
- impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::Edge> {
- /// Helps implementations of `insert_fit` for a particular `NodeType`,
- /// by taking care of leaf data.
+ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
/// Inserts a new key/value pair between the key/value pairs to the right and left of
/// this edge. This method assumes that there is enough space in the node for the new
/// pair to fit.
- fn leafy_insert_fit(&mut self, key: K, val: V) {
+ ///
+ /// The returned pointer points to the inserted value.
+ fn insert_fit(&mut self, key: K, val: V) -> *mut V {
debug_assert!(self.node.len() < CAPACITY);
unsafe {
slice_insert(self.node.keys_mut(), self.idx, key);
slice_insert(self.node.vals_mut(), self.idx, val);
-
self.node.as_leaf_mut().len += 1;
- }
- }
- }
- impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
- /// Inserts a new key/value pair between the key/value pairs to the right and left of
- /// this edge. This method assumes that there is enough space in the node for the new
- /// pair to fit.
- ///
- /// The returned pointer points to the inserted value.
- fn insert_fit(&mut self, key: K, val: V) -> *mut V {
- self.leafy_insert_fit(key, val);
- unsafe { self.node.val_mut_at(self.idx) }
+ self.node.val_mut_at(self.idx)
+ }
}
}
/// between this edge and the key/value pair to the right of this edge. This method assumes
/// that there is enough space in the node for the new pair to fit.
fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
+ debug_assert!(self.node.len() < CAPACITY);
debug_assert!(edge.height == self.node.height - 1);
unsafe {
+ slice_insert(self.node.keys_mut(), self.idx, key);
+ slice_insert(self.node.vals_mut(), self.idx, val);
slice_insert(self.node.edges_mut(), self.idx + 1, edge.node);
- self.leafy_insert_fit(key, val);
+ self.node.as_leaf_mut().len += 1;
self.node.correct_childrens_parent_links((self.idx + 1)..=self.node.len());
}
}
impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+ /// Helps implementations of `split` for a particular `NodeType`,
+ /// by calculating the length of the new node.
+ fn split_new_node_len(&self) -> usize {
+ debug_assert!(self.idx < self.node.len());
+ self.node.len() - self.idx - 1
+ }
+
/// Helps implementations of `split` for a particular `NodeType`,
/// by taking care of leaf data.
- fn leafy_split(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V, usize) {
+ fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
+ let new_len = self.split_new_node_len();
unsafe {
let k = ptr::read(self.node.key_at(self.idx));
let v = ptr::read(self.node.val_at(self.idx));
- let new_len = self.node.len() - self.idx - 1;
-
ptr::copy_nonoverlapping(
self.node.key_at(self.idx + 1),
MaybeUninit::slice_as_mut_ptr(&mut new_node.keys),
self.node.as_leaf_mut().len = self.idx as u16;
new_node.len = new_len as u16;
- (k, v, new_len)
+ (k, v)
}
}
}
impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::KV> {
/// Splits the underlying node into three parts:
///
- /// - The node is truncated to only contain the key/value pairs to the right of
+ /// - The node is truncated to only contain the key/value pairs to the left of
/// this handle.
/// - The key and value pointed to by this handle are extracted.
/// - All the key/value pairs to the right of this handle are put into a newly
unsafe {
let mut new_node = Box::new(LeafNode::new());
- let (k, v, _) = self.leafy_split(&mut new_node);
+ let (k, v) = self.split_leaf_data(&mut new_node);
- (self.node, k, v, Root { node: BoxedNode::from_leaf(new_node), height: 0 })
+ let right = Root { node: BoxedNode::from_leaf(new_node), height: 0 };
+ (self.node, k, v, right)
}
}
/// Splits the underlying node into three parts:
///
/// - The node is truncated to only contain the edges and key/value pairs to the
- /// right of this handle.
+ /// left of this handle.
/// - The key and value pointed to by this handle are extracted.
/// - All the edges and key/value pairs to the right of this handle are put into
/// a newly allocated node.
pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
unsafe {
let mut new_node = Box::new(InternalNode::new());
-
- let (k, v, new_len) = self.leafy_split(&mut new_node.data);
- let height = self.node.height;
- let old_node = &*self.node.as_internal_ptr();
-
+ // Move edges out before reducing length:
+ let new_len = self.split_new_node_len();
ptr::copy_nonoverlapping(
- old_node.edges.as_ptr().add(self.idx + 1),
- new_node.edges.as_mut_ptr(),
+ self.node.edge_at(self.idx + 1),
+ MaybeUninit::slice_as_mut_ptr(&mut new_node.edges),
new_len + 1,
);
+ let (k, v) = self.split_leaf_data(&mut new_node.data);
- let mut new_root = Root { node: BoxedNode::from_internal(new_node), height };
+ let height = self.node.height;
+ let mut right = Root { node: BoxedNode::from_internal(new_node), height };
- new_root.internal_node_as_mut().correct_childrens_parent_links(0..=new_len);
+ right.internal_node_as_mut().correct_childrens_parent_links(0..=new_len);
- (self.node, k, v, new_root)
+ (self.node, k, v, right)
}
}
use core::intrinsics;
use core::mem::{self, ManuallyDrop, MaybeUninit};
use core::ops::Drop;
-use core::ptr::{NonNull, Unique};
+use core::ptr::{self, NonNull, Unique};
use core::slice;
use crate::alloc::{handle_alloc_error, AllocRef, Global, Layout};
/// Reconstitutes a `RawVec` from a pointer and capacity, using the
/// global allocator (delegates to `from_raw_parts_in` with `Global`).
///
/// # Safety
///
/// Same contract as `from_raw_parts_in` — presumably `ptr` must have
/// been allocated via `Global` with a layout matching `capacity`.
/// NOTE(review): the exact contract is documented on
/// `from_raw_parts_in`, which is not visible in this chunk; confirm there.
pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
unsafe { Self::from_raw_parts_in(ptr, capacity, Global) }
}
- #[allow_internal_unstable(const_fn)]
+}
+
+impl<T, A: AllocRef> RawVec<T, A> {
+ /// Like `new`, but parameterized over the choice of allocator for
+ /// the returned `RawVec`.
++ #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))]
++ #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))]
+ pub const fn new_in(alloc: A) -> Self {
+ // `cap: 0` means "unallocated". zero-sized types are ignored.
+ Self { ptr: Unique::dangling(), cap: 0, alloc }
+ }
+
+ /// Like `with_capacity`, but parameterized over the choice of
+ /// allocator for the returned `RawVec`.
+ #[inline]
+ pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+ Self::allocate_in(capacity, AllocInit::Uninitialized, alloc)
+ }
+
+ /// Like `with_capacity_zeroed`, but parameterized over the choice
+ /// of allocator for the returned `RawVec`.
+ #[inline]
+ pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
+ Self::allocate_in(capacity, AllocInit::Zeroed, alloc)
+ }
/// Converts a `Box<[T]>` into a `RawVec<T>`.
- pub fn from_box(slice: Box<[T]>) -> Self {
+ pub fn from_box(slice: Box<[T], A>) -> Self {
unsafe {
- let mut slice = ManuallyDrop::new(slice);
- RawVec::from_raw_parts(slice.as_mut_ptr(), slice.len())
+ let (slice, alloc) = Box::into_raw_with_alloc(slice);
+ RawVec::from_raw_parts_in(slice.as_mut_ptr(), slice.len(), alloc)
}
}
///
/// Note, that the requested capacity and `self.capacity()` could differ, as
/// an allocator could overallocate and return a greater memory block than requested.
- pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>]> {
+ pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>], A> {
// Sanity-check one half of the safety requirement (we cannot check the other half).
debug_assert!(
len <= self.capacity(),
let me = ManuallyDrop::new(self);
unsafe {
let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
- Box::from_raw(slice)
+ Box::from_raw_in(slice, ptr::read(&me.alloc))
}
}
-}
-
-impl<T, A: AllocRef> RawVec<T, A> {
- /// Like `new`, but parameterized over the choice of allocator for
- /// the returned `RawVec`.
- #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))]
- #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))]
- pub const fn new_in(alloc: A) -> Self {
- // `cap: 0` means "unallocated". zero-sized types are ignored.
- Self { ptr: Unique::dangling(), cap: 0, alloc }
- }
-
- /// Like `with_capacity`, but parameterized over the choice of
- /// allocator for the returned `RawVec`.
- #[inline]
- pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
- Self::allocate_in(capacity, AllocInit::Uninitialized, alloc)
- }
-
- /// Like `with_capacity_zeroed`, but parameterized over the choice
- /// of allocator for the returned `RawVec`.
- #[inline]
- pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
- Self::allocate_in(capacity, AllocInit::Zeroed, alloc)
- }
fn allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Self {
if mem::size_of::<T>() == 0 {
/// Ensures that the buffer contains at least enough space to hold `len +
/// additional` elements. If it doesn't already have enough capacity, will
/// reallocate enough space plus comfortable slack space to get amortized
- /// `O(1)` behavior. Will limit this behavior if it would needlessly cause
+ /// *O*(1) behavior. Will limit this behavior if it would needlessly cause
/// itself to panic.
///
/// If `len` exceeds `self.capacity()`, this may fail to actually allocate