From: Tim Diekmann
Date: Sun, 25 Oct 2020 15:32:28 +0000 (+0100)
Subject: Merge remote-tracking branch 'upstream/master' into box-alloc
X-Git-Url: https://git.lizzy.rs/?a=commitdiff_plain;h=06e4497a04615ad95dff4240ca9980f19ed364ad;hp=-c;p=rust.git

Merge remote-tracking branch 'upstream/master' into box-alloc
---

06e4497a04615ad95dff4240ca9980f19ed364ad
diff --combined library/alloc/src/alloc.rs
index bbde60952d3,dd7dcfbc4aa..0a4f88dedbb
--- a/library/alloc/src/alloc.rs
+++ b/library/alloc/src/alloc.rs
@@@ -2,13 -2,8 +2,13 @@@
 
 #![stable(feature = "alloc_module", since = "1.28.0")]
 
-use core::intrinsics::{self, min_align_of_val, size_of_val};
-use core::ptr::{self, NonNull, Unique};
+#[cfg(not(test))]
+use core::intrinsics;
+use core::intrinsics::{min_align_of_val, size_of_val};
+
+use core::ptr::Unique;
+#[cfg(not(test))]
+use core::ptr::{self, NonNull};
 
 #[stable(feature = "alloc_module", since = "1.28.0")]
 #[doc(inline)]
@@@ -44,12 -39,8 +44,12 @@@ extern "Rust" 
 /// accessed through the [free functions in `alloc`](self#functions).
 #[unstable(feature = "allocator_api", issue = "32838")]
 #[derive(Copy, Clone, Default, Debug)]
+#[cfg(not(test))]
 pub struct Global;
 
+#[cfg(test)]
+pub use std::alloc::Global;
+
 /// Allocate memory with the global allocator.
 ///
 /// This function forwards calls to the [`GlobalAlloc::alloc`] method
@@@ -153,7 -144,6 +153,7 @@@ pub unsafe fn alloc_zeroed(layout: Layo
     unsafe { __rust_alloc_zeroed(layout.size(), layout.align()) }
 }
 
+#[cfg(not(test))]
 impl Global {
     #[inline]
     fn alloc_impl(&self, layout: Layout, zeroed: bool) -> Result<NonNull<[u8]>, AllocError> {
@@@ -217,7 -207,6 +217,7 @@@
 }
 
 #[unstable(feature = "allocator_api", issue = "32838")]
+#[cfg(not(test))]
 unsafe impl AllocRef for Global {
     #[inline]
     fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
@@@ -324,12 -313,12 +324,12 @@@ unsafe fn exchange_malloc(size: usize, 
 // well.
 // For example if `Box` is changed to `struct Box<T: ?Sized, A: AllocRef>(Unique<T>, A)`,
 // this function has to be changed to `fn box_free<T: ?Sized, A: AllocRef>(Unique<T>, A)` as well.
-pub(crate) unsafe fn box_free<T: ?Sized>(ptr: Unique<T>) {
+pub(crate) unsafe fn box_free<T: ?Sized, A: AllocRef>(ptr: Unique<T>, alloc: A) {
     unsafe {
         let size = size_of_val(ptr.as_ref());
         let align = min_align_of_val(ptr.as_ref());
         let layout = Layout::from_size_align_unchecked(size, align);
-        Global.dealloc(ptr.cast().into(), layout)
+        alloc.dealloc(ptr.cast().into(), layout)
     }
 }
@@@ -383,7 -372,7 +383,7 @@@ pub fn handle_alloc_error(layout: Layou
     unsafe { oom_impl(layout) }
 }
 
- #[cfg(not(any(test, bootstrap)))]
+ #[cfg(not(any(target_os = "hermit", test, bootstrap)))]
 #[doc(hidden)]
 #[allow(unused_attributes)]
 #[unstable(feature = "alloc_internals", issue = "none")]
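
The `box_free` hunk above is the heart of the box-alloc branch: deallocation now goes
through the allocator carried by the `Box` instead of unconditionally through `Global`.
A minimal sketch of what that enables, written against the unstable `allocator_api` as
it stood in this diff (the `CountingAlloc` wrapper is hypothetical, not part of this patch):

    #![feature(allocator_api)]

    use std::alloc::{AllocError, AllocRef, Global, Layout};
    use std::ptr::NonNull;
    use std::sync::atomic::{AtomicUsize, Ordering};

    /// Hypothetical wrapper allocator: delegates to `Global` and tracks live bytes.
    #[derive(Default)]
    struct CountingAlloc {
        live: AtomicUsize,
    }

    unsafe impl AllocRef for CountingAlloc {
        fn alloc(&self, layout: Layout) -> Result<NonNull<[u8]>, AllocError> {
            // Count the request, then delegate to the global allocator.
            self.live.fetch_add(layout.size(), Ordering::Relaxed);
            Global.alloc(layout)
        }

        unsafe fn dealloc(&self, ptr: NonNull<u8>, layout: Layout) {
            self.live.fetch_sub(layout.size(), Ordering::Relaxed);
            unsafe { Global.dealloc(ptr, layout) }
        }
    }

With `Box` parameterized over its allocator, dropping a `Box::new_in(value, &counter)`
would reach `box_free(ptr, alloc)` and hence `CountingAlloc::dealloc` rather than
`Global.dealloc`. (On later nightlies the trait was renamed to `Allocator` with
`allocate`/`deallocate` methods, so this sketch is tied to the toolchain of this era.)
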
diff --combined library/alloc/src/collections/btree/node.rs
index 12f29d7efbc,f5aff9bf494..c8d3de9e5cd
--- a/library/alloc/src/collections/btree/node.rs
+++ b/library/alloc/src/collections/btree/node.rs
@@@ -38,8 -38,8 +38,8 @@@ use crate::alloc::{AllocRef, Global, La
 use crate::boxed::Box;
 
 const B: usize = 6;
- pub const MIN_LEN: usize = B - 1;
 pub const CAPACITY: usize = 2 * B - 1;
+ pub const MIN_LEN_AFTER_SPLIT: usize = B - 1;
 const KV_IDX_CENTER: usize = B - 1;
 const EDGE_IDX_LEFT_OF_CENTER: usize = B - 1;
 const EDGE_IDX_RIGHT_OF_CENTER: usize = B;
@@@ -87,7 -87,6 +87,6 @@@ impl<K, V> LeafNode<K, V
 #[repr(C)]
 // gdb_providers.py uses this type name for introspection.
 struct InternalNode<K, V> {
-     // gdb_providers.py uses this field name for introspection.
     data: LeafNode<K, V>,
 
     /// The pointers to the children of this node. `len + 1` of these are considered
@@@ -121,17 -120,13 +120,13 @@@ struct BoxedNode<K, V
 impl<K, V> BoxedNode<K, V> {
     fn from_leaf(node: Box<LeafNode<K, V>>) -> Self {
-         BoxedNode { ptr: Box::into_unique(node) }
+         BoxedNode { ptr: Box::into_unique(node).0 }
     }
 
     fn from_internal(node: Box<InternalNode<K, V>>) -> Self {
         BoxedNode { ptr: Unique::from(&mut Box::leak(node).data) }
     }
 
-     unsafe fn from_ptr(ptr: NonNull<LeafNode<K, V>>) -> Self {
-         BoxedNode { ptr: unsafe { Unique::new_unchecked(ptr.as_ptr()) } }
-     }
- 
     fn as_ptr(&self) -> NonNull<LeafNode<K, V>> {
         NonNull::from(self.ptr)
     }
@@@ -199,7 -194,7 +194,7 @@@ impl<K, V> Root<K, V
     /// and is the opposite of `pop_internal_level`.
     pub fn push_internal_level(&mut self) -> NodeRef<marker::Mut<'_>, K, V, marker::Internal> {
         let mut new_node = Box::new(unsafe { InternalNode::new() });
-         new_node.edges[0].write(unsafe { BoxedNode::from_ptr(self.node.as_ptr()) });
+         new_node.edges[0].write(unsafe { ptr::read(&mut self.node) });
         self.node = BoxedNode::from_internal(new_node);
         self.height += 1;
@@@ -225,8 -220,8 +220,8 @@@
 
         let top = self.node.ptr;
 
-         let internal_node = unsafe { self.internal_node_as_mut() };
-         self.node = unsafe { BoxedNode::from_ptr(internal_node.first_edge().descend().node) };
+         let mut internal_node = unsafe { self.internal_node_as_mut() };
+         self.node = unsafe { internal_node.as_internal_mut().edges[0].assume_init_read() };
         self.height -= 1;
         self.node_as_mut().as_leaf_mut().parent = None;
@@@ -616,7 -611,7 +611,7 @@@ impl<'a, K: 'a, V: 'a> NodeRef<marker::
- impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::Edge> {
-     /// Helps implementations of `insert_fit` for a particular `NodeType`,
-     /// by taking care of leaf data.
+ impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
     /// Inserts a new key/value pair between the key/value pairs to the right and left of
     /// this edge. This method assumes that there is enough space in the node for the new
     /// pair to fit.
-     fn leafy_insert_fit(&mut self, key: K, val: V) {
+     ///
+     /// The returned pointer points to the inserted value.
+     fn insert_fit(&mut self, key: K, val: V) -> *mut V {
         debug_assert!(self.node.len() < CAPACITY);
 
         unsafe {
             slice_insert(self.node.keys_mut(), self.idx, key);
             slice_insert(self.node.vals_mut(), self.idx, val);
             self.node.as_leaf_mut().len += 1;
-         }
-     }
- }
- 
- impl<'a, K: 'a, V: 'a> Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge> {
-     /// Inserts a new key/value pair between the key/value pairs to the right and left of
-     /// this edge. This method assumes that there is enough space in the node for the new
-     /// pair to fit.
-     ///
-     /// The returned pointer points to the inserted value.
-     fn insert_fit(&mut self, key: K, val: V) -> *mut V {
-         self.leafy_insert_fit(key, val);
-         unsafe { self.node.val_mut_at(self.idx) }
+ 
+             self.node.val_mut_at(self.idx)
+         }
     }
 }
@@@ -1001,11 -978,14 +978,14 @@@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<
     fn insert_fit(&mut self, key: K, val: V, edge: Root<K, V>) {
+         debug_assert!(self.node.len() < CAPACITY);
         debug_assert!(edge.height == self.node.height - 1);
 
         unsafe {
+             slice_insert(self.node.keys_mut(), self.idx, key);
+             slice_insert(self.node.vals_mut(), self.idx, val);
             slice_insert(self.node.edges_mut(), self.idx + 1, edge.node);
-             self.leafy_insert_fit(key, val);
+             self.node.as_leaf_mut().len += 1;
 
             self.node.correct_childrens_parent_links((self.idx + 1)..=self.node.len());
         }
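
The `pop_internal_level` hunk above replaces the `BoxedNode::from_ptr` round-trip with a
direct `assume_init_read` of the first edge slot. A small standalone sketch of that
`MaybeUninit` move-out pattern (`take_first` is illustrative, not btree code; on nightlies
of this vintage it needed the unstable `maybe_uninit_extra` feature, today it is stable):

    use std::mem::MaybeUninit;

    /// Moves the value out of the first slot by value, leaving the slot
    /// logically uninitialized again (no drop runs for the old location).
    fn take_first<T>(slots: &mut [MaybeUninit<T>]) -> T {
        // SAFETY (assumed for this sketch): the caller guarantees slots[0]
        // is initialized and never treats it as initialized afterwards.
        unsafe { slots[0].assume_init_read() }
    }

    fn main() {
        let mut slots = [MaybeUninit::new(String::from("root")), MaybeUninit::uninit()];
        let s = take_first(&mut slots);
        assert_eq!(s, "root");
    }
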
@@@ -1136,15 -1116,21 +1116,21 @@@ impl<'a, K: 'a, V: 'a, NodeType> Handle
 }
 
 impl<'a, K: 'a, V: 'a, NodeType> Handle<NodeRef<marker::Mut<'a>, K, V, NodeType>, marker::KV> {
+     /// Helps implementations of `split` for a particular `NodeType`,
+     /// by calculating the length of the new node.
+     fn split_new_node_len(&self) -> usize {
+         debug_assert!(self.idx < self.node.len());
+         self.node.len() - self.idx - 1
+     }
+ 
     /// Helps implementations of `split` for a particular `NodeType`,
     /// by taking care of leaf data.
-     fn leafy_split(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V, usize) {
+     fn split_leaf_data(&mut self, new_node: &mut LeafNode<K, V>) -> (K, V) {
+         let new_len = self.split_new_node_len();
         unsafe {
             let k = ptr::read(self.node.key_at(self.idx));
             let v = ptr::read(self.node.val_at(self.idx));
 
-             let new_len = self.node.len() - self.idx - 1;
- 
             ptr::copy_nonoverlapping(
                 self.node.key_at(self.idx + 1),
                 MaybeUninit::slice_as_mut_ptr(&mut new_node.keys),
@@@ -1158,7 -1144,7 +1144,7 @@@
 
             self.node.as_leaf_mut().len = self.idx as u16;
             new_node.len = new_len as u16;
-             (k, v, new_len)
+             (k, v)
         }
     }
 }
@@@ -1166,7 -1152,7 +1152,7 @@@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<
     /// Splits the underlying node into three parts:
     ///
-     /// - The node is truncated to only contain the key/value pairs to the right of
+     /// - The node is truncated to only contain the key/value pairs to the left of
     ///   this handle.
     /// - The key and value pointed to by this handle are extracted.
     /// - All the key/value pairs to the right of this handle are put into a newly
@@@ -1175,9 -1161,10 +1161,10 @@@
         unsafe {
             let mut new_node = Box::new(LeafNode::new());
 
-             let (k, v, _) = self.leafy_split(&mut new_node);
+             let (k, v) = self.split_leaf_data(&mut new_node);
 
-             (self.node, k, v, Root { node: BoxedNode::from_leaf(new_node), height: 0 })
+             let right = Root { node: BoxedNode::from_leaf(new_node), height: 0 };
+             (self.node, k, v, right)
         }
     }
@@@ -1211,29 -1198,28 +1198,28 @@@ impl<'a, K: 'a, V: 'a> Handle<NodeRef<
     pub fn split(mut self) -> (NodeRef<marker::Mut<'a>, K, V, marker::Internal>, K, V, Root<K, V>) {
         unsafe {
             let mut new_node = Box::new(InternalNode::new());
- 
-             let (k, v, new_len) = self.leafy_split(&mut new_node.data);
- 
-             let height = self.node.height;
-             let old_node = &*self.node.as_internal_ptr();
- 
+             // Move edges out before reducing length:
+             let new_len = self.split_new_node_len();
             ptr::copy_nonoverlapping(
-                 old_node.edges.as_ptr().add(self.idx + 1),
-                 new_node.edges.as_mut_ptr(),
+                 self.node.edge_at(self.idx + 1),
+                 MaybeUninit::slice_as_mut_ptr(&mut new_node.edges),
                 new_len + 1,
             );
+             let (k, v) = self.split_leaf_data(&mut new_node.data);
 
-             let mut new_root = Root { node: BoxedNode::from_internal(new_node), height };
+             let height = self.node.height;
+             let mut right = Root { node: BoxedNode::from_internal(new_node), height };
 
-             new_root.internal_node_as_mut().correct_childrens_parent_links(0..=new_len);
+             right.internal_node_as_mut().correct_childrens_parent_links(0..=new_len);
 
-             (self.node, k, v, new_root)
+             (self.node, k, v, right)
         }
     }
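
The split refactor above centralizes the length arithmetic in `split_new_node_len`. A
standalone check of that arithmetic, with the constants copied from this file (`split_at`
is a stand-in for `Handle::split`, not real btree code):

    const B: usize = 6;
    const CAPACITY: usize = 2 * B - 1; // 11
    const MIN_LEN_AFTER_SPLIT: usize = B - 1; // 5
    const KV_IDX_CENTER: usize = B - 1; // 5

    /// After splitting at key/value index `idx`, `idx` pairs stay in the left
    /// node, one pair is extracted, and `len - idx - 1` pairs move to the right
    /// node (the quantity `split_new_node_len` computes).
    fn split_at(len: usize, idx: usize) -> (usize, usize) {
        assert!(idx < len);
        (idx, len - idx - 1)
    }

    fn main() {
        // Splitting a full node at the center leaves both halves exactly at
        // MIN_LEN_AFTER_SPLIT, matching the renamed constant above.
        let (left, right) = split_at(CAPACITY, KV_IDX_CENTER);
        assert_eq!((left, right), (MIN_LEN_AFTER_SPLIT, MIN_LEN_AFTER_SPLIT));
    }
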
diff --combined library/alloc/src/raw_vec.rs
index a60e676fda5,657b568e7f6..a4240308bb3
--- a/library/alloc/src/raw_vec.rs
+++ b/library/alloc/src/raw_vec.rs
@@@ -6,7 -6,7 +6,7 @@@ use core::cmp
 use core::intrinsics;
 use core::mem::{self, ManuallyDrop, MaybeUninit};
 use core::ops::Drop;
-use core::ptr::{NonNull, Unique};
+use core::ptr::{self, NonNull, Unique};
 use core::slice;
 
 use crate::alloc::{handle_alloc_error, AllocRef, Global, Layout};
@@@ -111,36 -111,12 +111,37 @@@ impl<T> RawVec<T, Global
     pub unsafe fn from_raw_parts(ptr: *mut T, capacity: usize) -> Self {
         unsafe { Self::from_raw_parts_in(ptr, capacity, Global) }
     }
+}
+
+impl<T, A: AllocRef> RawVec<T, A> {
+    /// Like `new`, but parameterized over the choice of allocator for
+    /// the returned `RawVec`.
++    #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))]
++    #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))]
+    pub const fn new_in(alloc: A) -> Self {
+        // `cap: 0` means "unallocated". zero-sized types are ignored.
+        Self { ptr: Unique::dangling(), cap: 0, alloc }
+    }
+
+    /// Like `with_capacity`, but parameterized over the choice of
+    /// allocator for the returned `RawVec`.
+    #[inline]
+    pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
+        Self::allocate_in(capacity, AllocInit::Uninitialized, alloc)
+    }
+
+    /// Like `with_capacity_zeroed`, but parameterized over the choice
+    /// of allocator for the returned `RawVec`.
+    #[inline]
+    pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
+        Self::allocate_in(capacity, AllocInit::Zeroed, alloc)
+    }
 
     /// Converts a `Box<[T]>` into a `RawVec`.
-    pub fn from_box(slice: Box<[T]>) -> Self {
+    pub fn from_box(slice: Box<[T], A>) -> Self {
         unsafe {
-            let mut slice = ManuallyDrop::new(slice);
-            RawVec::from_raw_parts(slice.as_mut_ptr(), slice.len())
+            let (slice, alloc) = Box::into_raw_with_alloc(slice);
+            RawVec::from_raw_parts_in(slice.as_mut_ptr(), slice.len(), alloc)
         }
     }
@@@ -156,7 -132,7 +157,7 @@@
     ///
     /// Note, that the requested capacity and `self.capacity()` could differ, as
     /// an allocator could overallocate and return a greater memory block than requested.
-     pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>]> {
+     pub unsafe fn into_box(self, len: usize) -> Box<[MaybeUninit<T>], A> {
         // Sanity-check one half of the safety requirement (we cannot check the other half).
         debug_assert!(
             len <= self.capacity(),
@@@ -166,9 -142,34 +167,9 @@@
         let me = ManuallyDrop::new(self);
         unsafe {
             let slice = slice::from_raw_parts_mut(me.ptr() as *mut MaybeUninit<T>, len);
-            Box::from_raw(slice)
+            Box::from_raw_in(slice, ptr::read(&me.alloc))
         }
     }
-}
-
- impl<T, A: AllocRef> RawVec<T, A> {
-     /// Like `new`, but parameterized over the choice of allocator for
-     /// the returned `RawVec`.
-     #[cfg_attr(not(bootstrap), rustc_allow_const_fn_unstable(const_fn))]
-     #[cfg_attr(bootstrap, allow_internal_unstable(const_fn))]
-     pub const fn new_in(alloc: A) -> Self {
-         // `cap: 0` means "unallocated". zero-sized types are ignored.
-         Self { ptr: Unique::dangling(), cap: 0, alloc }
-     }
- 
-     /// Like `with_capacity`, but parameterized over the choice of
-     /// allocator for the returned `RawVec`.
-     #[inline]
-     pub fn with_capacity_in(capacity: usize, alloc: A) -> Self {
-         Self::allocate_in(capacity, AllocInit::Uninitialized, alloc)
-     }
- 
-     /// Like `with_capacity_zeroed`, but parameterized over the choice
-     /// of allocator for the returned `RawVec`.
-     #[inline]
-     pub fn with_capacity_zeroed_in(capacity: usize, alloc: A) -> Self {
-         Self::allocate_in(capacity, AllocInit::Zeroed, alloc)
-     }
 
     fn allocate_in(capacity: usize, init: AllocInit, alloc: A) -> Self {
         if mem::size_of::<T>() == 0 {
@@@ -259,7 -260,7 +260,7 @@@
     /// Ensures that the buffer contains at least enough space to hold `len +
     /// additional` elements. If it doesn't already have enough capacity, will
     /// reallocate enough space plus comfortable slack space to get amortized
-     /// `O(1)` behavior. Will limit this behavior if it would needlessly cause
+     /// *O*(1) behavior. Will limit this behavior if it would needlessly cause
     /// itself to panic.
     ///
     /// If `len` exceeds `self.capacity()`, this may fail to actually allocate
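
`from_box`/`into_box` above now carry the allocator across the `Box<[T], A>` /
`RawVec<T, A>` boundary, and `into_box` moves `alloc` out of a type that implements
`Drop` via the `ManuallyDrop` + `ptr::read` idiom. A generic sketch of that idiom
(`Container`/`decompose` are illustrative stand-ins, not alloc-crate APIs):

    use std::mem::ManuallyDrop;
    use std::ptr;

    struct Container<A> {
        alloc: A,
        // ...other fields whose ownership is handed off elsewhere...
    }

    impl<A> Drop for Container<A> {
        fn drop(&mut self) {
            // Normal cleanup; must be skipped once the fields are moved out.
        }
    }

    /// Moves `alloc` out without running `Container`'s destructor.
    fn decompose<A>(c: Container<A>) -> A {
        let c = ManuallyDrop::new(c);
        // SAFETY: `c` is never dropped, so `c.alloc` is read exactly once
        // and cannot be dropped a second time.
        unsafe { ptr::read(&c.alloc) }
    }

    fn main() {
        let a = decompose(Container { alloc: String::from("alloc") });
        assert_eq!(a, "alloc");
    }
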
diff --combined library/alloc/src/sync.rs
index d70de1163c9,cd18535b069..73ff795c01a
--- a/library/alloc/src/sync.rs
+++ b/library/alloc/src/sync.rs
@@@ -10,6 -10,7 +10,7 @@@ use core::cmp::Ordering
 use core::convert::{From, TryFrom};
 use core::fmt;
 use core::hash::{Hash, Hasher};
+ use core::hint;
 use core::intrinsics::abort;
 use core::iter;
 use core::marker::{PhantomData, Unpin, Unsize};
@@@ -764,6 -765,7 +765,7 @@@ impl<T: ?Sized> Arc<T
     loop {
         // check if the weak counter is currently "locked"; if so, spin.
         if cur == usize::MAX {
+             hint::spin_loop();
             cur = this.inner().weak.load(Relaxed);
             continue;
         }
@@@ -1006,7 -1008,7 +1008,7 @@@ impl<T: ?Sized> Arc<T
     fn from_box(v: Box<T>) -> Arc<T> {
         unsafe {
-            let box_unique = Box::into_unique(v);
+            let (box_unique, alloc) = Box::into_unique(v);
             let bptr = box_unique.as_ptr();
 
             let value_size = size_of_val(&*bptr);
@@@ -1020,7 -1022,7 +1022,7 @@@
             );
 
             // Free the allocation without dropping its contents
-            box_free(box_unique);
+            box_free(box_unique, alloc);
 
             Self::from_ptr(ptr)
         }
diff --combined src/test/ui/issues/issue-41974.stderr
index fb24aadde08,a092c94b9d5..cc4b3707dd6
--- a/src/test/ui/issues/issue-41974.stderr
+++ b/src/test/ui/issues/issue-41974.stderr
@@@ -1,13 -1,13 +1,13 @@@
-error[E0119]: conflicting implementations of trait `std::ops::Drop` for type `std::boxed::Box<_>`:
+error[E0119]: conflicting implementations of trait `std::ops::Drop` for type `std::boxed::Box<_, _>`:
   --> $DIR/issue-41974.rs:7:1
   |
 LL | impl<T> Drop for T where T: A {
   | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
   |
   = note: conflicting implementation in crate `alloc`:
-          - impl<T> Drop for Box<T>
-            where T: ?Sized;
-  = note: downstream crates may implement trait `A` for type `std::boxed::Box<_>`
+          - impl<T, A> Drop for Box<T, A>
+            where A: AllocRef, T: ?Sized;
+  = note: downstream crates may implement trait `A` for type `std::boxed::Box<_, _>`
 
 error[E0120]: the `Drop` trait may only be implemented for structs, enums, and unions
   --> $DIR/issue-41974.rs:7:18
@@@ -21,7 -21,7 +21,7 @@@ error[E0210]: type parameter `T` must b
 LL | impl<T> Drop for T where T: A {
    | ^ type parameter `T` must be used as the type parameter for some local type
    |
-    = note: implementing a foreign trait is only possible if at least one of the types for which is it implemented is local
+    = note: implementing a foreign trait is only possible if at least one of the types for which it is implemented is local
    = note: only traits defined in the current crate can be implemented for a type parameter
 
 error: aborting due to 3 previous errors
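
Finally, on the `sync.rs` hunks above: `Arc::downgrade` spins while the weak count is
"locked" at `usize::MAX`, and now issues `hint::spin_loop()` on each retry so the CPU can
relax while waiting. A sketch of that loop's shape outside of `Arc` (`bump_weak` is an
illustrative stand-in, not an `Arc` API; `std::hint::spin_loop` is the stable spelling of
the hint used in the patch):

    use std::hint;
    use std::sync::atomic::{AtomicUsize, Ordering};

    /// Increments `weak` unless it is locked (== usize::MAX), spinning
    /// politely until the lock holder writes a real value back.
    fn bump_weak(weak: &AtomicUsize) -> usize {
        let mut cur = weak.load(Ordering::Relaxed);
        loop {
            if cur == usize::MAX {
                hint::spin_loop(); // pause-style hint; keeps the spin cheap
                cur = weak.load(Ordering::Relaxed);
                continue;
            }
            // Try to bump the count; on failure, retry with the observed value.
            match weak.compare_exchange_weak(cur, cur + 1, Ordering::Acquire, Ordering::Relaxed) {
                Ok(_) => return cur + 1,
                Err(actual) => cur = actual,
            }
        }
    }

    fn main() {
        let weak = AtomicUsize::new(1);
        assert_eq!(bump_weak(&weak), 2);
    }
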