diff --git a/compiler/rustc_arena/src/lib.rs b/compiler/rustc_arena/src/lib.rs
index 62995dfd2e2..a5f1cbc96da 100644
--- a/compiler/rustc_arena/src/lib.rs
+++ b/compiler/rustc_arena/src/lib.rs
@@ -19,6 +19,7 @@
 #![feature(rustc_attrs)]
 #![cfg_attr(test, feature(test))]
 #![feature(strict_provenance)]
+#![feature(ptr_const_cast)]
 
 use smallvec::SmallVec;
 
@@ -27,7 +28,7 @@ use std::cmp;
 use std::marker::{PhantomData, Send};
 use std::mem::{self, MaybeUninit};
-use std::ptr;
+use std::ptr::{self, NonNull};
 use std::slice;
 
 #[inline(never)]
@@ -55,15 +56,24 @@ pub struct TypedArena<T> {
 
 struct ArenaChunk<T = u8> {
     /// The raw storage for the arena chunk.
-    storage: Box<[MaybeUninit<T>]>,
+    storage: NonNull<[MaybeUninit<T>]>,
     /// The number of valid entries in the chunk.
     entries: usize,
 }
 
+unsafe impl<#[may_dangle] T> Drop for ArenaChunk<T> {
+    fn drop(&mut self) {
+        unsafe { Box::from_raw(self.storage.as_mut()) };
+    }
+}
+
 impl<T> ArenaChunk<T> {
     #[inline]
     unsafe fn new(capacity: usize) -> ArenaChunk<T> {
-        ArenaChunk { storage: Box::new_uninit_slice(capacity), entries: 0 }
+        ArenaChunk {
+            storage: NonNull::new(Box::into_raw(Box::new_uninit_slice(capacity))).unwrap(),
+            entries: 0,
+        }
     }
 
     /// Destroys this arena chunk.
@@ -72,14 +82,15 @@ unsafe fn destroy(&mut self, len: usize) {
         // The branch on needs_drop() is an -O1 performance optimization.
         // Without the branch, dropping TypedArena<T> takes linear time.
         if mem::needs_drop::<T>() {
-            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut self.storage[..len]));
+            let slice = &mut *(self.storage.as_mut());
+            ptr::drop_in_place(MaybeUninit::slice_assume_init_mut(&mut slice[..len]));
         }
     }
 
     // Returns a pointer to the first allocated object.
     #[inline]
     fn start(&mut self) -> *mut T {
-        MaybeUninit::slice_as_mut_ptr(&mut self.storage)
+        self.storage.as_ptr() as *mut T
     }
 
     // Returns a pointer to the end of the allocated space.
@@ -90,7 +101,7 @@ fn end(&mut self) -> *mut T {
             // A pointer as large as possible for zero-sized elements.
             ptr::invalid_mut(!0)
         } else {
-            self.start().add(self.storage.len())
+            self.start().add((*self.storage.as_ptr()).len())
        }
     }
 }
@@ -274,7 +285,7 @@ fn grow(&self, additional: usize) {
         // If the previous chunk's len is less than HUGE_PAGE
         // bytes, then this chunk will be at least double the previous
         // chunk's size.
-        new_cap = last_chunk.storage.len().min(HUGE_PAGE / elem_size / 2);
+        new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / elem_size / 2);
         new_cap *= 2;
     } else {
         new_cap = PAGE / elem_size;
@@ -382,7 +393,7 @@ fn grow(&self, additional: usize) {
         // If the previous chunk's len is less than HUGE_PAGE
         // bytes, then this chunk will be at least double the previous
         // chunk's size.
-        new_cap = last_chunk.storage.len().min(HUGE_PAGE / 2);
+        new_cap = (*last_chunk.storage.as_ptr()).len().min(HUGE_PAGE / 2);
         new_cap *= 2;
     } else {
         new_cap = PAGE;
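
Note on the change above: the heart of the patch is swapping ArenaChunk's `storage` from an owning Box<[MaybeUninit<T>]> to a raw NonNull<[MaybeUninit<T>]>, with deallocation moved into an explicit Drop impl. The standalone sketch below reproduces that pattern on stable Rust; `Chunk`, `capacity`, and the iterator-based slice construction are illustrative stand-ins, not the rustc code (the patch itself uses the then-unstable Box::new_uninit_slice and a #[may_dangle] Drop impl).

use std::mem::MaybeUninit;
use std::ptr::NonNull;

struct Chunk<T> {
    // Raw pointer to the backing slice; ownership is managed manually in Drop.
    storage: NonNull<[MaybeUninit<T>]>,
    entries: usize,
}

impl<T> Chunk<T> {
    fn new(capacity: usize) -> Chunk<T> {
        // Allocate an uninitialized boxed slice, then immediately leak it
        // into a raw pointer (stable stand-in for `Box::new_uninit_slice`).
        let boxed: Box<[MaybeUninit<T>]> =
            (0..capacity).map(|_| MaybeUninit::uninit()).collect();
        Chunk { storage: NonNull::new(Box::into_raw(boxed)).unwrap(), entries: 0 }
    }

    fn start(&mut self) -> *mut T {
        // `NonNull::as_ptr` yields `*mut [MaybeUninit<T>]`; cast to the element type.
        self.storage.as_ptr() as *mut T
    }

    fn capacity(&self) -> usize {
        // Query the length through the raw slice pointer, mirroring how the
        // patch reads `storage` without going through a `Box`.
        unsafe { (*self.storage.as_ptr()).len() }
    }
}

impl<T> Drop for Chunk<T> {
    fn drop(&mut self) {
        // Reconstitute the Box so the allocation is freed exactly once.
        unsafe { drop(Box::from_raw(self.storage.as_ptr())) };
    }
}

fn main() {
    let mut chunk = Chunk::<u32>::new(16);
    assert_eq!(chunk.capacity(), 16);
    assert_eq!(chunk.entries, 0);
    assert!(!chunk.start().is_null());
}

The likely motivation for the swap: a Box field asserts unique ownership of the whole buffer every time the chunk is touched, which under Stacked Borrows invalidates pointers previously handed out from the arena; a raw NonNull field makes no such assertion.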
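
The patch writes the new destructor as `unsafe impl<#[may_dangle] T> Drop for ArenaChunk<T>`. The eyepatch matters because Box's built-in destructor already carries #[may_dangle]; replacing it with a plain Drop impl would make drop check more conservative and could reject arena users whose `T` borrows data that expires together with the arena. A minimal nightly-only sketch of the attribute, with `Chunk` again an illustrative stand-in:

#![feature(dropck_eyepatch)]

use std::mem::MaybeUninit;
use std::ptr::NonNull;

struct Chunk<T> {
    storage: NonNull<[MaybeUninit<T>]>,
    entries: usize,
}

// The `unsafe impl<#[may_dangle] T>` is a promise to drop check that this
// destructor only frees memory and never reads or drops a value of type `T`,
// so `T` may already be dangling when the chunk is dropped -- the same
// guarantee `Box<[MaybeUninit<T>]>` provided implicitly.
unsafe impl<#[may_dangle] T> Drop for Chunk<T> {
    fn drop(&mut self) {
        unsafe { drop(Box::from_raw(self.storage.as_ptr())) };
    }
}

fn main() {
    let storage: Box<[MaybeUninit<String>]> =
        (0..8).map(|_| MaybeUninit::uninit()).collect();
    let chunk = Chunk { storage: NonNull::new(Box::into_raw(storage)).unwrap(), entries: 0 };
    assert_eq!(chunk.entries, 0);
    drop(chunk); // frees the backing allocation without touching any `String`s
}

The `unsafe` on the impl is exactly that promise; it holds here because the slice elements are still MaybeUninit and are never dropped in place by this destructor.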
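
A piece of unchanged context worth calling out: for zero-sized types, `end()` returns `ptr::invalid_mut(!0)`, the highest representable address, so the distance from `start()` never runs out even though no real memory backs the arena. A sketch of the idea using the same strict-provenance API as the patch (nightly of this era; `end` here is a free-function stand-in for the method):

#![feature(strict_provenance)]

use std::mem;
use std::ptr;

fn end<T>(start: *mut T, capacity: usize) -> *mut T {
    if mem::size_of::<T>() == 0 {
        // A pointer as large as possible for zero-sized elements. Under
        // strict provenance, `invalid_mut` states explicitly that this is a
        // bare address with no provenance and must never be dereferenced.
        ptr::invalid_mut(!0)
    } else {
        // SAFETY (in the real arena): `start..start + capacity` is one allocation.
        unsafe { start.add(capacity) }
    }
}

fn main() {
    let mut buf = [0u32; 4];
    let start = buf.as_mut_ptr();
    assert_eq!(end(start, 4) as usize - start as usize, 16);
    assert_eq!(end::<()>(ptr::invalid_mut(1), 0) as usize, usize::MAX);
}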
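
On the newly added `#![feature(ptr_const_cast)]` gate: it covers the `pointer::cast_const` / `pointer::cast_mut` methods (later stabilized in Rust 1.65); their use site is presumably elsewhere in the file, since it does not appear in the hunks shown here. On stable toolchains today the gate is unnecessary:

fn main() {
    let x = 42u8;
    let p: *const u8 = &x;

    // `cast_mut` changes only the pointer's mutability; unlike `p as *mut u8`,
    // it can never silently change the pointee type during a refactor.
    let q: *mut u8 = p.cast_mut();

    assert_eq!(p as usize, q as usize);
}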