self.end.set(last_chunk.end());
return;
} else {
- let prev_capacity = last_chunk.storage.cap();
+ new_capacity = last_chunk.storage.cap();
loop {
- new_capacity = prev_capacity.checked_mul(2).unwrap();
+ new_capacity = new_capacity.checked_mul(2).unwrap();
if new_capacity >= currently_used_cap + n {
break;
}
unsafe impl<T: Send> Send for TypedArena<T> {}
+/// A bump-pointer arena for objects whose types do not need `Drop` to run
+/// (enforced at allocation time via `intrinsics::needs_drop`); all memory is
+/// released together when the arena's chunks are dropped.
+pub struct DroplessArena {
+ /// A pointer to the next object to be allocated.
+ ptr: Cell<*mut u8>,
+
+ /// A pointer to the end of the allocated area. When this pointer is
+ /// reached, a new chunk is allocated.
+ end: Cell<*mut u8>,
+
+ /// A vector of arena chunks.
+ chunks: RefCell<Vec<TypedArenaChunk<u8>>>,
+}
+
+impl DroplessArena {
+ /// Creates an empty arena. `ptr` and `end` start as null, so the first
+ /// allocation's `future_end >= end` check (0 >= 0) always fails into
+ /// `grow`, which allocates the initial chunk lazily.
+ pub fn new() -> DroplessArena {
+ DroplessArena {
+ ptr: Cell::new(0 as *mut u8),
+ end: Cell::new(0 as *mut u8),
+ chunks: RefCell::new(vec![]),
+ }
+ }
+
+ /// Returns `true` if `ptr` falls inside the half-open range
+ /// `[chunk.start(), chunk.end())` of any chunk owned by this arena.
+ pub fn in_arena<T: ?Sized>(&self, ptr: *const T) -> bool {
+ let ptr = ptr as *const u8 as *mut u8;
+ for chunk in &*self.chunks.borrow() {
+ if chunk.start() <= ptr && ptr < chunk.end() {
+ return true;
+ }
+ }
+
+ false
+ }
+
+ /// Rounds the bump pointer up to `align_of::<T>()` using the usual
+ /// power-of-two mask trick (Rust type alignments are always powers of
+ /// two). The assert guards against the aligned pointer running past the
+ /// current chunk's end.
+ fn align_for<T>(&self) {
+ let align = mem::align_of::<T>();
+ let final_address = ((self.ptr.get() as usize) + align - 1) & !(align - 1);
+ self.ptr.set(final_address as *mut u8);
+ assert!(self.ptr <= self.end);
+ }
+
+ /// Makes room for at least `n` objects of type `T`.
+ ///
+ /// Strategy: first try to extend the last chunk in place
+ /// (`reserve_in_place`); if that fails, repeatedly double its capacity
+ /// until `used_bytes + needed_bytes` fits and allocate a fresh chunk of
+ /// that size. With no chunks yet, the first chunk is
+ /// `max(needed_bytes, PAGE)`. Panics on capacity overflow
+ /// (`checked_mul(2).unwrap()`).
+ #[inline(never)]
+ #[cold]
+ fn grow<T>(&self, n: usize) {
+ let needed_bytes = n * mem::size_of::<T>();
+ unsafe {
+ let mut chunks = self.chunks.borrow_mut();
+ let (chunk, mut new_capacity);
+ if let Some(last_chunk) = chunks.last_mut() {
+ // Bytes already handed out from the current chunk.
+ let used_bytes = self.ptr.get() as usize - last_chunk.start() as usize;
+ if last_chunk.storage.reserve_in_place(used_bytes, needed_bytes) {
+ // In-place growth succeeded: only `end` moves; `ptr` and
+ // existing allocations stay valid.
+ self.end.set(last_chunk.end());
+ return;
+ } else {
+ new_capacity = last_chunk.storage.cap();
+ loop {
+ new_capacity = new_capacity.checked_mul(2).unwrap();
+ if new_capacity >= used_bytes + needed_bytes {
+ break;
+ }
+ }
+ }
+ } else {
+ new_capacity = cmp::max(needed_bytes, PAGE);
+ }
+ // NOTE(review): any space left in the previous chunk is abandoned
+ // here — `ptr`/`end` are redirected to the new chunk.
+ chunk = TypedArenaChunk::<u8>::new(new_capacity);
+ self.ptr.set(chunk.start());
+ self.end.set(chunk.end());
+ chunks.push(chunk);
+ }
+ }
+
+ /// Allocates `object` in the arena and returns a mutable reference to it.
+ ///
+ /// Panics if `T` needs `Drop` (destructors are never run by this arena)
+ /// or if `T` is zero-sized.
+ #[inline]
+ pub fn alloc<T>(&self, object: T) -> &mut T {
+ unsafe {
+ assert!(!intrinsics::needs_drop::<T>());
+ assert!(mem::size_of::<T>() != 0);
+
+ self.align_for::<T>();
+ let future_end = intrinsics::arith_offset(self.ptr.get(), mem::size_of::<T>() as isize);
+ if (future_end as *mut u8) >= self.end.get() {
+ self.grow::<T>(1)
+ }
+
+ let ptr = self.ptr.get();
+ // Set the pointer past ourselves
+ self.ptr.set(intrinsics::arith_offset(
+ self.ptr.get(), mem::size_of::<T>() as isize
+ ) as *mut u8);
+ // Write into uninitialized memory.
+ ptr::write(ptr as *mut T, object);
+ &mut *(ptr as *mut T)
+ }
+ }
+
+ /// Allocates a slice of objects that are copied into the `DroplessArena`, returning a mutable
+ /// reference to it. Will panic if passed a zero-sized type.
+ ///
+ /// Panics:
+ /// - Zero-sized types
+ /// - Zero-length slices
+ #[inline]
+ pub fn alloc_slice<T>(&self, slice: &[T]) -> &mut [T]
+ where T: Copy {
+ unsafe {
+ assert!(!intrinsics::needs_drop::<T>());
+ }
+ assert!(mem::size_of::<T>() != 0);
+ assert!(slice.len() != 0);
+ self.align_for::<T>();
+
+ let future_end = unsafe {
+ intrinsics::arith_offset(self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize)
+ };
+ if (future_end as *mut u8) >= self.end.get() {
+ self.grow::<T>(slice.len());
+ }
+
+ unsafe {
+ // Safe to read-uninit then overwrite: `T: Copy`, and the region
+ // was just reserved above.
+ let arena_slice = slice::from_raw_parts_mut(self.ptr.get() as *mut T, slice.len());
+ self.ptr.set(intrinsics::arith_offset(
+ self.ptr.get(), (slice.len() * mem::size_of::<T>()) as isize
+ ) as *mut u8);
+ arena_slice.copy_from_slice(slice);
+ arena_slice
+ }
+ }
+}
+
#[cfg(test)]
mod tests {
extern crate test;
use marker;
use usize;
-use super::FusedIterator;
+use super::{FusedIterator, TrustedLen};
/// An iterator that repeats an element endlessly.
///
}
}
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<T> TrustedLen for Empty<T> {}
+
#[unstable(feature = "fused", issue = "35602")]
impl<T> FusedIterator for Empty<T> {}
}
}
+#[unstable(feature = "trusted_len", issue = "37572")]
+unsafe impl<T> TrustedLen for Once<T> {}
+
#[unstable(feature = "fused", issue = "35602")]
impl<T> FusedIterator for Once<T> {}
/// An iterator over subslices separated by elements that match a predicate
/// function.
+///
+/// This struct is created by the [`split`] method on [slices].
+///
+/// [`split`]: ../../std/primitive.slice.html#method.split
+/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Split<'a, T:'a, P> where P: FnMut(&T) -> bool {
v: &'a [T],
/// An iterator over the subslices of the vector which are separated
/// by elements that match `pred`.
+///
+/// This struct is created by the [`split_mut`] method on [slices].
+///
+/// [`split_mut`]: ../../std/primitive.slice.html#method.split_mut
+/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitMut<'a, T:'a, P> where P: FnMut(&T) -> bool {
v: &'a mut [T],
/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
+///
+/// This struct is created by the [`splitn`] method on [slices].
+///
+/// [`splitn`]: ../../std/primitive.slice.html#method.splitn
+/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
inner: GenericSplitN<Split<'a, T, P>>
/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
+///
+/// This struct is created by the [`rsplitn`] method on [slices].
+///
+/// [`rsplitn`]: ../../std/primitive.slice.html#method.rsplitn
+/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RSplitN<'a, T: 'a, P> where P: FnMut(&T) -> bool {
inner: GenericSplitN<Split<'a, T, P>>
/// An iterator over subslices separated by elements that match a predicate
/// function, limited to a given number of splits.
+///
+/// This struct is created by the [`splitn_mut`] method on [slices].
+///
+/// [`splitn_mut`]: ../../std/primitive.slice.html#method.splitn_mut
+/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
inner: GenericSplitN<SplitMut<'a, T, P>>
/// An iterator over subslices separated by elements that match a
/// predicate function, limited to a given number of splits, starting
/// from the end of the slice.
+///
+/// This struct is created by the [`rsplitn_mut`] method on [slices].
+///
+/// [`rsplitn_mut`]: ../../std/primitive.slice.html#method.rsplitn_mut
+/// [slices]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RSplitNMut<'a, T: 'a, P> where P: FnMut(&T) -> bool {
inner: GenericSplitN<SplitMut<'a, T, P>>
forward_iterator! { RSplitNMut: T, &'a mut [T] }
/// An iterator over overlapping subslices of length `size`.
+///
+/// This struct is created by the [`windows`] method on [slices].
+///
+/// [`windows`]: ../../std/primitive.slice.html#method.windows
+/// [slices]: ../../std/primitive.slice.html
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Windows<'a, T:'a> {
///
/// When the slice len is not evenly divided by the chunk size, the last slice
/// of the iteration will be the remainder.
+///
+/// This struct is created by the [`chunks`] method on [slices].
+///
+/// [`chunks`]: ../../std/primitive.slice.html#method.chunks
+/// [slices]: ../../std/primitive.slice.html
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Chunks<'a, T:'a> {
/// An iterator over a slice in (non-overlapping) mutable chunks (`size`
/// elements at a time). When the slice len is not evenly divided by the chunk
/// size, the last slice of the iteration will be the remainder.
+///
+/// This struct is created by the [`chunks_mut`] method on [slices].
+///
+/// [`chunks_mut`]: ../../std/primitive.slice.html#method.chunks_mut
+/// [slices]: ../../std/primitive.slice.html
#[derive(Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ChunksMut<'a, T:'a> {
use errors::DiagnosticBuilder;
use syntax_pos::{self, Span, DUMMY_SP};
use util::nodemap::{FxHashMap, FxHashSet, NodeMap};
+use arena::DroplessArena;
use self::combine::CombineFields;
use self::higher_ranked::HrMatchResult;
/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>).
pub struct InferCtxtBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
global_tcx: TyCtxt<'a, 'gcx, 'gcx>,
- arenas: ty::CtxtArenas<'tcx>,
+ arena: DroplessArena,
tables: Option<RefCell<ty::Tables<'tcx>>>,
param_env: Option<ty::ParameterEnvironment<'gcx>>,
projection_mode: Reveal,
-> InferCtxtBuilder<'a, 'gcx, 'tcx> {
InferCtxtBuilder {
global_tcx: self,
- arenas: ty::CtxtArenas::new(),
+ arena: DroplessArena::new(),
tables: tables.map(RefCell::new),
param_env: param_env,
projection_mode: projection_mode,
{
let InferCtxtBuilder {
global_tcx,
- ref arenas,
+ ref arena,
ref tables,
ref mut param_env,
projection_mode,
let param_env = param_env.take().unwrap_or_else(|| {
global_tcx.empty_parameter_environment()
});
- global_tcx.enter_local(arenas, |tcx| f(InferCtxt {
+ global_tcx.enter_local(arena, |tcx| f(InferCtxt {
tcx: tcx,
tables: tables,
projection_cache: RefCell::new(traits::ProjectionCache::new()),
use util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::accumulate_vec::AccumulateVec;
-use arena::TypedArena;
+use arena::{TypedArena, DroplessArena};
use std::borrow::Borrow;
use std::cell::{Cell, RefCell};
use std::hash::{Hash, Hasher};
use hir;
/// Internal storage
-pub struct CtxtArenas<'tcx> {
+pub struct GlobalArenas<'tcx> {
// internings
- type_: TypedArena<TyS<'tcx>>,
- type_list: TypedArena<Ty<'tcx>>,
- substs: TypedArena<Kind<'tcx>>,
- bare_fn: TypedArena<BareFnTy<'tcx>>,
- region: TypedArena<Region>,
- stability: TypedArena<attr::Stability>,
layout: TypedArena<Layout>,
- existential_predicates: TypedArena<ExistentialPredicate<'tcx>>,
// references
generics: TypedArena<ty::Generics<'tcx>>,
mir: TypedArena<RefCell<Mir<'tcx>>>,
}
-impl<'tcx> CtxtArenas<'tcx> {
- pub fn new() -> CtxtArenas<'tcx> {
- CtxtArenas {
- type_: TypedArena::new(),
- type_list: TypedArena::new(),
- substs: TypedArena::new(),
- bare_fn: TypedArena::new(),
- region: TypedArena::new(),
- stability: TypedArena::new(),
+impl<'tcx> GlobalArenas<'tcx> {
+ pub fn new() -> GlobalArenas<'tcx> {
+ GlobalArenas {
layout: TypedArena::new(),
- existential_predicates: TypedArena::new(),
-
generics: TypedArena::new(),
trait_def: TypedArena::new(),
adt_def: TypedArena::new(),
- mir: TypedArena::new()
+ mir: TypedArena::new(),
}
}
}
pub struct CtxtInterners<'tcx> {
- /// The arenas that types etc are allocated from.
- arenas: &'tcx CtxtArenas<'tcx>,
+ /// The arena that types, regions, etc are allocated from
+ arena: &'tcx DroplessArena,
/// Specifically use a speedy hash algorithm for these hash sets,
/// they're accessed quite often.
substs: RefCell<FxHashSet<Interned<'tcx, Substs<'tcx>>>>,
bare_fn: RefCell<FxHashSet<Interned<'tcx, BareFnTy<'tcx>>>>,
region: RefCell<FxHashSet<Interned<'tcx, Region>>>,
- stability: RefCell<FxHashSet<&'tcx attr::Stability>>,
- layout: RefCell<FxHashSet<&'tcx Layout>>,
existential_predicates: RefCell<FxHashSet<Interned<'tcx, Slice<ExistentialPredicate<'tcx>>>>>,
}
impl<'gcx: 'tcx, 'tcx> CtxtInterners<'tcx> {
- fn new(arenas: &'tcx CtxtArenas<'tcx>) -> CtxtInterners<'tcx> {
+ fn new(arena: &'tcx DroplessArena) -> CtxtInterners<'tcx> {
CtxtInterners {
- arenas: arenas,
+ arena: arena,
type_: RefCell::new(FxHashSet()),
type_list: RefCell::new(FxHashSet()),
substs: RefCell::new(FxHashSet()),
bare_fn: RefCell::new(FxHashSet()),
region: RefCell::new(FxHashSet()),
- stability: RefCell::new(FxHashSet()),
- layout: RefCell::new(FxHashSet()),
existential_predicates: RefCell::new(FxHashSet()),
}
}
let ty_struct: TyS<'gcx> = unsafe {
mem::transmute(ty_struct)
};
- let ty: Ty<'gcx> = interner.arenas.type_.alloc(ty_struct);
+ let ty: Ty<'gcx> = interner.arena.alloc(ty_struct);
global_interner.unwrap().insert(Interned(ty));
return ty;
}
}
// Don't be &mut TyS.
- let ty: Ty<'tcx> = self.arenas.type_.alloc(ty_struct);
+ let ty: Ty<'tcx> = self.arena.alloc(ty_struct);
interner.insert(Interned(ty));
ty
};
}
pub struct GlobalCtxt<'tcx> {
+ global_arenas: &'tcx GlobalArenas<'tcx>,
global_interners: CtxtInterners<'tcx>,
pub specializes_cache: RefCell<traits::SpecializesCache>,
/// Map from function to the `#[derive]` mode that it's defining. Only used
/// by `proc-macro` crates.
pub derive_macros: RefCell<NodeMap<Symbol>>,
+
+ stability_interner: RefCell<FxHashSet<&'tcx attr::Stability>>,
+
+ layout_interner: RefCell<FxHashSet<&'tcx Layout>>,
}
impl<'tcx> GlobalCtxt<'tcx> {
pub fn alloc_generics(self, generics: ty::Generics<'gcx>)
-> &'gcx ty::Generics<'gcx> {
- self.global_interners.arenas.generics.alloc(generics)
+ self.global_arenas.generics.alloc(generics)
}
pub fn alloc_mir(self, mir: Mir<'gcx>) -> &'gcx RefCell<Mir<'gcx>> {
- self.global_interners.arenas.mir.alloc(RefCell::new(mir))
+ self.global_arenas.mir.alloc(RefCell::new(mir))
}
pub fn alloc_trait_def(self, def: ty::TraitDef) -> &'gcx ty::TraitDef {
- self.global_interners.arenas.trait_def.alloc(def)
+ self.global_arenas.trait_def.alloc(def)
}
pub fn alloc_adt_def(self,
variants: Vec<ty::VariantDef>)
-> &'gcx ty::AdtDef {
let def = ty::AdtDef::new(self, did, kind, variants);
- self.global_interners.arenas.adt_def.alloc(def)
+ self.global_arenas.adt_def.alloc(def)
}
pub fn intern_stability(self, stab: attr::Stability) -> &'gcx attr::Stability {
- if let Some(st) = self.global_interners.stability.borrow().get(&stab) {
+ if let Some(st) = self.stability_interner.borrow().get(&stab) {
return st;
}
- let interned = self.global_interners.arenas.stability.alloc(stab);
- if let Some(prev) = self.global_interners.stability
- .borrow_mut()
- .replace(interned) {
+ let interned = self.global_interners.arena.alloc(stab);
+ if let Some(prev) = self.stability_interner.borrow_mut().replace(interned) {
bug!("Tried to overwrite interned Stability: {:?}", prev)
}
interned
}
pub fn intern_layout(self, layout: Layout) -> &'gcx Layout {
- if let Some(layout) = self.global_interners.layout.borrow().get(&layout) {
+ if let Some(layout) = self.layout_interner.borrow().get(&layout) {
return layout;
}
- let interned = self.global_interners.arenas.layout.alloc(layout);
- if let Some(prev) = self.global_interners.layout
- .borrow_mut()
- .replace(interned) {
+ let interned = self.global_arenas.layout.alloc(layout);
+ if let Some(prev) = self.layout_interner.borrow_mut().replace(interned) {
bug!("Tried to overwrite interned Layout: {:?}", prev)
}
interned
/// value (types, substs, etc.) can only be used while `ty::tls` has a valid
/// reference to the context, to allow formatting values that need it.
pub fn create_and_enter<F, R>(s: &'tcx Session,
- arenas: &'tcx CtxtArenas<'tcx>,
+ arenas: &'tcx GlobalArenas<'tcx>,
+ arena: &'tcx DroplessArena,
resolutions: ty::Resolutions,
named_region_map: resolve_lifetime::NamedRegionMap,
map: ast_map::Map<'tcx>,
region_maps: RegionMaps,
lang_items: middle::lang_items::LanguageItems,
stability: stability::Index<'tcx>,
- crate_name: &str,
+ crate_name: &str,
f: F) -> R
where F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'tcx>) -> R
{
let data_layout = TargetDataLayout::parse(s);
- let interners = CtxtInterners::new(arenas);
+ let interners = CtxtInterners::new(arena);
let common_types = CommonTypes::new(&interners);
let dep_graph = map.dep_graph.clone();
let fulfilled_predicates = traits::GlobalFulfilledPredicates::new(dep_graph.clone());
tls::enter_global(GlobalCtxt {
specializes_cache: RefCell::new(traits::SpecializesCache::new()),
+ global_arenas: arenas,
global_interners: interners,
dep_graph: dep_graph.clone(),
types: common_types,
crate_name: Symbol::intern(crate_name),
data_layout: data_layout,
layout_cache: RefCell::new(FxHashMap()),
+ layout_interner: RefCell::new(FxHashSet()),
layout_depth: Cell::new(0),
derive_macros: RefCell::new(NodeMap()),
+ stability_interner: RefCell::new(FxHashSet()),
}, f)
}
}
impl<'gcx: 'tcx, 'tcx> GlobalCtxt<'gcx> {
- /// Call the closure with a local `TyCtxt` using the given arenas.
- pub fn enter_local<F, R>(&self, arenas: &'tcx CtxtArenas<'tcx>, f: F) -> R
+ /// Call the closure with a local `TyCtxt` using the given arena.
+ pub fn enter_local<F, R>(&self, arena: &'tcx DroplessArena, f: F) -> R
where F: for<'a> FnOnce(TyCtxt<'a, 'gcx, 'tcx>) -> R
{
- let interners = CtxtInterners::new(arenas);
+ let interners = CtxtInterners::new(arena);
tls::enter(self, &interners, f)
}
}
impl<'a, 'tcx> Lift<'tcx> for Ty<'a> {
type Lifted = Ty<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Ty<'tcx>> {
- if let Some(&Interned(ty)) = tcx.interners.type_.borrow().get(&self.sty) {
- if *self as *const _ == ty as *const _ {
- return Some(ty);
- }
+ if tcx.interners.arena.in_arena(*self as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
}
// Also try in the global tcx if we're not that.
if !tcx.is_global() {
if self.len() == 0 {
return Some(Slice::empty());
}
- if let Some(&Interned(substs)) = tcx.interners.substs.borrow().get(&self[..]) {
- if *self as *const _ == substs as *const _ {
- return Some(substs);
- }
+ if tcx.interners.arena.in_arena(&self[..] as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
}
// Also try in the global tcx if we're not that.
if !tcx.is_global() {
impl<'a, 'tcx> Lift<'tcx> for &'a Region {
type Lifted = &'tcx Region;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<&'tcx Region> {
- if let Some(&Interned(region)) = tcx.interners.region.borrow().get(*self) {
- if *self as *const _ == region as *const _ {
- return Some(region);
- }
+ if tcx.interners.arena.in_arena(*self as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
}
// Also try in the global tcx if we're not that.
if !tcx.is_global() {
if self.len() == 0 {
return Some(Slice::empty());
}
- if let Some(&Interned(list)) = tcx.interners.type_list.borrow().get(&self[..]) {
- if *self as *const _ == list as *const _ {
- return Some(list);
- }
+ if tcx.interners.arena.in_arena(*self as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
}
// Also try in the global tcx if we're not that.
if !tcx.is_global() {
if self.is_empty() {
return Some(Slice::empty());
}
- if let Some(&Interned(eps)) = tcx.interners.existential_predicates.borrow().get(&self[..]) {
- if *self as *const _ == eps as *const _ {
- return Some(eps);
- }
+ if tcx.interners.arena.in_arena(*self as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
}
// Also try in the global tcx if we're not that.
if !tcx.is_global() {
type Lifted = &'tcx BareFnTy<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>)
-> Option<&'tcx BareFnTy<'tcx>> {
- if let Some(&Interned(fty)) = tcx.interners.bare_fn.borrow().get(*self) {
- if *self as *const _ == fty as *const _ {
- return Some(fty);
- }
+ if tcx.interners.arena.in_arena(*self as *const _) {
+ return Some(unsafe { mem::transmute(*self) });
}
// Also try in the global tcx if we're not that.
if !tcx.is_global() {
println!("Substs interner: #{}", self.interners.substs.borrow().len());
println!("BareFnTy interner: #{}", self.interners.bare_fn.borrow().len());
println!("Region interner: #{}", self.interners.region.borrow().len());
- println!("Stability interner: #{}", self.interners.stability.borrow().len());
- println!("Layout interner: #{}", self.interners.layout.borrow().len());
+ println!("Stability interner: #{}", self.stability_interner.borrow().len());
+ println!("Layout interner: #{}", self.layout_interner.borrow().len());
}
}
let v = unsafe {
mem::transmute(v)
};
- let i = ($alloc_to_ret)(self.global_interners.arenas.$name
- .$alloc_method(v));
+ let i = ($alloc_to_ret)(self.global_interners.arena.$alloc_method(v));
self.global_interners.$name.borrow_mut().insert(Interned(i));
return i;
}
}
}
- let i = ($alloc_to_ret)(self.interners.arenas.$name.$alloc_method(v));
+ let i = ($alloc_to_ret)(self.interners.arena.$alloc_method(v));
self.interners.$name.borrow_mut().insert(Interned(i));
i
}
pub use self::sty::TypeVariants::*;
pub use self::contents::TypeContents;
-pub use self::context::{TyCtxt, tls};
-pub use self::context::{CtxtArenas, Lift, Tables};
+pub use self::context::{TyCtxt, GlobalArenas, tls};
+pub use self::context::{Lift, Tables};
pub use self::trait_def::{TraitDef, TraitFlags};
use rustc::lint;
use rustc::middle::{self, dependency_format, stability, reachable};
use rustc::middle::privacy::AccessLevels;
-use rustc::ty::{self, TyCtxt, Resolutions};
+use rustc::ty::{self, TyCtxt, Resolutions, GlobalArenas};
use rustc::util::common::time;
use rustc::util::nodemap::{NodeSet, NodeMap};
use rustc_borrowck as borrowck;
use syntax::util::node_count::NodeCounter;
use syntax;
use syntax_ext;
+use arena::DroplessArena;
use derive_registrar;
write_out_deps(sess, &outputs, &crate_name);
- let arenas = ty::CtxtArenas::new();
+ let arena = DroplessArena::new();
+ let arenas = GlobalArenas::new();
// Construct the HIR map
let hir_map = time(sess.time_passes(),
sess,
outdir,
output,
+ &arena,
&arenas,
&cstore,
&hir_map,
hir_map,
analysis,
resolutions,
+ &arena,
&arenas,
&crate_name,
|tcx, analysis, incremental_hashes_map, result| {
pub output_filenames: Option<&'a OutputFilenames>,
pub out_dir: Option<&'a Path>,
pub out_file: Option<&'a Path>,
- pub arenas: Option<&'tcx ty::CtxtArenas<'tcx>>,
+ pub arena: Option<&'tcx DroplessArena>,
+ pub arenas: Option<&'tcx GlobalArenas<'tcx>>,
pub expanded_crate: Option<&'a ast::Crate>,
pub hir_crate: Option<&'a hir::Crate>,
pub ast_map: Option<&'a hir_map::Map<'tcx>>,
session: session,
out_dir: out_dir.as_ref().map(|s| &**s),
out_file: None,
+ arena: None,
arenas: None,
krate: None,
registry: None,
session: &'tcx Session,
out_dir: &'a Option<PathBuf>,
out_file: &'a Option<PathBuf>,
- arenas: &'tcx ty::CtxtArenas<'tcx>,
+ arena: &'tcx DroplessArena,
+ arenas: &'tcx GlobalArenas<'tcx>,
cstore: &'a CStore,
hir_map: &'a hir_map::Map<'tcx>,
analysis: &'a ty::CrateAnalysis<'static>,
-> Self {
CompileState {
crate_name: Some(crate_name),
+ arena: Some(arena),
arenas: Some(arenas),
cstore: Some(cstore),
ast_map: Some(hir_map),
hir_map: hir_map::Map<'tcx>,
mut analysis: ty::CrateAnalysis<'tcx>,
resolutions: Resolutions,
- arenas: &'tcx ty::CtxtArenas<'tcx>,
+ arena: &'tcx DroplessArena,
+ arenas: &'tcx GlobalArenas<'tcx>,
name: &str,
f: F)
-> Result<R, usize>
TyCtxt::create_and_enter(sess,
arenas,
+ arena,
resolutions,
named_region_map,
hir_map,
&state.expanded_crate.take().unwrap(),
state.crate_name.unwrap(),
ppm,
+ state.arena.unwrap(),
state.arenas.unwrap(),
opt_uii.clone(),
state.out_file);
use {abort_on_err, driver};
-use rustc::ty::{self, TyCtxt, Resolutions};
+use rustc::ty::{self, TyCtxt, GlobalArenas, Resolutions};
use rustc::cfg;
use rustc::cfg::graphviz::LabelledCFG;
use rustc::dep_graph::DepGraph;
use rustc::hir;
use rustc::hir::print as pprust_hir;
+use arena::DroplessArena;
+
#[derive(Copy, Clone, PartialEq, Debug)]
pub enum PpSourceMode {
PpmNormal,
ast_map: &hir_map::Map<'tcx>,
analysis: &ty::CrateAnalysis<'tcx>,
resolutions: &Resolutions,
- arenas: &'tcx ty::CtxtArenas<'tcx>,
+ arena: &'tcx DroplessArena,
+ arenas: &'tcx GlobalArenas<'tcx>,
id: &str,
payload: B,
f: F)
ast_map.clone(),
analysis.clone(),
resolutions.clone(),
+ arena,
arenas,
id,
|tcx, _, _, _| {
krate: &ast::Crate,
crate_name: &str,
ppm: PpMode,
- arenas: &'tcx ty::CtxtArenas<'tcx>,
+ arena: &'tcx DroplessArena,
+ arenas: &'tcx GlobalArenas<'tcx>,
opt_uii: Option<UserIdentifiedItem>,
ofile: Option<&Path>) {
let dep_graph = DepGraph::new(false);
analysis,
resolutions,
crate_name,
+ arena,
arenas,
ppm,
opt_uii,
ast_map,
analysis,
resolutions,
+ arena,
arenas,
crate_name,
box out,
ast_map,
analysis,
resolutions,
+ arena,
arenas,
crate_name,
(out, uii),
analysis: &ty::CrateAnalysis<'tcx>,
resolutions: &Resolutions,
crate_name: &str,
- arenas: &'tcx ty::CtxtArenas<'tcx>,
+ arena: &'tcx DroplessArena,
+ arenas: &'tcx GlobalArenas<'tcx>,
ppm: PpMode,
uii: Option<UserIdentifiedItem>,
ofile: Option<&Path>) {
ast_map.clone(),
analysis.clone(),
resolutions.clone(),
+ arena,
arenas,
crate_name,
|tcx, _, _, _| {
use syntax::feature_gate::UnstableFeatures;
use syntax::symbol::Symbol;
use syntax_pos::DUMMY_SP;
+use arena::DroplessArena;
use rustc::hir;
};
let _ignore = dep_graph.in_ignore();
- let arenas = ty::CtxtArenas::new();
+ let arena = DroplessArena::new();
+ let arenas = ty::GlobalArenas::new();
let ast_map = hir_map::map_crate(&mut hir_forest, defs);
// run just enough stuff to build a tcx:
let index = stability::Index::new(&ast_map);
TyCtxt::create_and_enter(&sess,
&arenas,
+ &arena,
resolutions,
named_region_map.unwrap(),
ast_map,
use rustc::hir::def_id::DefId;
use rustc::hir::def::{Def, ExportMap};
use rustc::middle::privacy::AccessLevels;
-use rustc::ty::{self, TyCtxt, Ty};
+use rustc::ty::{self, TyCtxt, GlobalArenas, Ty};
use rustc::hir::map as hir_map;
use rustc::lint;
use rustc::util::nodemap::{FxHashMap, NodeMap};
use clean;
use clean::Clean;
use html::render::RenderInfo;
+use arena::DroplessArena;
pub use rustc::session::config::Input;
pub use rustc::session::search_paths::SearchPaths;
).expect("phase_2_configure_and_expand aborted in rustdoc!")
};
- let arenas = ty::CtxtArenas::new();
+ let arena = DroplessArena::new();
+ let arenas = GlobalArenas::new();
let hir_map = hir_map::map_crate(&mut hir_forest, defs);
abort_on_err(driver::phase_3_run_analysis_passes(&sess,
hir_map,
analysis,
resolutions,
+ &arena,
&arenas,
&name,
|tcx, analysis, _, result| {