allocs: FxHashSet<&'tcx interpret::Allocation>,
/// Allows obtaining function instance handles via a unique identifier
- functions: FxHashMap<u64, Instance<'tcx>>,
+ functions: FxHashMap<interpret::AllocId, Instance<'tcx>>,
/// Inverse map of `functions`.
/// Used so we don't allocate a new pointer every time we need one
- function_cache: FxHashMap<Instance<'tcx>, u64>,
+ function_cache: FxHashMap<Instance<'tcx>, interpret::AllocId>,
/// Allows obtaining const allocs via a unique identifier
- alloc_by_id: FxHashMap<u64, &'tcx interpret::Allocation>,
+ alloc_by_id: FxHashMap<interpret::AllocId, &'tcx interpret::Allocation>,
/// The AllocId to assign to the next new regular allocation.
/// Always incremented, never gets smaller.
- next_id: u64,
+ next_id: interpret::AllocId,
/// Allows checking whether a constant already has an allocation
- ///
- /// The pointers are to the beginning of an `alloc_by_id` allocation
- alloc_cache: FxHashMap<interpret::GlobalId<'tcx>, interpret::Pointer>,
+ alloc_cache: FxHashMap<interpret::GlobalId<'tcx>, interpret::AllocId>,
/// A cache for basic byte allocations keyed by their contents. This is used to deduplicate
/// allocations for string and bytestring literals.
- literal_alloc_cache: FxHashMap<Vec<u8>, u64>,
+ literal_alloc_cache: FxHashMap<Vec<u8>, interpret::AllocId>,
}
impl<'tcx> InterpretInterner<'tcx> {
- pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> u64 {
+ pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> interpret::AllocId {
if let Some(&alloc_id) = self.function_cache.get(&instance) {
return alloc_id;
}
pub fn get_fn(
&self,
- id: u64,
+ id: interpret::AllocId,
) -> Option<Instance<'tcx>> {
self.functions.get(&id).cloned()
}
pub fn get_alloc(
&self,
- id: u64,
+ id: interpret::AllocId,
) -> Option<&'tcx interpret::Allocation> {
self.alloc_by_id.get(&id).cloned()
}
pub fn get_cached(
&self,
global_id: interpret::GlobalId<'tcx>,
- ) -> Option<interpret::Pointer> {
+ ) -> Option<interpret::AllocId> {
self.alloc_cache.get(&global_id).cloned()
}
pub fn cache(
&mut self,
global_id: interpret::GlobalId<'tcx>,
- ptr: interpret::Pointer,
+ ptr: interpret::AllocId,
) {
if let Some(old) = self.alloc_cache.insert(global_id, ptr) {
bug!("tried to cache {:?}, but was already existing as {:#?}", global_id, old);
pub fn intern_at_reserved(
&mut self,
- id: u64,
+ id: interpret::AllocId,
alloc: &'tcx interpret::Allocation,
) {
if let Some(old) = self.alloc_by_id.insert(id, alloc) {
/// yet have an allocation backing it.
pub fn reserve(
&mut self,
- ) -> u64 {
+ ) -> interpret::AllocId {
let next = self.next_id;
- self.next_id = self.next_id
+ self.next_id.0 = self.next_id.0
.checked_add(1)
.expect("You overflowed a u64 by incrementing by 1... \
You've just earned yourself a free drink if we ever meet. \
}
/// Allocates a byte or string literal for `mir::interpret`
- pub fn allocate_cached(self, bytes: &[u8]) -> u64 {
+ pub fn allocate_cached(self, bytes: &[u8]) -> interpret::AllocId {
// check whether we already allocated this literal or a constant with the same memory
if let Some(&alloc_id) = self.interpret_interner.borrow().literal_alloc_cache.get(bytes) {
return alloc_id;
pub data: M::MemoryData,
/// Helps guarantee that stack allocations aren't deallocated via `rust_deallocate`
- alloc_kind: HashMap<u64, MemoryKind<M::MemoryKinds>>,
+ alloc_kind: HashMap<AllocId, MemoryKind<M::MemoryKinds>>,
/// Actual memory allocations (arbitrary bytes, may contain pointers into other allocations).
- alloc_map: HashMap<u64, Allocation>,
+ alloc_map: HashMap<AllocId, Allocation>,
/// Allocations backing statics that are not yet fully initialized (arbitrary bytes, may contain pointers into other allocations).
///
/// Stores statics while they are being processed, before they are interned and thus frozen
- uninitialized_statics: HashMap<u64, Allocation>,
+ uninitialized_statics: HashMap<AllocId, Allocation>,
/// Number of virtual bytes allocated.
memory_usage: u64,
pub fn allocations<'x>(
&'x self,
) -> impl Iterator<Item = (AllocId, &'x Allocation)> {
- self.alloc_map.iter().map(|(&id, alloc)| (AllocId(id), alloc))
+ self.alloc_map.iter().map(|(&id, alloc)| (id, alloc))
}
pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> MemoryPointer {
let id = self.tcx.interpret_interner.borrow_mut().create_fn_alloc(instance);
- MemoryPointer::new(AllocId(id), 0)
+ MemoryPointer::new(id, 0)
}
pub fn allocate_cached(&mut self, bytes: &[u8]) -> MemoryPointer {
let id = self.tcx.allocate_cached(bytes);
- MemoryPointer::new(AllocId(id), 0)
+ MemoryPointer::new(id, 0)
}
/// kind is `None` for statics
},
Some(MemoryKind::MutableStatic) => bug!("don't allocate mutable statics directly")
}
- Ok(MemoryPointer::new(AllocId(id), 0))
+ Ok(MemoryPointer::new(id, 0))
}
pub fn reallocate(
if ptr.offset != 0 {
return err!(ReallocateNonBasePtr);
}
- if self.alloc_map.contains_key(&ptr.alloc_id.0) {
- let alloc_kind = self.alloc_kind[&ptr.alloc_id.0];
+ if self.alloc_map.contains_key(&ptr.alloc_id) {
+ let alloc_kind = self.alloc_kind[&ptr.alloc_id];
if alloc_kind != kind {
return err!(ReallocatedWrongMemoryKind(
format!("{:?}", alloc_kind),
}
pub fn deallocate_local(&mut self, ptr: MemoryPointer) -> EvalResult<'tcx> {
- match self.alloc_kind.get(&ptr.alloc_id.0).cloned() {
+ match self.alloc_kind.get(&ptr.alloc_id).cloned() {
// for a constant like `const FOO: &i32 = &1;` the local containing
// the `1` is referred to by the global. We transitively marked everything
// the global refers to as static itself, so we don't free it here
return err!(DeallocateNonBasePtr);
}
- let alloc = match self.alloc_map.remove(&ptr.alloc_id.0) {
+ let alloc = match self.alloc_map.remove(&ptr.alloc_id) {
Some(alloc) => alloc,
- None => if self.uninitialized_statics.contains_key(&ptr.alloc_id.0) {
+ None => if self.uninitialized_statics.contains_key(&ptr.alloc_id) {
return err!(DeallocatedWrongMemoryKind(
"uninitializedstatic".to_string(),
format!("{:?}", kind),
))
- } else if self.tcx.interpret_interner.borrow().get_fn(ptr.alloc_id.0).is_some() {
+ } else if self.tcx.interpret_interner.borrow().get_fn(ptr.alloc_id).is_some() {
return err!(DeallocatedWrongMemoryKind(
"function".to_string(),
format!("{:?}", kind),
))
- } else if self.tcx.interpret_interner.borrow().get_alloc(ptr.alloc_id.0).is_some() {
+ } else if self.tcx.interpret_interner.borrow().get_alloc(ptr.alloc_id).is_some() {
return err!(DeallocatedWrongMemoryKind(
"static".to_string(),
format!("{:?}", kind),
},
};
- let alloc_kind = self.alloc_kind.remove(&ptr.alloc_id.0).expect("alloc_map out of sync with alloc_kind");
+ let alloc_kind = self.alloc_kind.remove(&ptr.alloc_id).expect("alloc_map out of sync with alloc_kind");
// It is okay for us to still holds locks on deallocation -- for example, we could store data we own
// in a local, and the local could be deallocated (from StorageDead) before the function returns.
// However, we should check *something*. For now, we make sure that there is no conflicting write
// lock by another frame. We *have* to permit deallocation if we hold a read lock.
// TODO: Figure out the exact rules here.
- M::free_lock(self, ptr.alloc_id.0, alloc.bytes.len() as u64)?;
+ M::free_lock(self, ptr.alloc_id, alloc.bytes.len() as u64)?;
if alloc_kind != kind {
return err!(DeallocatedWrongMemoryKind(
impl<'a, 'tcx, M: Machine<'tcx>> Memory<'a, 'tcx, M> {
pub fn get(&self, id: AllocId) -> EvalResult<'tcx, &Allocation> {
// normal alloc?
- match self.alloc_map.get(&id.0) {
+ match self.alloc_map.get(&id) {
Some(alloc) => Ok(alloc),
// uninitialized static alloc?
- None => match self.uninitialized_statics.get(&id.0) {
+ None => match self.uninitialized_statics.get(&id) {
Some(alloc) => Ok(alloc),
None => {
let int = self.tcx.interpret_interner.borrow();
// static alloc?
- int.get_alloc(id.0)
+ int.get_alloc(id)
// no alloc? produce an error
- .ok_or_else(|| if int.get_fn(id.0).is_some() {
+ .ok_or_else(|| if int.get_fn(id).is_some() {
EvalErrorKind::DerefFunctionPointer.into()
} else {
EvalErrorKind::DanglingPointerDeref.into()
id: AllocId,
) -> EvalResult<'tcx, &mut Allocation> {
// normal alloc?
- match self.alloc_map.get_mut(&id.0) {
+ match self.alloc_map.get_mut(&id) {
Some(alloc) => Ok(alloc),
// uninitialized static alloc?
- None => match self.uninitialized_statics.get_mut(&id.0) {
+ None => match self.uninitialized_statics.get_mut(&id) {
Some(alloc) => Ok(alloc),
None => {
let int = self.tcx.interpret_interner.borrow();
// no alloc or immutable alloc? produce an error
- if int.get_alloc(id.0).is_some() {
+ if int.get_alloc(id).is_some() {
err!(ModifiedConstantMemory)
- } else if int.get_fn(id.0).is_some() {
+ } else if int.get_fn(id).is_some() {
err!(DerefFunctionPointer)
} else {
err!(DanglingPointerDeref)
self.tcx
.interpret_interner
.borrow()
- .get_fn(ptr.alloc_id.0)
+ .get_fn(ptr.alloc_id)
.ok_or(EvalErrorKind::ExecuteMemory.into())
}
let (alloc, immutable) =
// normal alloc?
- match self.alloc_map.get(&id.0) {
- Some(a) => (a, match self.alloc_kind[&id.0] {
+ match self.alloc_map.get(&id) {
+ Some(a) => (a, match self.alloc_kind[&id] {
MemoryKind::Stack => " (stack)".to_owned(),
MemoryKind::Machine(m) => format!(" ({:?})", m),
MemoryKind::MutableStatic => " (static mut)".to_owned(),
}),
// uninitialized static alloc?
- None => match self.uninitialized_statics.get(&id.0) {
+ None => match self.uninitialized_statics.get(&id) {
Some(a) => (a, " (static in the process of initialization)".to_owned()),
None => {
let int = self.tcx.interpret_interner.borrow();
// static alloc?
- match int.get_alloc(id.0) {
+ match int.get_alloc(id) {
Some(a) => (a, "(immutable)".to_owned()),
- None => if let Some(func) = int.get_fn(id.0) {
+ None => if let Some(func) = int.get_fn(id) {
trace!("{} {}", msg, func);
continue;
} else {
let leaks: Vec<_> = self.alloc_map
.keys()
.filter_map(|key| if kinds[key] != MemoryKind::MutableStatic {
- Some(AllocId(*key))
+ Some(*key)
} else {
None
})
alloc: AllocId,
mutability: Mutability,
) -> EvalResult<'tcx> {
- match self.alloc_kind.get(&alloc.0) {
+ match self.alloc_kind.get(&alloc) {
// do not go into immutable statics
None |
// or mutable statics
mutability
);
if mutability == Mutability::Immutable {
- let alloc = self.alloc_map.remove(&alloc_id.0);
- let kind = self.alloc_kind.remove(&alloc_id.0);
+ let alloc = self.alloc_map.remove(&alloc_id);
+ let kind = self.alloc_kind.remove(&alloc_id);
assert_ne!(kind, Some(MemoryKind::MutableStatic));
- let uninit = self.uninitialized_statics.remove(&alloc_id.0);
+ let uninit = self.uninitialized_statics.remove(&alloc_id);
if let Some(alloc) = alloc.or(uninit) {
let alloc = self.tcx.intern_const_alloc(alloc);
- self.tcx.interpret_interner.borrow_mut().intern_at_reserved(alloc_id.0, alloc);
+ self.tcx.interpret_interner.borrow_mut().intern_at_reserved(alloc_id, alloc);
// recurse into inner allocations
for &alloc in alloc.relocations.values() {
self.mark_inner_allocation_initialized(alloc, mutability)?;
return Ok(());
}
// We are marking the static as initialized, so move it out of the uninit map
- if let Some(uninit) = self.uninitialized_statics.remove(&alloc_id.0) {
- self.alloc_map.insert(alloc_id.0, uninit);
+ if let Some(uninit) = self.uninitialized_statics.remove(&alloc_id) {
+ self.alloc_map.insert(alloc_id, uninit);
}
// do not use `self.get_mut(alloc_id)` here, because we might have already marked a
// sub-element or have circular pointers (e.g. `Rc`-cycles)
- let relocations = match self.alloc_map.get_mut(&alloc_id.0) {
+ let relocations = match self.alloc_map.get_mut(&alloc_id) {
Some(&mut Allocation {
ref mut relocations,
..
}) => {
- match self.alloc_kind.get(&alloc_id.0) {
+ match self.alloc_kind.get(&alloc_id) {
// const eval results can refer to "locals".
// E.g. `const Foo: &u32 = &1;` refers to the temp local that stores the `1`
None |
},
}
// overwrite or insert
- self.alloc_kind.insert(alloc_id.0, MemoryKind::MutableStatic);
+ self.alloc_kind.insert(alloc_id, MemoryKind::MutableStatic);
// take out the relocations vector to free the borrow on self, so we can call
// mark recursively
mem::replace(relocations, Default::default())
}
// put back the relocations
self.alloc_map
- .get_mut(&alloc_id.0)
+ .get_mut(&alloc_id)
.expect("checked above")
.relocations = relocations;
Ok(())