1 //! An interpreter for MIR used in CTFE and by miri
// NOTE(review): this extract is heavily elided; the macro headers/closers for
// `err_unsup!` and the UB macro are missing from view. Comments annotate the
// visible fragments only.
// Fragment of `err_unsup!`: wraps an `UnsupportedInfo` variant (chosen by the
// macro arguments) into `InterpError::Unsupported`, pre-wrapped in `Err`.
6 Err($crate::mir::interpret::InterpError::Unsupported(
7 $crate::mir::interpret::UnsupportedInfo::$($tt)*
// `err_inval!`: builds an `Err(InterpError::InvalidProgram(...))` from an
// `InvalidProgramInfo` variant.
13 macro_rules! err_inval {
15 Err($crate::mir::interpret::InterpError::InvalidProgram(
16 $crate::mir::interpret::InvalidProgramInfo::$($tt)*
// Fragment of the undefined-behaviour macro: wraps an
// `UndefinedBehaviourInfo` variant into `InterpError::UndefinedBehaviour`.
24 Err($crate::mir::interpret::InterpError::UndefinedBehaviour(
25 $crate::mir::interpret::UndefinedBehaviourInfo::$($tt)*
// `err_panic!`: builds an `Err(InterpError::Panic(...))` carrying a
// `PanicMessage` variant.
31 macro_rules! err_panic {
33 Err($crate::mir::interpret::InterpError::Panic(
34 $crate::mir::interpret::PanicMessage::$($tt)*
// `err_exhaust!`: builds an `Err(InterpError::ResourceExhaustion(...))` from a
// `ResourceExhaustionInfo` variant.
40 macro_rules! err_exhaust {
42 Err($crate::mir::interpret::InterpError::ResourceExhaustion(
43 $crate::mir::interpret::ResourceExhaustionInfo::$($tt)*
53 pub use self::error::{
54 InterpErrorInfo, InterpResult, InterpError, AssertMessage, ConstEvalErr, struct_error,
55 FrameInfo, ConstEvalRawResult, ConstEvalResult, ErrorHandled, PanicMessage, UnsupportedInfo,
56 InvalidProgramInfo, ResourceExhaustionInfo, UndefinedBehaviourInfo,
59 pub use self::value::{Scalar, ScalarMaybeUndef, RawConst, ConstValue};
61 pub use self::allocation::{Allocation, AllocationExtra, Relocations, UndefMask};
63 pub use self::pointer::{Pointer, PointerArithmetic, CheckInAllocMsg};
67 use crate::hir::def_id::DefId;
68 use crate::ty::{self, TyCtxt, Instance, subst::UnpackedKind};
69 use crate::ty::layout::{self, Size};
71 use rustc_serialize::{Encoder, Decodable, Encodable};
72 use rustc_data_structures::fx::FxHashMap;
73 use rustc_data_structures::sync::{Lock as Mutex, HashMapExt};
74 use rustc_data_structures::tiny_list::TinyList;
75 use rustc_macros::HashStable;
76 use byteorder::{WriteBytesExt, ReadBytesExt, LittleEndian, BigEndian};
77 use crate::ty::codec::TyDecoder;
78 use std::sync::atomic::{AtomicU32, Ordering};
79 use std::num::NonZeroU32;
81 /// Uniquely identifies a specific constant or static.
82 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, RustcEncodable, RustcDecodable, HashStable)]
83 pub struct GlobalId<'tcx> {
84 /// For a constant or static, the `Instance` of the item itself.
85 /// For a promoted global, the `Instance` of the function they belong to.
86 pub instance: ty::Instance<'tcx>,
88 /// The index for promoted globals within their function's `mir::Body`.
// `None` for a constant or static that is not a promoted.
89 pub promoted: Option<mir::Promoted>,
// Opaque, globally-unique identifier for an interpreter allocation.
// Fresh ids are handed out sequentially by `AllocMap::reserve` below.
92 #[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd, Debug)]
93 pub struct AllocId(pub u64);
// `AllocId`s opt out of the derived (de)serialization and instead go through
// the specialized path (`specialized_encode_alloc_id` /
// `AllocDecodingSession::decode_alloc_id`) so cyclic allocations round-trip.
95 impl rustc_serialize::UseSpecializedEncodable for AllocId {}
96 impl rustc_serialize::UseSpecializedDecodable for AllocId {}
// Tag written ahead of each encoded alloc payload, telling the decoder which
// flavor follows. Per the usages below it has (at least) variants `Alloc`
// (memory), `Fn` (function pointer) and `Static`; the variant list itself is
// elided from this view.
98 #[derive(RustcDecodable, RustcEncodable)]
99 enum AllocDiscriminant {
// Encodes what `alloc_id` refers to: an `AllocDiscriminant` tag followed by
// the payload — the `Allocation` bytes, the fn `Instance`, or the static's
// `DefId`. Panics (via `expect`) if the id is dangling in `tcx.alloc_map`.
// NOTE(review): the parameter list and the `match` header are elided here.
105 pub fn specialized_encode_alloc_id<'tcx, E: Encoder>(
109 ) -> Result<(), E::Error> {
110 let alloc: GlobalAlloc<'tcx> =
111 tcx.alloc_map.lock().get(alloc_id).expect("no value for AllocId");
113 GlobalAlloc::Memory(alloc) => {
114 trace!("encoding {:?} with {:#?}", alloc_id, alloc);
115 AllocDiscriminant::Alloc.encode(encoder)?;
116 alloc.encode(encoder)?;
118 GlobalAlloc::Function(fn_instance) => {
119 trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
120 AllocDiscriminant::Fn.encode(encoder)?;
121 fn_instance.encode(encoder)?;
123 GlobalAlloc::Static(did) => {
124 // Referring to statics doesn't require knowing about their allocations,
125 // just about their `DefId`s.
126 AllocDiscriminant::Static.encode(encoder)?;
127 did.encode(encoder)?;
133 // Used to avoid infinite recursion when decoding cyclic allocations.
// Nonzero so that `NonZeroU32` niche-fills any `Option` around it for free.
134 type DecodingSessionId = NonZeroU32;
// Per-entry decoding state (the enum header and other variants — presumably
// `Empty` and `Done(AllocId)`, per the uses below — are elided from view):
// `InProgressNonAlloc` tracks which sessions are decoding a fn/static entry;
// `InProgress` additionally carries the pre-reserved `AllocId` for memory,
// which is what lets cyclic references resolve mid-decode.
139 InProgressNonAlloc(TinyList<DecodingSessionId>),
140 InProgress(TinyList<DecodingSessionId>, AllocId),
// Shared, lock-protected bookkeeping for decoding `AllocId`s across
// (possibly concurrent) decoding sessions. One entry per encoded allocation.
144 pub struct AllocDecodingState {
145 // For each AllocId we keep track of which decoding state it's currently in.
146 decoding_state: Vec<Mutex<State>>,
147 // The offsets of each allocation in the data stream.
// Indexed by the same position as `decoding_state`.
148 data_offsets: Vec<u32>,
151 impl AllocDecodingState {
// Starts a fresh decoding session carrying a process-unique, nonzero id.
153 pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> {
154 static DECODER_SESSION_ID: AtomicU32 = AtomicU32::new(0);
155 let counter = DECODER_SESSION_ID.fetch_add(1, Ordering::SeqCst);
157 // Make sure this is never zero
// (mask to 31 bits, then +1: the result is always in 1..=2^31 and thus
// fits `NonZeroU32` even after the counter wraps, so `unwrap` is safe).
158 let session_id = DecodingSessionId::new((counter & 0x7FFFFFFF) + 1).unwrap();
160 AllocDecodingSession {
// Builds the state for `data_offsets.len()` encoded allocations, each
// starting in `State::Empty` (struct-literal tail elided from this view).
166 pub fn new(data_offsets: Vec<u32>) -> AllocDecodingState {
167 let decoding_state = vec![Mutex::new(State::Empty); data_offsets.len()];
// A cheap handle (`Copy`) tying a borrowed `AllocDecodingState` to one
// session id; see `AllocDecodingState::new_decoding_session`.
176 #[derive(Copy, Clone)]
177 pub struct AllocDecodingSession<'s> {
178 state: &'s AllocDecodingState,
179 session_id: DecodingSessionId,
182 impl<'s> AllocDecodingSession<'s> {
183 // Decodes an AllocId in a thread-safe way.
// Returns the canonical `AllocId` for the entry whose index is next in the
// stream, decoding the payload at most once and memoizing the result in
// `decoding_state`. NOTE(review): generic bounds, match headers, early
// returns and closing braces are elided from this extract; comments below
// hedge wherever control flow is not visible.
184 pub fn decode_alloc_id<D>(&self, decoder: &mut D) -> Result<AllocId, D::Error>
188 // Read the index of the allocation
189 let idx = decoder.read_u32()? as usize;
190 let pos = self.state.data_offsets[idx] as usize;
192 // Decode the AllocDiscriminant now so that we know if we have to reserve an
// `AllocId` up front — required for memory allocations, which may be cyclic.
194 let (alloc_kind, pos) = decoder.with_position(pos, |decoder| {
195 let alloc_kind = AllocDiscriminant::decode(decoder)?;
196 Ok((alloc_kind, decoder.position()))
199 // Check the decoding state, see if it's already decoded or if we should
// decode it here. The per-entry lock is held while inspecting/updating.
202 let mut entry = self.state.decoding_state[idx].lock();
205 State::Done(alloc_id) => {
// Fully decoded already — presumably returns `alloc_id` here (elided).
208 ref mut entry @ State::Empty => {
209 // We are allowed to decode
211 AllocDiscriminant::Alloc => {
212 // If this is an allocation, we need to reserve an
213 // AllocId so we can decode cyclic graphs.
214 let alloc_id = decoder.tcx().alloc_map.lock().reserve();
215 *entry = State::InProgress(
216 TinyList::new_single(self.session_id),
220 AllocDiscriminant::Fn | AllocDiscriminant::Static => {
221 // Fns and statics cannot be cyclic and their AllocId
222 // is determined later by interning
223 *entry = State::InProgressNonAlloc(
224 TinyList::new_single(self.session_id));
229 State::InProgressNonAlloc(ref mut sessions) => {
230 if sessions.contains(&self.session_id) {
// Same session re-entering a non-alloc entry: fn/static entries
// cannot be cyclic, so this indicates a decoder bug.
231 bug!("This should be unreachable")
233 // Start decoding concurrently
234 sessions.insert(self.session_id);
238 State::InProgress(ref mut sessions, alloc_id) => {
239 if sessions.contains(&self.session_id) {
// Cycle within this session — presumably returns the reserved
// `alloc_id` here (elided).
243 // Start decoding concurrently
244 sessions.insert(self.session_id);
251 // Now decode the actual data
// (lock released above; the payload decode happens outside it).
252 let alloc_id = decoder.with_position(pos, |decoder| {
254 AllocDiscriminant::Alloc => {
255 let allocation = <&'tcx Allocation as Decodable>::decode(decoder)?;
256 // We already have a reserved AllocId.
257 let alloc_id = alloc_id.unwrap();
258 trace!("decoded alloc {:?} {:#?}", alloc_id, allocation);
259 decoder.tcx().alloc_map.lock().set_alloc_id_same_memory(alloc_id, allocation);
262 AllocDiscriminant::Fn => {
263 assert!(alloc_id.is_none());
264 trace!("creating fn alloc id");
265 let instance = ty::Instance::decode(decoder)?;
266 trace!("decoded fn alloc instance: {:?}", instance);
267 let alloc_id = decoder.tcx().alloc_map.lock().create_fn_alloc(instance);
270 AllocDiscriminant::Static => {
271 assert!(alloc_id.is_none());
272 trace!("creating extern static alloc id at");
273 let did = DefId::decode(decoder)?;
274 let alloc_id = decoder.tcx().alloc_map.lock().create_static_alloc(did);
// Memoize so that later lookups (any session) take the `Done` fast path.
280 self.state.decoding_state[idx].with_lock(|entry| {
281 *entry = State::Done(alloc_id);
// Display an `AllocId` as its bare numeric value (used in error messages,
// e.g. `unwrap_memory` below).
288 impl fmt::Display for AllocId {
289 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
290 write!(f, "{}", self.0)
294 /// An allocation in the global (tcx-managed) memory can be either a function pointer,
295 /// a static, or a "real" allocation with some data in it.
296 #[derive(Debug, Clone, Eq, PartialEq, Hash, RustcDecodable, RustcEncodable, HashStable)]
297 pub enum GlobalAlloc<'tcx> {
298 /// The alloc ID is used as a function pointer
299 Function(Instance<'tcx>),
300 /// The alloc ID points to a "lazy" static variable that did not get computed (yet).
301 /// This is also used to break the cycle in recursive statics.
// NOTE(review): the `Static(DefId)` variant declaration itself is elided
// from this view (its construction is visible in `create_static_alloc`).
303 /// The alloc ID points to memory.
304 Memory(&'tcx Allocation),
// The tcx-global registry mapping `AllocId`s to what they refer to.
307 pub struct AllocMap<'tcx> {
308 /// Lets you know what an `AllocId` refers to.
309 alloc_map: FxHashMap<AllocId, GlobalAlloc<'tcx>>,
311 /// Used to ensure that statics and functions only get one associated `AllocId`.
312 /// Should never contain a `GlobalAlloc::Memory`!
313 /// FIXME: Should we just have two separate dedup maps for statics and functions each?
314 dedup: FxHashMap<GlobalAlloc<'tcx>, AllocId>,
316 /// The `AllocId` to assign to the next requested ID.
317 /// Always incremented, never gets smaller.
// NOTE(review): the field declaration itself (presumably `next_id: AllocId`,
// per its use in `reserve`) is elided from this view.
321 impl<'tcx> AllocMap<'tcx> {
// Creates an empty map. NOTE(review): the `next_id` initializer is elided
// from this view.
322 pub fn new() -> Self {
324 alloc_map: Default::default(),
325 dedup: Default::default(),
330 /// Obtains a new allocation ID that can be referenced but does not
331 /// yet have an allocation backing it.
333 /// Make sure to call `set_alloc_id_memory` or `set_alloc_id_same_memory` before returning such
334 /// an `AllocId` from a query.
// (fn signature elided; body increments `next_id`, aborting on u64 overflow)
338 let next = self.next_id;
339 self.next_id.0 = self.next_id.0
341 .expect("You overflowed a u64 by incrementing by 1... \
342 You've just earned yourself a free drink if we ever meet. \
343 Seriously, how did you do that?!");
347 /// Reserve a new ID *if* this allocation has not been dedup-reserved before.
348 /// Should only be used for function pointers and statics, we don't want
349 /// to dedup IDs for "real" memory!
350 fn reserve_and_set_dedup(&mut self, alloc: GlobalAlloc<'tcx>) -> AllocId {
352 GlobalAlloc::Function(..) | GlobalAlloc::Static(..) => {},
353 GlobalAlloc::Memory(..) => bug!("Trying to dedup-reserve memory with real data!"),
// Fast path: this fn/static already has an id — presumably returns it
// here (elided).
355 if let Some(&alloc_id) = self.dedup.get(&alloc) {
358 let id = self.reserve();
359 debug!("creating alloc {:?} with id {}", alloc, id);
// Record the mapping in both directions: id -> alloc and alloc -> id.
360 self.alloc_map.insert(id, alloc.clone());
361 self.dedup.insert(alloc, id);
365 /// Generates an `AllocId` for a static or return a cached one in case this function has been
366 /// called on the same static before.
367 pub fn create_static_alloc(&mut self, static_id: DefId) -> AllocId {
368 self.reserve_and_set_dedup(GlobalAlloc::Static(static_id))
371 /// Generates an `AllocId` for a function. Depending on the function type,
372 /// this might get deduplicated or assigned a new ID each time.
373 pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> AllocId {
374 // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
375 // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be
376 // duplicated across crates.
377 // We thus generate a new `AllocId` for every mention of a function. This means that
378 // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
379 // However, formatting code relies on function identity (see #58320), so we only do
380 // this for generic functions. Lifetime parameters are ignored.
381 let is_generic = instance.substs.into_iter().any(|kind| {
382 match kind.unpack() {
383 UnpackedKind::Lifetime(_) => false,
// (non-lifetime arm and the `if is_generic` branch are elided here)
// Generic: always a fresh, non-deduplicated id.
389 let id = self.reserve();
390 self.alloc_map.insert(id, GlobalAlloc::Function(instance));
// Non-generic: deduplicate so function identity is stable (#58320).
394 self.reserve_and_set_dedup(GlobalAlloc::Function(instance))
398 /// Intern the `Allocation` and return a new `AllocId`, even if there's already an identical
399 /// `Allocation` with a different `AllocId`.
400 /// Statics with identical content will still point to the same `Allocation`, i.e.,
401 /// their data will be deduplicated through `Allocation` interning -- but they
402 /// are different places in memory and as such need different IDs.
403 pub fn create_memory_alloc(&mut self, mem: &'tcx Allocation) -> AllocId {
404 let id = self.reserve();
405 self.set_alloc_id_memory(id, mem);
409 /// Returns `None` in case the `AllocId` is dangling. An `InterpretCx` can still have a
410 /// local `Allocation` for that `AllocId`, but having such an `AllocId` in a constant is
411 /// illegal and will likely ICE.
412 /// This function exists to allow const eval to detect the difference between evaluation-
413 /// local dangling pointers and allocations in constants/statics.
415 pub fn get(&self, id: AllocId) -> Option<GlobalAlloc<'tcx>> {
416 self.alloc_map.get(&id).cloned()
419 /// Panics if the `AllocId` does not refer to an `Allocation`
420 pub fn unwrap_memory(&self, id: AllocId) -> &'tcx Allocation {
422 Some(GlobalAlloc::Memory(mem)) => mem,
423 _ => bug!("expected allocation id {} to point to memory", id),
427 /// Freeze an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
428 /// call this function twice, even with the same `Allocation` will ICE the compiler.
429 pub fn set_alloc_id_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
430 if let Some(old) = self.alloc_map.insert(id, GlobalAlloc::Memory(mem)) {
431 bug!("tried to set allocation id {}, but it was already existing as {:#?}", id, old);
435 /// Freeze an `AllocId` created with `reserve` by pointing it at an `Allocation`. May be called
436 /// twice for the same `(AllocId, Allocation)` pair.
// Used by `decode_alloc_id`, where racing sessions may legitimately store
// the same interned allocation; `insert_same` asserts equality on conflict.
437 fn set_alloc_id_same_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
438 self.alloc_map.insert_same(id, GlobalAlloc::Memory(mem));
442 ////////////////////////////////////////////////////////////////////////////////
443 // Methods to access integers in the target endianness
444 ////////////////////////////////////////////////////////////////////////////////
// Writes `data` into all of `target` using the target's byte order.
// NOTE(review): the `data: u128` parameter line and the `match` header are
// elided from this view.
447 pub fn write_target_uint(
448 endianness: layout::Endian,
449 mut target: &mut [u8],
451 ) -> Result<(), io::Error> {
452 let len = target.len();
// `byteorder::WriteBytesExt::write_uint128` writes the low `len` bytes of
// `data` in the requested order.
454 layout::Endian::Little => target.write_uint128::<LittleEndian>(data, len),
455 layout::Endian::Big => target.write_uint128::<BigEndian>(data, len),
// Reads all of `source` as an unsigned integer in the target's byte order.
460 pub fn read_target_uint(endianness: layout::Endian, mut source: &[u8]) -> Result<u128, io::Error> {
462 layout::Endian::Little => source.read_uint128::<LittleEndian>(source.len()),
463 layout::Endian::Big => source.read_uint128::<BigEndian>(source.len()),
467 ////////////////////////////////////////////////////////////////////////////////
468 // Methods to facilitate working with signed integers stored in a u128
469 ////////////////////////////////////////////////////////////////////////////////
471 /// Truncate `value` to `size` bits and then sign-extend it to 128 bits
472 /// (i.e., if it is negative, fill with 1's on the left).
474 pub fn sign_extend(value: u128, size: Size) -> u128 {
475 let size = size.bits();
477 // Truncated until nothing is left.
// NOTE(review): the `size == 0` early-return guarding the shift below is
// elided from this view; without it, `128 - size` would shift by 128.
481 let shift = 128 - size;
482 // shift the unsigned value to the left
483 // and back to the right as signed (essentially fills with FF on the left)
484 (((value << shift) as i128) >> shift) as u128
487 /// Truncate `value` to `size` bits.
489 pub fn truncate(value: u128, size: Size) -> u128 {
490 let size = size.bits();
492 // Truncated until nothing is left.
// NOTE(review): the `size == 0` early-return guarding the shift below is
// elided from this view; without it, `128 - size` would shift by 128.
495 let shift = 128 - size;
496 // truncate (shift left to drop out leftover values, shift right to fill with zeroes)
497 (value << shift) >> shift