1 //! An interpreter for MIR used in CTFE and by miri.
/// Constructs an `InterpError::Unsupported` from the given
/// `UnsupportedOpInfo` variant expression.
#[macro_export]
macro_rules! err_unsup {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::Unsupported(
            $crate::mir::interpret::UnsupportedOpInfo::$($tt)*
        )
    };
}
/// Constructs an `UnsupportedOpInfo::Unsupported` error from a format string.
#[macro_export]
macro_rules! err_unsup_format {
    ($($tt:tt)*) => { err_unsup!(Unsupported(format!($($tt)*))) };
}
/// Constructs an `InterpError::InvalidProgram` from the given
/// `InvalidProgramInfo` variant expression.
#[macro_export]
macro_rules! err_inval {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::InvalidProgram(
            $crate::mir::interpret::InvalidProgramInfo::$($tt)*
        )
    };
}
/// Constructs an `InterpError::UndefinedBehavior` from the given
/// `UndefinedBehaviorInfo` variant expression.
#[macro_export]
macro_rules! err_ub {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::UndefinedBehavior(
            $crate::mir::interpret::UndefinedBehaviorInfo::$($tt)*
        )
    };
}
/// Constructs an `UndefinedBehaviorInfo::Ub` error from a format string.
#[macro_export]
macro_rules! err_ub_format {
    ($($tt:tt)*) => { err_ub!(Ub(format!($($tt)*))) };
}
/// Constructs an `InterpError::Panic` from the given `PanicInfo` variant
/// expression.
#[macro_export]
macro_rules! err_panic {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::Panic(
            $crate::mir::interpret::PanicInfo::$($tt)*
        )
    };
}
/// Constructs an `InterpError::ResourceExhaustion` from the given
/// `ResourceExhaustionInfo` variant expression.
#[macro_export]
macro_rules! err_exhaust {
    ($($tt:tt)*) => {
        $crate::mir::interpret::InterpError::ResourceExhaustion(
            $crate::mir::interpret::ResourceExhaustionInfo::$($tt)*
        )
    };
}
/// Returns early from the enclosing function with an "unsupported" error.
#[macro_export]
macro_rules! throw_unsup {
    ($($tt:tt)*) => { return Err(err_unsup!($($tt)*).into()) };
}
/// Returns early with an "unsupported" error built from a format string.
#[macro_export]
macro_rules! throw_unsup_format {
    ($($tt:tt)*) => { throw_unsup!(Unsupported(format!($($tt)*))) };
}
/// Returns early from the enclosing function with an "invalid program" error.
#[macro_export]
macro_rules! throw_inval {
    ($($tt:tt)*) => { return Err(err_inval!($($tt)*).into()) };
}
/// Returns early from the enclosing function with an "undefined behavior" error.
#[macro_export]
macro_rules! throw_ub {
    ($($tt:tt)*) => { return Err(err_ub!($($tt)*).into()) };
}
/// Returns early with an "undefined behavior" error built from a format string.
#[macro_export]
macro_rules! throw_ub_format {
    ($($tt:tt)*) => { throw_ub!(Ub(format!($($tt)*))) };
}
/// Returns early from the enclosing function with a panic error.
#[macro_export]
macro_rules! throw_panic {
    ($($tt:tt)*) => { return Err(err_panic!($($tt)*).into()) };
}
/// Returns early from the enclosing function with a "resource exhaustion" error.
#[macro_export]
macro_rules! throw_exhaust {
    ($($tt:tt)*) => { return Err(err_exhaust!($($tt)*).into()) };
}
/// Returns early with an `InterpError::MachineStop`, boxing the given payload.
#[macro_export]
macro_rules! throw_machine_stop {
    ($($tt:tt)*) => {
        return Err($crate::mir::interpret::InterpError::MachineStop(Box::new($($tt)*)).into())
    };
}
106 pub use self::error::{
107 struct_error, AssertMessage, ConstEvalErr, ConstEvalRawResult, ConstEvalResult, ErrorHandled,
108 FrameInfo, InterpError, InterpErrorInfo, InterpResult, InvalidProgramInfo, PanicInfo,
109 ResourceExhaustionInfo, UndefinedBehaviorInfo, UnsupportedOpInfo,
112 pub use self::value::{get_slice_bytes, ConstValue, RawConst, Scalar, ScalarMaybeUndef};
114 pub use self::allocation::{Allocation, AllocationExtra, Relocations, UndefMask};
116 pub use self::pointer::{CheckInAllocMsg, Pointer, PointerArithmetic};
119 use crate::ty::codec::TyDecoder;
120 use crate::ty::layout::{self, Size};
121 use crate::ty::subst::GenericArgKind;
122 use crate::ty::{self, Instance, Ty, TyCtxt};
123 use byteorder::{BigEndian, LittleEndian, ReadBytesExt, WriteBytesExt};
124 use rustc_data_structures::fx::FxHashMap;
125 use rustc_data_structures::sync::{HashMapExt, Lock};
126 use rustc_data_structures::tiny_list::TinyList;
127 use rustc_hir::def_id::DefId;
128 use rustc_macros::HashStable;
129 use rustc_serialize::{Decodable, Encodable, Encoder};
132 use std::num::NonZeroU32;
133 use std::sync::atomic::{AtomicU32, Ordering};
134 use syntax::ast::LitKind;
136 /// Uniquely identifies one of the following:
139 /// - A const fn where all arguments (if any) are zero-sized types
140 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, RustcEncodable, RustcDecodable)]
141 #[derive(HashStable, Lift)]
142 pub struct GlobalId<'tcx> {
143 /// For a constant or static, the `Instance` of the item itself.
144 /// For a promoted global, the `Instance` of the function they belong to.
145 pub instance: ty::Instance<'tcx>,
147 /// The index for promoted globals within their function's `mir::Body`.
148 pub promoted: Option<mir::Promoted>,
151 /// Input argument for `tcx.lit_to_const`.
152 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, HashStable)]
153 pub struct LitToConstInput<'tcx> {
154 /// The absolute value of the resultant constant.
155 pub lit: &'tcx LitKind,
156 /// The type of the constant.
158 /// If the constant is negative.
162 /// Error type for `tcx.lit_to_const`.
163 #[derive(Copy, Clone, Debug, Eq, PartialEq, HashStable)]
164 pub enum LitToConstError {
/// Opaque identifier for a single global allocation; the allocation itself
/// (function, static, or memory) is looked up via the `tcx` `AllocMap` below.
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd, Debug)]
pub struct AllocId(pub u64);
// `AllocId`s are (de)serialized via the specialized paths in this module
// (`specialized_encode_alloc_id` / `AllocDecodingSession`) instead of a
// derived implementation.
impl rustc_serialize::UseSpecializedEncodable for AllocId {}
impl rustc_serialize::UseSpecializedDecodable for AllocId {}
175 #[derive(RustcDecodable, RustcEncodable)]
176 enum AllocDiscriminant {
182 pub fn specialized_encode_alloc_id<'tcx, E: Encoder>(
186 ) -> Result<(), E::Error> {
187 let alloc: GlobalAlloc<'tcx> =
188 tcx.alloc_map.lock().get(alloc_id).expect("no value for given alloc ID");
190 GlobalAlloc::Memory(alloc) => {
191 trace!("encoding {:?} with {:#?}", alloc_id, alloc);
192 AllocDiscriminant::Alloc.encode(encoder)?;
193 alloc.encode(encoder)?;
195 GlobalAlloc::Function(fn_instance) => {
196 trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
197 AllocDiscriminant::Fn.encode(encoder)?;
198 fn_instance.encode(encoder)?;
200 GlobalAlloc::Static(did) => {
201 // References to statics doesn't need to know about their allocations,
202 // just about its `DefId`.
203 AllocDiscriminant::Static.encode(encoder)?;
204 did.encode(encoder)?;
210 // Used to avoid infinite recursion when decoding cyclic allocations.
211 type DecodingSessionId = NonZeroU32;
216 InProgressNonAlloc(TinyList<DecodingSessionId>),
217 InProgress(TinyList<DecodingSessionId>, AllocId),
221 pub struct AllocDecodingState {
222 // For each `AllocId`, we keep track of which decoding state it's currently in.
223 decoding_state: Vec<Lock<State>>,
224 // The offsets of each allocation in the data stream.
225 data_offsets: Vec<u32>,
228 impl AllocDecodingState {
229 pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> {
230 static DECODER_SESSION_ID: AtomicU32 = AtomicU32::new(0);
231 let counter = DECODER_SESSION_ID.fetch_add(1, Ordering::SeqCst);
233 // Make sure this is never zero.
234 let session_id = DecodingSessionId::new((counter & 0x7FFFFFFF) + 1).unwrap();
236 AllocDecodingSession { state: self, session_id }
239 pub fn new(data_offsets: Vec<u32>) -> Self {
240 let decoding_state = vec![Lock::new(State::Empty); data_offsets.len()];
242 Self { decoding_state, data_offsets }
246 #[derive(Copy, Clone)]
247 pub struct AllocDecodingSession<'s> {
248 state: &'s AllocDecodingState,
249 session_id: DecodingSessionId,
252 impl<'s> AllocDecodingSession<'s> {
253 /// Decodes an `AllocId` in a thread-safe way.
254 pub fn decode_alloc_id<D>(&self, decoder: &mut D) -> Result<AllocId, D::Error>
258 // Read the index of the allocation.
259 let idx = decoder.read_u32()? as usize;
260 let pos = self.state.data_offsets[idx] as usize;
262 // Decode the `AllocDiscriminant` now so that we know if we have to reserve an
264 let (alloc_kind, pos) = decoder.with_position(pos, |decoder| {
265 let alloc_kind = AllocDiscriminant::decode(decoder)?;
266 Ok((alloc_kind, decoder.position()))
269 // Check the decoding state to see if it's already decoded or if we should
272 let mut entry = self.state.decoding_state[idx].lock();
275 State::Done(alloc_id) => {
278 ref mut entry @ State::Empty => {
279 // We are allowed to decode.
281 AllocDiscriminant::Alloc => {
282 // If this is an allocation, we need to reserve an
283 // `AllocId` so we can decode cyclic graphs.
284 let alloc_id = decoder.tcx().alloc_map.lock().reserve();
286 State::InProgress(TinyList::new_single(self.session_id), alloc_id);
289 AllocDiscriminant::Fn | AllocDiscriminant::Static => {
290 // Fns and statics cannot be cyclic, and their `AllocId`
291 // is determined later by interning.
293 State::InProgressNonAlloc(TinyList::new_single(self.session_id));
298 State::InProgressNonAlloc(ref mut sessions) => {
299 if sessions.contains(&self.session_id) {
300 bug!("this should be unreachable");
302 // Start decoding concurrently.
303 sessions.insert(self.session_id);
307 State::InProgress(ref mut sessions, alloc_id) => {
308 if sessions.contains(&self.session_id) {
312 // Start decoding concurrently.
313 sessions.insert(self.session_id);
320 // Now decode the actual data.
321 let alloc_id = decoder.with_position(pos, |decoder| {
323 AllocDiscriminant::Alloc => {
324 let alloc = <&'tcx Allocation as Decodable>::decode(decoder)?;
325 // We already have a reserved `AllocId`.
326 let alloc_id = alloc_id.unwrap();
327 trace!("decoded alloc {:?}: {:#?}", alloc_id, alloc);
328 decoder.tcx().alloc_map.lock().set_alloc_id_same_memory(alloc_id, alloc);
331 AllocDiscriminant::Fn => {
332 assert!(alloc_id.is_none());
333 trace!("creating fn alloc ID");
334 let instance = ty::Instance::decode(decoder)?;
335 trace!("decoded fn alloc instance: {:?}", instance);
336 let alloc_id = decoder.tcx().alloc_map.lock().create_fn_alloc(instance);
339 AllocDiscriminant::Static => {
340 assert!(alloc_id.is_none());
341 trace!("creating extern static alloc ID");
342 let did = DefId::decode(decoder)?;
343 trace!("decoded static def-ID: {:?}", did);
344 let alloc_id = decoder.tcx().alloc_map.lock().create_static_alloc(did);
350 self.state.decoding_state[idx].with_lock(|entry| {
351 *entry = State::Done(alloc_id);
358 impl fmt::Display for AllocId {
359 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
360 write!(f, "{}", self.0)
364 /// An allocation in the global (tcx-managed) memory can be either a function pointer,
365 /// a static, or a "real" allocation with some data in it.
366 #[derive(Debug, Clone, Eq, PartialEq, Hash, RustcDecodable, RustcEncodable, HashStable)]
367 pub enum GlobalAlloc<'tcx> {
368 /// The alloc ID is used as a function pointer.
369 Function(Instance<'tcx>),
370 /// The alloc ID points to a "lazy" static variable that did not get computed (yet).
371 /// This is also used to break the cycle in recursive statics.
373 /// The alloc ID points to memory.
374 Memory(&'tcx Allocation),
377 pub struct AllocMap<'tcx> {
378 /// Maps `AllocId`s to their corresponding allocations.
379 alloc_map: FxHashMap<AllocId, GlobalAlloc<'tcx>>,
381 /// Used to ensure that statics and functions only get one associated `AllocId`.
382 /// Should never contain a `GlobalAlloc::Memory`!
384 // FIXME: Should we just have two separate dedup maps for statics and functions each?
385 dedup: FxHashMap<GlobalAlloc<'tcx>, AllocId>,
387 /// The `AllocId` to assign to the next requested ID.
388 /// Always incremented; never gets smaller.
392 impl<'tcx> AllocMap<'tcx> {
393 pub fn new() -> Self {
394 AllocMap { alloc_map: Default::default(), dedup: Default::default(), next_id: AllocId(0) }
397 /// Obtains a new allocation ID that can be referenced but does not
398 /// yet have an allocation backing it.
400 /// Make sure to call `set_alloc_id_memory` or `set_alloc_id_same_memory` before returning such
401 /// an `AllocId` from a query.
402 pub fn reserve(&mut self) -> AllocId {
403 let next = self.next_id;
404 self.next_id.0 = self.next_id.0.checked_add(1).expect(
405 "You overflowed a u64 by incrementing by 1... \
406 You've just earned yourself a free drink if we ever meet. \
407 Seriously, how did you do that?!",
412 /// Reserves a new ID *if* this allocation has not been dedup-reserved before.
413 /// Should only be used for function pointers and statics, we don't want
414 /// to dedup IDs for "real" memory!
415 fn reserve_and_set_dedup(&mut self, alloc: GlobalAlloc<'tcx>) -> AllocId {
417 GlobalAlloc::Function(..) | GlobalAlloc::Static(..) => {}
418 GlobalAlloc::Memory(..) => bug!("Trying to dedup-reserve memory with real data!"),
420 if let Some(&alloc_id) = self.dedup.get(&alloc) {
423 let id = self.reserve();
424 debug!("creating alloc {:?} with id {}", alloc, id);
425 self.alloc_map.insert(id, alloc.clone());
426 self.dedup.insert(alloc, id);
430 /// Generates an `AllocId` for a static or return a cached one in case this function has been
431 /// called on the same static before.
432 pub fn create_static_alloc(&mut self, static_id: DefId) -> AllocId {
433 self.reserve_and_set_dedup(GlobalAlloc::Static(static_id))
436 /// Generates an `AllocId` for a function. Depending on the function type,
437 /// this might get deduplicated or assigned a new ID each time.
438 pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> AllocId {
439 // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
440 // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be
441 // duplicated across crates.
442 // We thus generate a new `AllocId` for every mention of a function. This means that
443 // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
444 // However, formatting code relies on function identity (see #58320), so we only do
445 // this for generic functions. Lifetime parameters are ignored.
446 let is_generic = instance.substs.into_iter().any(|kind| match kind.unpack() {
447 GenericArgKind::Lifetime(_) => false,
452 let id = self.reserve();
453 self.alloc_map.insert(id, GlobalAlloc::Function(instance));
457 self.reserve_and_set_dedup(GlobalAlloc::Function(instance))
461 /// Interns the `Allocation` and return a new `AllocId`, even if there's already an identical
462 /// `Allocation` with a different `AllocId`.
463 /// Statics with identical content will still point to the same `Allocation`, i.e.,
464 /// their data will be deduplicated through `Allocation` interning -- but they
465 /// are different places in memory and as such need different IDs.
466 pub fn create_memory_alloc(&mut self, mem: &'tcx Allocation) -> AllocId {
467 let id = self.reserve();
468 self.set_alloc_id_memory(id, mem);
472 /// Returns `None` in case the `AllocId` is dangling. An `InterpretCx` can still have a
473 /// local `Allocation` for that `AllocId`, but having such an `AllocId` in a constant is
474 /// illegal and will likely ICE.
475 /// This function exists to allow const eval to detect the difference between evaluation-
476 /// local dangling pointers and allocations in constants/statics.
478 pub fn get(&self, id: AllocId) -> Option<GlobalAlloc<'tcx>> {
479 self.alloc_map.get(&id).cloned()
482 /// Panics if the `AllocId` does not refer to an `Allocation`
483 pub fn unwrap_memory(&self, id: AllocId) -> &'tcx Allocation {
485 Some(GlobalAlloc::Memory(mem)) => mem,
486 _ => bug!("expected allocation ID {} to point to memory", id),
490 /// Panics if the `AllocId` does not refer to a function
491 pub fn unwrap_fn(&self, id: AllocId) -> Instance<'tcx> {
493 Some(GlobalAlloc::Function(instance)) => instance,
494 _ => bug!("expected allocation ID {} to point to a function", id),
498 /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
499 /// call this function twice, even with the same `Allocation` will ICE the compiler.
500 pub fn set_alloc_id_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
501 if let Some(old) = self.alloc_map.insert(id, GlobalAlloc::Memory(mem)) {
502 bug!("tried to set allocation ID {}, but it was already existing as {:#?}", id, old);
506 /// Freezes an `AllocId` created with `reserve` by pointing it at an `Allocation`. May be called
507 /// twice for the same `(AllocId, Allocation)` pair.
508 fn set_alloc_id_same_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
509 self.alloc_map.insert_same(id, GlobalAlloc::Memory(mem));
513 ////////////////////////////////////////////////////////////////////////////////
514 // Methods to access integers in the target endianness
515 ////////////////////////////////////////////////////////////////////////////////
518 pub fn write_target_uint(
519 endianness: layout::Endian,
520 mut target: &mut [u8],
522 ) -> Result<(), io::Error> {
523 let len = target.len();
525 layout::Endian::Little => target.write_uint128::<LittleEndian>(data, len),
526 layout::Endian::Big => target.write_uint128::<BigEndian>(data, len),
531 pub fn read_target_uint(endianness: layout::Endian, mut source: &[u8]) -> Result<u128, io::Error> {
533 layout::Endian::Little => source.read_uint128::<LittleEndian>(source.len()),
534 layout::Endian::Big => source.read_uint128::<BigEndian>(source.len()),
538 ////////////////////////////////////////////////////////////////////////////////
539 // Methods to facilitate working with signed integers stored in a u128
540 ////////////////////////////////////////////////////////////////////////////////
542 /// Truncates `value` to `size` bits and then sign-extend it to 128 bits
543 /// (i.e., if it is negative, fill with 1's on the left).
545 pub fn sign_extend(value: u128, size: Size) -> u128 {
546 let size = size.bits();
548 // Truncated until nothing is left.
552 let shift = 128 - size;
553 // Shift the unsigned value to the left, then shift back to the right as signed
554 // (essentially fills with FF on the left).
555 (((value << shift) as i128) >> shift) as u128
558 /// Truncates `value` to `size` bits.
560 pub fn truncate(value: u128, size: Size) -> u128 {
561 let size = size.bits();
563 // Truncated until nothing is left.
566 let shift = 128 - size;
567 // Truncate (shift left to drop out leftover values, shift right to fill with zeroes).
568 (value << shift) >> shift