1 //! An interpreter for MIR used in CTFE and by miri
5 ($($tt:tt)*) => { Err($crate::mir::interpret::InterpError::$($tt)*.into()) };
13 pub use self::error::{
14 EvalError, EvalResult, InterpError, AssertMessage, ConstEvalErr, struct_error,
15 FrameInfo, ConstEvalRawResult, ConstEvalResult, ErrorHandled,
18 pub use self::value::{Scalar, ScalarMaybeUndef, RawConst, ConstValue};
20 pub use self::allocation::{
21 InboundsCheck, Allocation, AllocationExtra,
22 Relocations, UndefMask, CheckInAllocMsg,
25 pub use self::pointer::{Pointer, PointerArithmetic};
29 use crate::hir::def_id::DefId;
30 use crate::ty::{self, TyCtxt, Instance, subst::UnpackedKind};
31 use crate::ty::layout::{self, Size};
33 use crate::rustc_serialize::{Encoder, Decodable, Encodable};
34 use rustc_data_structures::fx::FxHashMap;
35 use rustc_data_structures::sync::{Lock as Mutex, HashMapExt};
36 use rustc_data_structures::tiny_list::TinyList;
37 use rustc_macros::HashStable;
38 use byteorder::{WriteBytesExt, ReadBytesExt, LittleEndian, BigEndian};
39 use crate::ty::codec::TyDecoder;
40 use std::sync::atomic::{AtomicU32, Ordering};
41 use std::num::NonZeroU32;
43 /// Uniquely identifies a specific constant or static.
44 #[derive(Copy, Clone, Debug, Eq, PartialEq, Hash, RustcEncodable, RustcDecodable, HashStable)]
45 pub struct GlobalId<'tcx> {
46 /// For a constant or static, the `Instance` of the item itself.
47 /// For a promoted global, the `Instance` of the function they belong to.
48 pub instance: ty::Instance<'tcx>,
50 /// The index for promoted globals within their function's `mir::Body`.
/// `None` when this id refers to the item itself rather than to a promoted
/// within its body.
51 pub promoted: Option<mir::Promoted>,
/// A compact, opaque identifier for an interpreter allocation.
///
/// The wrapped `u64` is handed out sequentially by `AllocMap::reserve` and is
/// only meaningful relative to the `AllocMap` that produced it; `Display`
/// (implemented below) prints just the raw number.
#[derive(Copy, Clone, Eq, Hash, Ord, PartialEq, PartialOrd, Debug)]
pub struct AllocId(pub u64);
// (De)serialization of `AllocId` is not derived: it is delegated to
// codec-specific code (`specialized_encode_alloc_id` and
// `AllocDecodingSession::decode_alloc_id` below).
57 impl crate::rustc_serialize::UseSpecializedEncodable for AllocId {}
58 impl crate::rustc_serialize::UseSpecializedDecodable for AllocId {}
// Tag written in front of each serialized alloc-id payload so the decoder
// knows which kind of payload follows: an `Allocation`, a function
// `Instance`, or a static's `DefId` (see encode/decode functions below).
60 #[derive(RustcDecodable, RustcEncodable)]
61 enum AllocDiscriminant {
// Encodes an `AllocId` by looking up what it refers to in the tcx's alloc map
// and writing an `AllocDiscriminant` tag followed by the kind-specific
// payload. Panics if the id is dangling (not present in the alloc map).
67 pub fn specialized_encode_alloc_id<
72 tcx: TyCtxt<'a, 'tcx, 'tcx>,
74 ) -> Result<(), E::Error> {
75 let alloc_kind: AllocKind<'tcx> =
76 tcx.alloc_map.lock().get(alloc_id).expect("no value for AllocId");
78 AllocKind::Memory(alloc) => {
79 trace!("encoding {:?} with {:#?}", alloc_id, alloc);
80 AllocDiscriminant::Alloc.encode(encoder)?;
81 alloc.encode(encoder)?;
83 AllocKind::Function(fn_instance) => {
84 trace!("encoding {:?} with {:#?}", alloc_id, fn_instance);
85 AllocDiscriminant::Fn.encode(encoder)?;
86 fn_instance.encode(encoder)?;
88 AllocKind::Static(did) => {
89 // References to a static don't need to encode its allocation,
90 // only its `DefId` (the decoder re-interns the static from it).
91 AllocDiscriminant::Static.encode(encoder)?;
98 // Used to avoid infinite recursion when decoding cyclic allocations.
// `NonZeroU32` so that `Option`-like layouts stay free; zero is never issued
// (see `new_decoding_session`, which always adds 1).
99 type DecodingSessionId = NonZeroU32;
// Sessions currently decoding a `Fn`/`Static` entry; no `AllocId` is reserved
// up front for those — the id is produced later by interning.
104 InProgressNonAlloc(TinyList<DecodingSessionId>),
// Sessions currently decoding a `Memory` entry, together with the `AllocId`
// reserved for it so that cyclic references can resolve to it.
105 InProgress(TinyList<DecodingSessionId>, AllocId),
// Shared, thread-safe bookkeeping for decoding serialized allocations.
109 pub struct AllocDecodingState {
110 // For each AllocId we keep track of which decoding state it's currently in.
// Indexed in parallel with `data_offsets` below (same `idx` is used for both
// in `decode_alloc_id`).
111 decoding_state: Vec<Mutex<State>>,
112 // The offsets of each allocation in the data stream.
113 data_offsets: Vec<u32>,
116 impl AllocDecodingState {
// Starts a new decoding session against this shared state. The session id is
// derived from a global atomic counter; because of the 31-bit mask, ids
// repeat only after 2^31 sessions.
118 pub fn new_decoding_session(&self) -> AllocDecodingSession<'_> {
119 static DECODER_SESSION_ID: AtomicU32 = AtomicU32::new(0);
120 let counter = DECODER_SESSION_ID.fetch_add(1, Ordering::SeqCst);
122 // Make sure this is never zero
// Masking to 31 bits before the `+ 1` also guarantees the addition cannot
// overflow `u32`, so the `unwrap` on the `NonZeroU32` cannot fail.
123 let session_id = DecodingSessionId::new((counter & 0x7FFFFFFF) + 1).unwrap();
125 AllocDecodingSession {
// Builds the state for a stream whose per-allocation offsets are known.
131 pub fn new(data_offsets: Vec<u32>) -> AllocDecodingState {
// Every entry starts out `Empty`; sessions move entries through the
// `InProgress*` states to `Done` as they decode.
132 let decoding_state = vec![Mutex::new(State::Empty); data_offsets.len()];
141 #[derive(Copy, Clone)]
// A lightweight handle pairing the shared decoding state with the id of one
// decoding session; cheap to copy into closures.
142 pub struct AllocDecodingSession<'s> {
143 state: &'s AllocDecodingState,
144 session_id: DecodingSessionId,
147 impl<'s> AllocDecodingSession<'s> {
149 // Decodes an AllocId in a thread-safe way.
// Protocol: read the stream index, peek the discriminant, then consult the
// per-entry state under its lock — either reuse a finished id, join an
// in-progress decode, or claim the entry and decode the payload ourselves.
150 pub fn decode_alloc_id<'a, 'tcx, D>(&self,
152 -> Result<AllocId, D::Error>
153 where D: TyDecoder<'a, 'tcx>,
156 // Read the index of the allocation
157 let idx = decoder.read_u32()? as usize;
158 let pos = self.state.data_offsets[idx] as usize;
160 // Decode the AllocDiscriminant now so that we know if we have to reserve an
// `AllocId` before decoding the payload (only `Alloc` entries can be cyclic).
162 let (alloc_kind, pos) = decoder.with_position(pos, |decoder| {
163 let alloc_kind = AllocDiscriminant::decode(decoder)?;
164 Ok((alloc_kind, decoder.position()))
167 // Check the decoding state, see if it's already decoded or if we should
170 let mut entry = self.state.decoding_state[idx].lock();
// Some session (possibly ours) already finished this entry; reuse its id.
173 State::Done(alloc_id) => {
176 ref mut entry @ State::Empty => {
177 // We are allowed to decode
179 AllocDiscriminant::Alloc => {
180 // If this is an allocation, we need to reserve an
181 // AllocId so we can decode cyclic graphs.
182 let alloc_id = decoder.tcx().alloc_map.lock().reserve();
183 *entry = State::InProgress(
184 TinyList::new_single(self.session_id),
188 AllocDiscriminant::Fn | AllocDiscriminant::Static => {
189 // Fns and statics cannot be cyclic and their AllocId
190 // is determined later by interning
191 *entry = State::InProgressNonAlloc(
192 TinyList::new_single(self.session_id));
197 State::InProgressNonAlloc(ref mut sessions) => {
// A non-alloc entry cannot be cyclic, so revisiting it from the same
// session would be a logic error.
198 if sessions.contains(&self.session_id) {
199 bug!("This should be unreachable")
201 // Start decoding concurrently
202 sessions.insert(self.session_id);
206 State::InProgress(ref mut sessions, alloc_id) => {
// Seeing our own session id again means we hit a cycle; the reserved
// `alloc_id` lets callers refer to the allocation before it is done.
207 if sessions.contains(&self.session_id) {
211 // Start decoding concurrently
212 sessions.insert(self.session_id);
219 // Now decode the actual data
// (the entry lock is no longer held here, so concurrent sessions can make
// progress while we decode)
220 let alloc_id = decoder.with_position(pos, |decoder| {
222 AllocDiscriminant::Alloc => {
223 let allocation = <&'tcx Allocation as Decodable>::decode(decoder)?;
224 // We already have a reserved AllocId.
225 let alloc_id = alloc_id.unwrap();
226 trace!("decoded alloc {:?} {:#?}", alloc_id, allocation);
227 decoder.tcx().alloc_map.lock().set_alloc_id_same_memory(alloc_id, allocation);
230 AllocDiscriminant::Fn => {
231 assert!(alloc_id.is_none());
232 trace!("creating fn alloc id");
233 let instance = ty::Instance::decode(decoder)?;
234 trace!("decoded fn alloc instance: {:?}", instance);
235 let alloc_id = decoder.tcx().alloc_map.lock().create_fn_alloc(instance);
238 AllocDiscriminant::Static => {
239 assert!(alloc_id.is_none());
240 trace!("creating extern static alloc id at");
241 let did = DefId::decode(decoder)?;
242 let alloc_id = decoder.tcx().alloc_map.lock().intern_static(did);
// Record the final id so that later lookups short-circuit via `State::Done`.
248 self.state.decoding_state[idx].with_lock(|entry| {
249 *entry = State::Done(alloc_id);
// User-facing form: just the raw numeric id (the derived `Debug` shows the
// `AllocId(..)` wrapper).
256 impl fmt::Display for AllocId {
257 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
258 write!(f, "{}", self.0)
262 #[derive(Debug, Clone, Eq, PartialEq, Hash, RustcDecodable, RustcEncodable, HashStable)]
// What a given `AllocId` refers to (the value type of `AllocMap::id_to_kind`).
263 pub enum AllocKind<'tcx> {
264 /// The alloc ID is used as a function pointer
265 Function(Instance<'tcx>),
266 /// The alloc ID points to a "lazy" static variable that did not get computed (yet).
267 /// This is also used to break the cycle in recursive statics.
269 /// The alloc ID points to memory.
270 Memory(&'tcx Allocation),
// Global registry mapping `AllocId`s to what they refer to, plus interning
// so that equal kinds can share a single id (see `intern` below).
273 pub struct AllocMap<'tcx> {
274 /// Lets you know what an `AllocId` refers to.
275 id_to_kind: FxHashMap<AllocId, AllocKind<'tcx>>,
277 /// Used to ensure that statics only get one associated `AllocId`.
/// Reverse direction of `id_to_kind` for interned kinds: kind -> canonical id.
278 type_interner: FxHashMap<AllocKind<'tcx>, AllocId>,
280 /// The `AllocId` to assign to the next requested ID.
281 /// Always incremented, never gets smaller.
285 impl<'tcx> AllocMap<'tcx> {
// Creates an empty map; ids start from the initial `next_id`.
286 pub fn new() -> Self {
288 id_to_kind: Default::default(),
289 type_interner: Default::default(),
294 /// Obtains a new allocation ID that can be referenced but does not
295 /// yet have an allocation backing it.
297 /// Make sure to call `set_alloc_id_memory` or `set_alloc_id_same_memory` before returning such
298 /// an `AllocId` from a query.
302 let next = self.next_id;
// The increment is checked: exhausting the u64 id space produces a loud ICE
// (the `expect` below) rather than silently reusing ids.
303 self.next_id.0 = self.next_id.0
305 .expect("You overflowed a u64 by incrementing by 1... \
306 You've just earned yourself a free drink if we ever meet. \
307 Seriously, how did you do that?!");
// Returns the canonical id for `alloc_kind`, minting and registering a fresh
// one (in both maps) on first sight.
311 fn intern(&mut self, alloc_kind: AllocKind<'tcx>) -> AllocId {
312 if let Some(&alloc_id) = self.type_interner.get(&alloc_kind) {
315 let id = self.reserve();
316 debug!("creating alloc_kind {:?} with id {}", alloc_kind, id);
317 self.id_to_kind.insert(id, alloc_kind.clone());
318 self.type_interner.insert(alloc_kind, id);
// Returns an `AllocId` usable as a pointer for `instance`.
322 pub fn create_fn_alloc(&mut self, instance: Instance<'tcx>) -> AllocId {
323 // Functions cannot be identified by pointers, as asm-equal functions can get deduplicated
324 // by the linker (we set the "unnamed_addr" attribute for LLVM) and functions can be
325 // duplicated across crates.
326 // We thus generate a new `AllocId` for every mention of a function. This means that
327 // `main as fn() == main as fn()` is false, while `let x = main as fn(); x == x` is true.
328 // However, formatting code relies on function identity (see #58320), so we only do
329 // this for generic functions. Lifetime parameters are ignored.
330 let is_generic = instance.substs.into_iter().any(|kind| {
331 match kind.unpack() {
332 UnpackedKind::Lifetime(_) => false,
// Generic function: always mint a fresh id (no interning).
338 let id = self.reserve();
339 self.id_to_kind.insert(id, AllocKind::Function(instance));
// Non-generic function: intern so repeated mentions share one id (#58320).
343 self.intern(AllocKind::Function(instance))
347 /// Returns `None` in case the `AllocId` is dangling. An `InterpretCx` can still have a
348 /// local `Allocation` for that `AllocId`, but having such an `AllocId` in a constant is
349 /// illegal and will likely ICE.
350 /// This function exists to allow const eval to detect the difference between evaluation-
351 /// local dangling pointers and allocations in constants/statics.
353 pub fn get(&self, id: AllocId) -> Option<AllocKind<'tcx>> {
354 self.id_to_kind.get(&id).cloned()
357 /// Panics if the `AllocId` does not refer to an `Allocation`
358 pub fn unwrap_memory(&self, id: AllocId) -> &'tcx Allocation {
360 Some(AllocKind::Memory(mem)) => mem,
361 _ => bug!("expected allocation id {} to point to memory", id),
365 /// Generates an `AllocId` for a static, or returns a cached one if this function has been
366 /// called on the same static before.
367 pub fn intern_static(&mut self, static_id: DefId) -> AllocId {
368 self.intern(AllocKind::Static(static_id))
371 /// Intern the `Allocation` and return a new `AllocId`, even if there's already an identical
372 /// `Allocation` with a different `AllocId`.
373 // FIXME: is this really necessary? Can we ensure `FOO` and `BAR` being different after codegen
374 // in `static FOO: u32 = 42; static BAR: u32 = 42;` even if they reuse the same allocation
376 pub fn allocate(&mut self, mem: &'tcx Allocation) -> AllocId {
377 let id = self.reserve();
378 self.set_alloc_id_memory(id, mem);
382 /// Freeze an `AllocId` created with `reserve` by pointing it at an `Allocation`. Trying to
383 /// call this function twice, even with the same `Allocation` will ICE the compiler.
384 pub fn set_alloc_id_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
385 if let Some(old) = self.id_to_kind.insert(id, AllocKind::Memory(mem)) {
386 bug!("tried to set allocation id {}, but it was already existing as {:#?}", id, old);
390 /// Freeze an `AllocId` created with `reserve` by pointing it at an `Allocation`. May be called
391 /// twice for the same `(AllocId, Allocation)` pair.
// Used by the cyclic-decoding path, which may re-set the same memory.
392 fn set_alloc_id_same_memory(&mut self, id: AllocId, mem: &'tcx Allocation) {
393 self.id_to_kind.insert_same(id, AllocKind::Memory(mem));
397 ////////////////////////////////////////////////////////////////////////////////
398 // Methods to access integers in the target endianness
399 ////////////////////////////////////////////////////////////////////////////////
// Writes the unsigned integer `data` into the full length of `target`, using
// the target's endianness (not the host's).
402 pub fn write_target_uint(
403 endianness: layout::Endian,
404 mut target: &mut [u8],
406 ) -> Result<(), io::Error> {
407 let len = target.len();
409 layout::Endian::Little => target.write_uint128::<LittleEndian>(data, len),
410 layout::Endian::Big => target.write_uint128::<BigEndian>(data, len),
// Reads the whole of `source` as an unsigned integer in the given (target)
// endianness; the byte count is taken from the slice length.
415 pub fn read_target_uint(endianness: layout::Endian, mut source: &[u8]) -> Result<u128, io::Error> {
417 layout::Endian::Little => source.read_uint128::<LittleEndian>(source.len()),
418 layout::Endian::Big => source.read_uint128::<BigEndian>(source.len()),
422 ////////////////////////////////////////////////////////////////////////////////
423 // Methods to facilitate working with signed integers stored in a u128
424 ////////////////////////////////////////////////////////////////////////////////
426 /// Truncate `value` to `size` bits and then sign-extend it to 128 bits
427 /// (i.e., if it is negative, fill with 1's on the left).
429 pub fn sign_extend(value: u128, size: Size) -> u128 {
430 let size = size.bits();
432 // Truncated until nothing is left.
// NOTE(review): requires `size <= 128`; otherwise `128 - size` underflows.
436 let shift = 128 - size;
437 // shift the unsigned value to the left
438 // and back to the right as signed (essentially fills with FF on the left)
// (the cast to `i128` makes the right shift arithmetic, propagating bit 127)
439 (((value << shift) as i128) >> shift) as u128
442 /// Truncate `value` to `size` bits.
444 pub fn truncate(value: u128, size: Size) -> u128 {
445 let size = size.bits();
447 // Truncated until nothing is left.
// NOTE(review): requires `size <= 128`; otherwise `128 - size` underflows.
450 let shift = 128 - size;
451 // truncate (shift left to drop out leftover values, shift right to fill with zeroes)
452 (value << shift) >> shift