1 //! This module contains the machinery necessary to detect infinite loops
2 //! during const-evaluation by taking snapshots of the state of the interpreter
3 //! at regular intervals.
5 // This lives in `interpret` because it needs access to all sorts of private state. However,
6 // it is not used by the general miri engine, just by CTFE.
8 use std::hash::{Hash, Hasher};
10 use rustc::ich::StableHashingContextProvider;
12 use rustc::mir::interpret::{
13 AllocId, Allocation, InterpResult, Pointer, Relocations, Scalar, UndefMask,
16 use rustc::ty::layout::{Align, Size};
17 use rustc::ty::{self, TyCtxt};
18 use rustc_ast::ast::Mutability;
19 use rustc_data_structures::fx::FxHashSet;
20 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
21 use rustc_index::vec::IndexVec;
22 use rustc_macros::HashStable;
23 use rustc_span::source_map::Span;
25 use super::eval_context::{LocalState, StackPopCleanup};
27 Frame, Immediate, LocalValue, MemPlace, MemPlaceMeta, Memory, Operand, Place, ScalarMaybeUndef,
29 use crate::const_eval::CompileTimeInterpreter;
// Two-level detector state: a cheap `u64`-hash filter first, full snapshots
// only after a hash collision (see `observe_and_analyze` below).
32 pub(crate) struct InfiniteLoopDetector<'mir, 'tcx> {
33 /// The set of all `InterpSnapshot` *hashes* observed by this detector.
35 /// When a collision occurs in this table, we store the full snapshot in
37 hashes: FxHashSet<u64>,
39 /// The set of all `InterpSnapshot`s observed by this detector.
41 /// An `InterpSnapshot` will only be fully cloned once it has caused a
42 /// collision in `hashes`. As a result, the detector must observe at least
43 /// *two* full cycles of an infinite loop before it triggers.
44 snapshots: FxHashSet<InterpSnapshot<'mir, 'tcx>>,
47 impl<'mir, 'tcx> InfiniteLoopDetector<'mir, 'tcx> {
// Records one observation of the interpreter state: hash the stack cheaply
// first; only a hash collision pays for a full `InterpSnapshot`. Inserting
// a snapshot that is already present means the machine state recurred
// exactly, which is reported as an infinite loop.
// NOTE(review): several parameter/body lines (`self`, `tcx`, early returns,
// the warning emission) are elided in this view — confirm against full source.
48 pub fn observe_and_analyze(
52 memory: &Memory<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
53 stack: &[Frame<'mir, 'tcx>],
54 ) -> InterpResult<'tcx, ()> {
55 // Compute stack's hash before copying anything
56 let mut hcx = tcx.get_stable_hashing_context();
57 let mut hasher = StableHasher::new();
58 stack.hash_stable(&mut hcx, &mut hasher);
59 let hash = hasher.finish::<u64>();
61 // Check if we know that hash already
62 if self.hashes.is_empty() {
63 // FIXME(#49980): make this warning a lint
// First observation ever: warn that const-evaluation may take a while.
66 "Constant evaluating a complex constant, this might take some time",
// Fresh hash: this state was not seen before, keep executing.
69 if self.hashes.insert(hash) {
74 // We need to make a full copy. NOW things start to get really expensive.
75 info!("snapshotting the state of the interpreter");
77 if self.snapshots.insert(InterpSnapshot::new(memory, stack)) {
78 // Spurious collision or first cycle
// Snapshot already present: state repeated exactly -> diverging execution.
83 throw_exhaust!(InfiniteLoop)
/// Context capable of resolving an `AllocId` to the backing `Allocation`
/// (implemented further down for the CTFE `Memory`).
87 trait SnapshotContext<'a> {
88 fn resolve(&'a self, id: &AllocId) -> Option<&'a Allocation>;
91 /// Taking a snapshot of the evaluation context produces a view of
92 /// the state of the interpreter that is invariant to `AllocId`s.
93 trait Snapshot<'a, Ctx: SnapshotContext<'a>> {
// NOTE(review): the associated `type Item;` declaration is elided in this view.
95 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item;
// Helper for `impl_snapshot_for!`: snapshots one field. The two-argument arm
// calls the field's own `Snapshot` impl; the three-argument arm presumably
// expands to the caller-provided `$delegate` expression instead (its body is
// elided in this view — confirm against full source).
98 macro_rules! __impl_snapshot_field {
99 ($field:ident, $ctx:expr) => {
100 $field.snapshot($ctx)
102 ($field:ident, $ctx:expr, $delegate:expr) => {
109 macro_rules! impl_snapshot_for {
110 (enum $enum_name:ident {
111 $( $variant:ident $( ( $($field:ident $(-> $delegate:expr)?),* ) )? ),* $(,)?
114 impl<'a, Ctx> self::Snapshot<'a, Ctx> for $enum_name
115 where Ctx: self::SnapshotContext<'a>,
117 type Item = $enum_name<(), AllocIdSnapshot<'a>>;
120 fn snapshot(&self, __ctx: &'a Ctx) -> Self::Item {
123 $enum_name::$variant $( ( $(ref $field),* ) )? => {
124 $enum_name::$variant $(
125 ( $( __impl_snapshot_field!($field, __ctx $(, $delegate)?) ),* )
134 (struct $struct_name:ident { $($field:ident $(-> $delegate:expr)?),* $(,)? }) => {
135 impl<'a, Ctx> self::Snapshot<'a, Ctx> for $struct_name
136 where Ctx: self::SnapshotContext<'a>,
138 type Item = $struct_name<(), AllocIdSnapshot<'a>>;
141 fn snapshot(&self, __ctx: &'a Ctx) -> Self::Item {
147 $( $field: __impl_snapshot_field!($field, __ctx $(, $delegate)?) ),*
// `Option<T>` snapshots element-wise: `Some` maps through the inner snapshot
// (the `None` arm is elided in this view — presumably `None => None`).
154 impl<'a, Ctx, T> Snapshot<'a, Ctx> for Option<T>
156 Ctx: SnapshotContext<'a>,
157 T: Snapshot<'a, Ctx>,
159 type Item = Option<<T as Snapshot<'a, Ctx>>::Item>;
161 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
163 Some(x) => Some(x.snapshot(ctx)),
/// `AllocId`-invariant stand-in for an allocation id: the snapshot of the
/// allocation it resolves to, or `None` if it did not resolve.
169 #[derive(Eq, PartialEq)]
170 struct AllocIdSnapshot<'a>(Option<AllocationSnapshot<'a>>);
172 impl<'a, Ctx> Snapshot<'a, Ctx> for AllocId
174 Ctx: SnapshotContext<'a>,
176 type Item = AllocIdSnapshot<'a>;
178 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
// Resolve through the context; ids that don't resolve snapshot to `None`.
179 AllocIdSnapshot(ctx.resolve(self).map(|alloc| alloc.snapshot(ctx)))
// Pointer: offset and tag carry no `AllocId`, so they are copied verbatim.
// NOTE(review): the `alloc_id` field of the invocation appears elided in this
// view — confirm against full source.
183 impl_snapshot_for!(struct Pointer {
185 offset -> *offset, // just copy offset verbatim
186 tag -> *tag, // just copy tag
189 impl<'a, Ctx> Snapshot<'a, Ctx> for Scalar
191 Ctx: SnapshotContext<'a>,
193 type Item = Scalar<(), AllocIdSnapshot<'a>>;
195 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
// Pointers get their `AllocId` erased via snapshotting; raw bits are copied.
197 Scalar::Ptr(p) => Scalar::Ptr(p.snapshot(ctx)),
198 Scalar::Raw { size, data } => Scalar::Raw { data: *data, size: *size },
// Snapshot impls generated via `impl_snapshot_for!` for `ScalarMaybeUndef`
// and `MemPlace`. NOTE(review): most of the surrounding invocation lines
// (variants, remaining fields, closing delimiters) are elided in this view.
204 enum ScalarMaybeUndef {
218 impl_snapshot_for!(struct MemPlace {
221 align -> *align, // just copy alignment verbatim
224 impl<'a, Ctx> Snapshot<'a, Ctx> for Place
226 Ctx: SnapshotContext<'a>,
228 type Item = Place<(), AllocIdSnapshot<'a>>;
230 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
// A pointer place snapshots its `MemPlace`; a local place is just copied —
// frame index and local index contain no `AllocId`s.
232 Place::Ptr(p) => Place::Ptr(p.snapshot(ctx)),
234 Place::Local { frame, local } => Place::Local { frame: *frame, local: *local },
261 impl<'a, Ctx> Snapshot<'a, Ctx> for Relocations
263 Ctx: SnapshotContext<'a>,
265 type Item = Relocations<(), AllocIdSnapshot<'a>>;
267 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
// Map each relocation's target id to its snapshot, keeping offsets. `map`
// preserves iteration order, which `from_presorted` presumably relies on —
// confirm against its documentation.
268 Relocations::from_presorted(
269 self.iter().map(|(size, ((), id))| (*size, ((), id.snapshot(ctx)))).collect(),
/// `AllocId`-invariant view of an `Allocation`, comparable with `==`.
/// NOTE(review): some field lines (e.g. bytes/size/align) are elided here.
274 #[derive(Eq, PartialEq)]
275 struct AllocationSnapshot<'a> {
277 relocations: Relocations<(), AllocIdSnapshot<'a>>,
278 undef_mask: &'a UndefMask,
281 mutability: &'a Mutability,
284 impl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation
286 Ctx: SnapshotContext<'a>,
288 type Item = AllocationSnapshot<'a>;
290 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
291 let Allocation { size, align, mutability, extra: (), .. } = self;
293 let all_bytes = 0..self.len();
294 // This 'inspect' is okay since following access respects undef and relocations. This does
295 // influence interpreter execution, but only to detect the error of cycles in evaluation
297 let bytes = self.inspect_with_undef_and_ptr_outside_interpreter(all_bytes);
299 let undef_mask = self.undef_mask();
300 let relocations = self.relocations();
// Relocations are the only part containing `AllocId`s and thus need a real
// snapshot; the other fields are borrowed or copied directly.
308 relocations: relocations.snapshot(ctx),
/// `AllocId`-invariant view of a stack `Frame`, comparable with `==`.
313 #[derive(Eq, PartialEq)]
314 struct FrameSnapshot<'a, 'tcx> {
315 instance: ty::Instance<'tcx>,
317 return_to_block: &'a StackPopCleanup,
318 return_place: Option<Place<(), AllocIdSnapshot<'a>>>,
319 locals: IndexVec<mir::Local, LocalValue<(), AllocIdSnapshot<'a>>>,
320 block: Option<mir::BasicBlock>,
324 impl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>
326 Ctx: SnapshotContext<'a>,
328 type Item = FrameSnapshot<'a, 'tcx>;
330 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
// NOTE(review): the frame destructuring and most field initializers are
// elided in this view. The `AllocId`-carrying parts visible here (return
// place, locals) go through their `Snapshot` impls.
349 return_place: return_place.map(|r| r.snapshot(ctx)),
350 locals: locals.iter().map(|local| local.snapshot(ctx)).collect(),
355 impl<'a, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a LocalState<'tcx>
357 Ctx: SnapshotContext<'a>,
359 type Item = LocalValue<(), AllocIdSnapshot<'a>>;
361 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
// `layout` is explicitly ignored — only `value` participates in the snapshot.
362 let LocalState { value, layout: _ } = self;
// The CTFE `Memory` serves as the snapshot context: ids resolve through
// `get_raw`, with any error discarded via `.ok()` (yielding `None`).
367 impl<'b, 'mir, 'tcx> SnapshotContext<'b>
368 for Memory<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>
370 fn resolve(&'b self, id: &AllocId) -> Option<&'b Allocation> {
371 self.get_raw(*id).ok()
375 /// The virtual machine state during const-evaluation at a given point in time.
376 /// We assume the `CompileTimeInterpreter` has no interesting extra state that
377 /// is worth considering here.
378 #[derive(HashStable)]
379 struct InterpSnapshot<'mir, 'tcx> {
380 // Not hashing memory: Avoid hashing memory all the time during execution
// `memory` is skipped by `HashStable` but still influences equality: the
// `PartialEq` impl below resolves allocations through it when comparing.
381 #[stable_hasher(ignore)]
382 memory: Memory<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
383 stack: Vec<Frame<'mir, 'tcx>>,
// NOTE(review): `impl InterpSnapshot<'mir, 'tcx>` with undeclared lifetimes
// (and `&'b self` below) suggests the crate enables `in_band_lifetimes` —
// confirm at the crate root.
386 impl InterpSnapshot<'mir, 'tcx> {
// Constructor (its `fn new(` line is elided here): clones the entire memory
// and stack — deliberately expensive, done only after a hash collision.
388 memory: &Memory<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>,
389 stack: &[Frame<'mir, 'tcx>],
391 InterpSnapshot { memory: memory.clone(), stack: stack.into() }
394 // Used to compare two snapshots
395 fn snapshot(&'b self) -> Vec<FrameSnapshot<'b, 'tcx>> {
396 // Start with the stack, iterate and recursively snapshot
397 self.stack.iter().map(|frame| frame.snapshot(&self.memory)).collect()
401 impl<'mir, 'tcx> Hash for InterpSnapshot<'mir, 'tcx> {
402 fn hash<H: Hasher>(&self, state: &mut H) {
403 // Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2)
404 let mut hcx = self.memory.tcx.get_stable_hashing_context();
405 let mut hasher = StableHasher::new();
406 self.hash_stable(&mut hcx, &mut hasher);
// Fold the stable 64-bit digest into whatever hasher the caller supplied.
407 hasher.finish::<u64>().hash(state)
// `Eq` marker: equality is delegated to the full snapshot comparison in the
// `PartialEq` impl below.
411 impl<'mir, 'tcx> Eq for InterpSnapshot<'mir, 'tcx> {}
413 impl<'mir, 'tcx> PartialEq for InterpSnapshot<'mir, 'tcx> {
414 fn eq(&self, other: &Self) -> bool {
415 // FIXME: This looks to be a *ridiculously expensive* comparison operation.
416 // Doesn't this make tons of copies? Either `snapshot` is very badly named,
// Build both `AllocId`-invariant views and compare them structurally.
418 self.snapshot() == other.snapshot()