1 //! This module contains the machinery necessary to detect infinite loops
2 //! during const-evaluation by taking snapshots of the state of the interpreter
3 //! at regular intervals.
5 // This lives in `interpret` because it needs access to all sorts of private state. However,
6 // it is not used by the general miri engine, just by CTFE.
8 use std::hash::{Hash, Hasher};
10 use rustc::ich::StableHashingContextProvider;
12 use rustc::mir::interpret::{
13 AllocId, Pointer, Scalar,
14 Relocations, Allocation, UndefMask,
15 InterpResult, InterpError,
18 use rustc::ty::{self, TyCtxt};
19 use rustc::ty::layout::Align;
20 use rustc_data_structures::fx::FxHashSet;
21 use rustc_data_structures::indexed_vec::IndexVec;
22 use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
23 use syntax::ast::Mutability;
24 use syntax::source_map::Span;
26 use super::eval_context::{LocalState, StackPopCleanup};
27 use super::{Frame, Memory, Operand, MemPlace, Place, Immediate, ScalarMaybeUndef, LocalValue};
28 use crate::const_eval::CompileTimeInterpreter;
31 pub(crate) struct InfiniteLoopDetector<'a, 'mir, 'tcx: 'a + 'mir> {
32 /// The set of all `EvalSnapshot` *hashes* observed by this detector.
34 /// When a collision occurs in this table, we store the full snapshot in
36 hashes: FxHashSet<u64>,
38 /// The set of all `EvalSnapshot`s observed by this detector.
40 /// An `EvalSnapshot` will only be fully cloned once it has caused a
41 /// collision in `hashes`. As a result, the detector must observe at least
42 /// *two* full cycles of an infinite loop before it triggers.
43 snapshots: FxHashSet<EvalSnapshot<'a, 'mir, 'tcx>>,
46 impl<'a, 'mir, 'tcx> InfiniteLoopDetector<'a, 'mir, 'tcx>
48 pub fn observe_and_analyze<'b>(
50 tcx: TyCtxt<'b, 'tcx, 'tcx>,
52 memory: &Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
53 stack: &[Frame<'mir, 'tcx>],
54 ) -> InterpResult<'tcx, ()> {
55 // Compute stack's hash before copying anything
56 let mut hcx = tcx.get_stable_hashing_context();
57 let mut hasher = StableHasher::<u64>::new();
58 stack.hash_stable(&mut hcx, &mut hasher);
59 let hash = hasher.finish();
61 // Check if we know that hash already
62 if self.hashes.is_empty() {
63 // FIXME(#49980): make this warning a lint
64 tcx.sess.span_warn(span,
65 "Constant evaluating a complex constant, this might take some time");
67 if self.hashes.insert(hash) {
72 // We need to make a full copy. NOW things that to get really expensive.
73 info!("snapshotting the state of the interpreter");
75 if self.snapshots.insert(EvalSnapshot::new(memory, stack)) {
76 // Spurious collision or first cycle
81 Err(InterpError::InfiniteLoop.into())
85 trait SnapshotContext<'a> {
86 fn resolve(&'a self, id: &AllocId) -> Option<&'a Allocation>;
89 /// Taking a snapshot of the evaluation context produces a view of
90 /// the state of the interpreter that is invariant to `AllocId`s.
91 trait Snapshot<'a, Ctx: SnapshotContext<'a>> {
93 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item;
96 macro_rules! __impl_snapshot_field {
97 ($field:ident, $ctx:expr) => ($field.snapshot($ctx));
98 ($field:ident, $ctx:expr, $delegate:expr) => ($delegate);
101 // This assumes the type has two type parameters, first for the tag (set to `()`),
103 macro_rules! impl_snapshot_for {
104 (enum $enum_name:ident {
105 $( $variant:ident $( ( $($field:ident $(-> $delegate:expr)?),* ) )? ),* $(,)?
108 impl<'a, Ctx> self::Snapshot<'a, Ctx> for $enum_name
109 where Ctx: self::SnapshotContext<'a>,
111 type Item = $enum_name<(), AllocIdSnapshot<'a>>;
114 fn snapshot(&self, __ctx: &'a Ctx) -> Self::Item {
117 $enum_name::$variant $( ( $(ref $field),* ) )? => {
118 $enum_name::$variant $(
119 ( $( __impl_snapshot_field!($field, __ctx $(, $delegate)?) ),* )
128 (struct $struct_name:ident { $($field:ident $(-> $delegate:expr)?),* $(,)? }) => {
129 impl<'a, Ctx> self::Snapshot<'a, Ctx> for $struct_name
130 where Ctx: self::SnapshotContext<'a>,
132 type Item = $struct_name<(), AllocIdSnapshot<'a>>;
135 fn snapshot(&self, __ctx: &'a Ctx) -> Self::Item {
141 $( $field: __impl_snapshot_field!($field, __ctx $(, $delegate)?) ),*
148 impl<'a, Ctx, T> Snapshot<'a, Ctx> for Option<T>
149 where Ctx: SnapshotContext<'a>,
152 type Item = Option<<T as Snapshot<'a, Ctx>>::Item>;
154 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
156 Some(x) => Some(x.snapshot(ctx)),
162 #[derive(Eq, PartialEq)]
163 struct AllocIdSnapshot<'a>(Option<AllocationSnapshot<'a>>);
165 impl<'a, Ctx> Snapshot<'a, Ctx> for AllocId
166 where Ctx: SnapshotContext<'a>,
168 type Item = AllocIdSnapshot<'a>;
170 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
171 AllocIdSnapshot(ctx.resolve(self).map(|alloc| alloc.snapshot(ctx)))
175 impl_snapshot_for!(struct Pointer {
177 offset -> *offset, // just copy offset verbatim
178 tag -> *tag, // just copy tag
181 impl<'a, Ctx> Snapshot<'a, Ctx> for Scalar
182 where Ctx: SnapshotContext<'a>,
184 type Item = Scalar<(), AllocIdSnapshot<'a>>;
186 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
188 Scalar::Ptr(p) => Scalar::Ptr(p.snapshot(ctx)),
189 Scalar::Raw{ size, data } => Scalar::Raw {
197 impl_snapshot_for!(enum ScalarMaybeUndef {
202 impl_stable_hash_for!(struct crate::interpret::MemPlace {
207 impl_snapshot_for!(struct MemPlace {
210 align -> *align, // just copy alignment verbatim
213 impl_stable_hash_for!(enum crate::interpret::Place {
215 Local { frame, local },
217 impl<'a, Ctx> Snapshot<'a, Ctx> for Place
218 where Ctx: SnapshotContext<'a>,
220 type Item = Place<(), AllocIdSnapshot<'a>>;
222 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
224 Place::Ptr(p) => Place::Ptr(p.snapshot(ctx)),
226 Place::Local{ frame, local } => Place::Local{
234 impl_stable_hash_for!(enum crate::interpret::Immediate {
238 impl_snapshot_for!(enum Immediate {
243 impl_stable_hash_for!(enum crate::interpret::Operand {
247 impl_snapshot_for!(enum Operand {
252 impl_stable_hash_for!(enum crate::interpret::LocalValue {
257 impl_snapshot_for!(enum LocalValue {
263 impl<'a, Ctx> Snapshot<'a, Ctx> for Relocations
264 where Ctx: SnapshotContext<'a>,
266 type Item = Relocations<(), AllocIdSnapshot<'a>>;
268 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
269 Relocations::from_presorted(self.iter()
270 .map(|(size, ((), id))| (*size, ((), id.snapshot(ctx))))
275 #[derive(Eq, PartialEq)]
276 struct AllocationSnapshot<'a> {
278 relocations: Relocations<(), AllocIdSnapshot<'a>>,
279 undef_mask: &'a UndefMask,
281 mutability: &'a Mutability,
284 impl<'a, Ctx> Snapshot<'a, Ctx> for &'a Allocation
285 where Ctx: SnapshotContext<'a>,
287 type Item = AllocationSnapshot<'a>;
289 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
290 let Allocation { bytes, relocations, undef_mask, align, mutability, extra: () } = self;
297 relocations: relocations.snapshot(ctx),
302 impl_stable_hash_for!(enum crate::interpret::eval_context::StackPopCleanup {
307 #[derive(Eq, PartialEq)]
308 struct FrameSnapshot<'a, 'tcx: 'a> {
309 instance: &'a ty::Instance<'tcx>,
311 return_to_block: &'a StackPopCleanup,
312 return_place: Option<Place<(), AllocIdSnapshot<'a>>>,
313 locals: IndexVec<mir::Local, LocalValue<(), AllocIdSnapshot<'a>>>,
314 block: &'a mir::BasicBlock,
318 impl_stable_hash_for!(impl<'mir, 'tcx: 'mir> for struct Frame<'mir, 'tcx> {
323 return_place -> (return_place.as_ref().map(|r| &**r)),
330 impl<'a, 'mir, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a Frame<'mir, 'tcx>
331 where Ctx: SnapshotContext<'a>,
333 type Item = FrameSnapshot<'a, 'tcx>;
335 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
354 return_place: return_place.map(|r| r.snapshot(ctx)),
355 locals: locals.iter().map(|local| local.snapshot(ctx)).collect(),
360 impl<'a, 'tcx, Ctx> Snapshot<'a, Ctx> for &'a LocalState<'tcx>
361 where Ctx: SnapshotContext<'a>,
363 type Item = LocalValue<(), AllocIdSnapshot<'a>>;
365 fn snapshot(&self, ctx: &'a Ctx) -> Self::Item {
366 let LocalState { value, layout: _ } = self;
371 impl_stable_hash_for!(struct LocalState<'tcx> {
376 impl<'a, 'b, 'mir, 'tcx: 'a+'mir> SnapshotContext<'b>
377 for Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>
379 fn resolve(&'b self, id: &AllocId) -> Option<&'b Allocation> {
384 /// The virtual machine state during const-evaluation at a given point in time.
385 /// We assume the `CompileTimeInterpreter` has no interesting extra state that
386 /// is worth considering here.
387 struct EvalSnapshot<'a, 'mir, 'tcx: 'a + 'mir> {
388 memory: Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
389 stack: Vec<Frame<'mir, 'tcx>>,
392 impl<'a, 'mir, 'tcx: 'a + 'mir> EvalSnapshot<'a, 'mir, 'tcx>
395 memory: &Memory<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
396 stack: &[Frame<'mir, 'tcx>]
399 memory: memory.clone(),
404 // Used to compare two snapshots
405 fn snapshot(&'b self)
406 -> Vec<FrameSnapshot<'b, 'tcx>>
408 // Start with the stack, iterate and recursively snapshot
409 self.stack.iter().map(|frame| frame.snapshot(&self.memory)).collect()
414 impl<'a, 'mir, 'tcx> Hash for EvalSnapshot<'a, 'mir, 'tcx>
416 fn hash<H: Hasher>(&self, state: &mut H) {
417 // Implement in terms of hash stable, so that k1 == k2 -> hash(k1) == hash(k2)
418 let mut hcx = self.memory.tcx.get_stable_hashing_context();
419 let mut hasher = StableHasher::<u64>::new();
420 self.hash_stable(&mut hcx, &mut hasher);
421 hasher.finish().hash(state)
425 impl_stable_hash_for!(impl<'tcx, 'b, 'mir> for struct EvalSnapshot<'b, 'mir, 'tcx> {
426 // Not hashing memory: Avoid hashing memory all the time during execution
431 impl<'a, 'mir, 'tcx> Eq for EvalSnapshot<'a, 'mir, 'tcx>
434 impl<'a, 'mir, 'tcx> PartialEq for EvalSnapshot<'a, 'mir, 'tcx>
436 fn eq(&self, other: &Self) -> bool {
437 // FIXME: This looks to be a *ridiculously expensive* comparison operation.
438 // Doesn't this make tons of copies? Either `snapshot` is very badly named,
440 self.snapshot() == other.snapshot()