use rustc::mir;
use rustc::ty::layout::HasTyCtxt;
use rustc::ty::{self, Ty};
use std::borrow::{Borrow, Cow};
use std::collections::hash_map::Entry;
use std::convert::TryFrom;
use std::hash::Hash;

use rustc_data_structures::fx::FxHashMap;

use rustc::mir::AssertMessage;
use rustc_span::source_map::Span;
use rustc_span::symbol::Symbol;

use crate::interpret::{
    self, snapshot, AllocId, Allocation, GlobalId, ImmTy, InterpCx, InterpResult, Memory,
    MemoryKind, OpTy, PlaceTy, Pointer, Scalar,
};

use super::error::*;

impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
    /// Evaluate a const function where all arguments (if any) are zero-sized types.
    /// The evaluation is memoized thanks to the query system.
    ///
    /// Returns `true` if the call has been evaluated.
    fn try_eval_const_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        args: &[OpTy<'tcx>],
    ) -> InterpResult<'tcx, bool> {
        trace!("try_eval_const_fn_call: {:?}", instance);
        // Because `#[track_caller]` adds an implicit non-ZST argument, we also cannot
        // perform this optimization on items tagged with it.
        if instance.def.requires_caller_location(self.tcx()) {
            return Ok(false);
        }
        // For the moment we only do this for functions which take no arguments
        // (or all arguments are ZSTs) so that we don't memoize too much.
        if args.iter().any(|a| !a.layout.is_zst()) {
            return Ok(false);
        }
        let dest = match ret {
            Some((dest, _)) => dest,
            // Don't memoize diverging function calls.
            None => return Ok(false),
        };

        let gid = GlobalId { instance, promoted: None };

        let place = self.const_eval_raw(gid)?;

        self.copy_op(place.into(), dest)?;

        self.return_to_block(ret.map(|r| r.1))?;
        self.dump_place(*dest);
        Ok(true)
    }

    /// "Intercept" a function call to a panic-related function
    /// because we have something special to do for it.
    /// If this returns successfully (`Ok`), the function should just be evaluated normally.
    fn hook_panic_fn(
        &mut self,
        span: Span,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
    ) -> InterpResult<'tcx> {
        let def_id = instance.def_id();
        if Some(def_id) == self.tcx.lang_items().panic_fn()
            || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
        {
            // &'static str
            assert!(args.len() == 1);

            let msg_place = self.deref_operand(args[0])?;
            let msg = Symbol::intern(self.read_str(msg_place)?);
            let span = self.find_closest_untracked_caller_location().unwrap_or(span);
            let (file, line, col) = self.location_triple_for_span(span);
            Err(ConstEvalErrKind::Panic { msg, file, line, col }.into())
        } else {
            Ok(())
        }
    }
}

/// The number of steps between loop detector snapshots.
/// Should be a power of two for performance reasons.
const DETECTOR_SNAPSHOT_PERIOD: isize = 256;

// Extra machine state for CTFE, and the Machine instance.
pub struct CompileTimeInterpreter<'mir, 'tcx> {
    /// When this value is negative, it indicates the number of interpreter
    /// steps *until* the loop detector is enabled. When it is positive, it is
    /// the number of steps after the detector has been enabled modulo the loop
    /// detector period.
    pub(super) steps_since_detector_enabled: isize,

    /// Whether the loop detector is enabled at all (a `const_eval_limit` of 0 disables it).
    pub(super) is_detector_enabled: bool,

    /// Extra state to detect loops.
    pub(super) loop_detector: snapshot::InfiniteLoopDetector<'mir, 'tcx>,
}
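
/// Extra machine state stored in the interpreter's `Memory`.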
#[derive(Copy, Clone, Debug)]
pub struct MemoryExtra {
    /// Whether this machine may read from statics
    pub(super) can_access_statics: bool,
}

impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
    pub(super) fn new(const_eval_limit: usize) -> Self {
        let steps_until_detector_enabled =
            isize::try_from(const_eval_limit).unwrap_or(std::isize::MAX);

        CompileTimeInterpreter {
            loop_detector: Default::default(),
            steps_since_detector_enabled: -steps_until_detector_enabled,
            is_detector_enabled: const_eval_limit != 0,
        }
    }
}
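
// `FxHashMap` already has the shape the interpreter needs for its allocation map,
// so `AllocMap` can be implemented by forwarding to the inherent hash-map methods.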
impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
    #[inline(always)]
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxHashMap::contains_key(self, k)
    }

    #[inline(always)]
    fn insert(&mut self, k: K, v: V) -> Option<V> {
        FxHashMap::insert(self, k, v)
    }

    #[inline(always)]
    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
    {
        FxHashMap::remove(self, k)
    }

    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
        self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()
    }

    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
        match self.get(&k) {
            Some(v) => Ok(v),
            None => {
                vacant()?;
                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
            }
        }
    }

    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
        match self.entry(k) {
            Entry::Occupied(e) => Ok(e.into_mut()),
            Entry::Vacant(e) => {
                let v = vacant()?;
                Ok(e.insert(v))
            }
        }
    }
}
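
/// The interpreter context used for compile-time function evaluation.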
crate type CompileTimeEvalContext<'mir, 'tcx> =
    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;

impl interpret::MayLeak for ! {
    #[inline(always)]
    fn may_leak(self) -> bool {
        // `self` is uninhabited
        self
    }
}

impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
    type MemoryKinds = !;
    type PointerTag = ();
    type ExtraFnVal = !;

    type FrameExtra = ();
    type MemoryExtra = MemoryExtra;
    type AllocExtra = ();

    type MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>;

    const STATIC_KIND: Option<!> = None; // no copying of statics allowed

    // We do not check for alignment to avoid having to carry an `Align`
    // in `ConstValue::ByRef`.
    const CHECK_ALIGN: bool = false;

    #[inline(always)]
    fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
        false // for now, we don't enforce validity
    }
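
    /// Either resolve the MIR body to execute for `instance`, or handle the call
    /// entirely here (memoized `const fn` calls, hooked panic functions) and
    /// return `None` so that no body is run.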
    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        span: Span,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: Option<mir::BasicBlock>, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
        debug!("find_mir_or_eval_fn: {:?}", instance);

        // Only check non-glue functions
        if let ty::InstanceDef::Item(def_id) = instance.def {
            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const
            // at all.
            if ecx.tcx.is_const_fn_raw(def_id) {
                // If this function is a `const fn` then under certain circumstances we
                // can evaluate the call via the query system, thus memoizing all future calls.
                if ecx.try_eval_const_fn_call(instance, ret, args)? {
                    return Ok(None);
                }
            } else {
                // Some functions we support even if they are non-const -- but avoid testing
                // that for const fn!
                ecx.hook_panic_fn(span, instance, args)?;
                // We certainly do *not* want to actually call the fn
                // though, so be sure we return here.
                throw_unsup_format!("calling non-const function `{}`", instance)
            }
        }
        // This is a const fn. Call it.
        Ok(Some(match ecx.load_mir(instance.def, None) {
            Ok(body) => *body,
            Err(err) => {
                if let err_unsup!(NoMirFor(did)) = err.kind {
                    let path = ecx.tcx.def_path_str(did);
                    return Err(ConstEvalErrKind::NeedsRfc(format!(
                        "calling extern function `{}`",
                        path
                    ))
                    .into());
                }
                return Err(err);
            }
        }))
    }
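
    /// `ExtraFnVal` is the never type here, so this hook can never actually be
    /// reached; matching on the uninhabited value makes that explicit.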
    fn call_extra_fn(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        fn_val: !,
        _args: &[OpTy<'tcx>],
        _ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        match fn_val {}
    }
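
    /// Intrinsics are handled by the shared emulation code; anything it does not
    /// cover is rejected as not yet supported in const contexts.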
    fn call_intrinsic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        span: Span,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        if ecx.emulate_intrinsic(span, instance, args, ret)? {
            return Ok(());
        }
        // An intrinsic that we do not support
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());
        Err(ConstEvalErrKind::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into())
    }
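
    /// Turn a failing `Assert` terminator into a const-eval error, evaluating the
    /// bounds-check operands so that the message can report concrete values.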
    fn assert_panic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        use rustc::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<u64>`.
        let err = match msg {
            BoundsCheck { ref len, ref index } => {
                let len = ecx
                    .read_immediate(ecx.eval_operand(len, None)?)
                    .expect("can't eval len")
                    .to_scalar()?
                    .to_machine_usize(&*ecx)?;
                let index = ecx
                    .read_immediate(ecx.eval_operand(index, None)?)
                    .expect("can't eval index")
                    .to_scalar()?
                    .to_machine_usize(&*ecx)?;
                BoundsCheck { len, index }
            }
            Overflow(op) => Overflow(*op),
            OverflowNeg => OverflowNeg,
            DivisionByZero => DivisionByZero,
            RemainderByZero => RemainderByZero,
            ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
            ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
        };
        Err(ConstEvalErrKind::AssertFailure(err).into())
    }
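
    /// CTFE pointers are symbolic (an `AllocId` plus offset), so they cannot be
    /// observed as concrete integers.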
    fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> {
        Err(ConstEvalErrKind::NeedsRfc("pointer-to-integer cast".to_string()).into())
    }
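
    /// For the same reason, arithmetic and comparison on pointers is rejected.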
    fn binary_ptr_op(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _bin_op: mir::BinOp,
        _left: ImmTy<'tcx>,
        _right: ImmTy<'tcx>,
    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
        Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
    }
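
    /// This machine stores no extra data on allocations and uses `()` as its
    /// pointer tag, so allocations can be passed through unchanged.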
    fn init_allocation_extra<'b>(
        _memory_extra: &MemoryExtra,
        _id: AllocId,
        alloc: Cow<'b, Allocation>,
        _kind: Option<MemoryKind<!>>,
    ) -> (Cow<'b, Allocation<Self::PointerTag>>, Self::PointerTag) {
        // We do not use a tag so we can just cheaply forward the allocation
        (alloc, ())
    }

    #[inline(always)]
    fn tag_static_base_pointer(_memory_extra: &MemoryExtra, _id: AllocId) -> Self::PointerTag {
        ()
    }
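
    /// `box` expressions allocate on the heap, which is not available during
    /// const evaluation.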
    fn box_alloc(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _dest: PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
    }
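
    /// Called before each terminator. Once the step budget is used up, this feeds
    /// the infinite-loop detector, snapshotting the machine state every
    /// `DETECTOR_SNAPSHOT_PERIOD` steps.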
    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        if !ecx.machine.is_detector_enabled {
            return Ok(());
        }

        {
            let steps = &mut ecx.machine.steps_since_detector_enabled;

            *steps += 1;
            if *steps < 0 {
                // We are still within the step limit; no need to look for loops yet.
                return Ok(());
            }

            // Only take a snapshot every `DETECTOR_SNAPSHOT_PERIOD` steps.
            *steps %= DETECTOR_SNAPSHOT_PERIOD;
            if *steps != 0 {
                return Ok(());
            }
        }

        let span = ecx.frame().span;
        ecx.machine.loop_detector.observe_and_analyze(*ecx.tcx, span, &ecx.memory, &ecx.stack[..])
    }

    #[inline(always)]
    fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        Ok(())
    }
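
    /// Gate reads from statics behind `MemoryExtra::can_access_statics`.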
    fn before_access_static(
        memory_extra: &MemoryExtra,
        _allocation: &Allocation,
    ) -> InterpResult<'tcx> {
        if memory_extra.can_access_statics {
            Ok(())
        } else {
            Err(ConstEvalErrKind::ConstAccessesStatic.into())
        }
    }
}

// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
// at the bottom of this file.