use rustc_middle::mir;
use rustc_middle::ty::layout::HasTyCtxt;
use rustc_middle::ty::InstanceDef;
use rustc_middle::ty::{self, Ty};
use std::borrow::Borrow;
use std::collections::hash_map::Entry;
use std::hash::Hash;

use rustc_data_structures::fx::FxHashMap;

use rustc_ast::Mutability;
use rustc_hir::def_id::DefId;
use rustc_middle::mir::AssertMessage;
use rustc_session::Limit;
use rustc_span::symbol::{sym, Symbol};

use crate::interpret::{
    self, compile_time_machine, AllocId, Allocation, Frame, GlobalId, ImmTy, InterpCx,
    InterpResult, Memory, OpTy, PlaceTy, Pointer, Scalar,
};

use super::error::*;

impl<'mir, 'tcx> InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>> {
    /// Evaluate a const function where all arguments (if any) are zero-sized types.
    /// The evaluation is memoized thanks to the query system.
    ///
    /// Returns `true` if the call has been evaluated.
    fn try_eval_const_fn_call(
        &mut self,
        instance: ty::Instance<'tcx>,
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        args: &[OpTy<'tcx>],
    ) -> InterpResult<'tcx, bool> {
        trace!("try_eval_const_fn_call: {:?}", instance);
        // Because `#[track_caller]` adds an implicit non-ZST argument, we also cannot
        // perform this optimization on items tagged with it.
        if instance.def.requires_caller_location(self.tcx()) {
            return Ok(false);
        }
        // Only memoize intrinsics. This was added in #79594 while adding the `const_allocate` intrinsic.
        // We only memoize intrinsics because it would be unsound to memoize functions
        // which might interact with the heap.
        // Additionally, the `const_allocate` intrinsic is impure and thus must not be memoized;
        // it happens not to be, because it takes non-ZST arguments.
        if !matches!(instance.def, InstanceDef::Intrinsic(_)) {
            return Ok(false);
        }
        // For the moment we only do this for functions which take no arguments
        // (or all arguments are ZSTs) so that we don't memoize too much.
        if args.iter().any(|a| !a.layout.is_zst()) {
            return Ok(false);
        }

        let dest = match ret {
            Some((dest, _)) => dest,
            // Don't memoize diverging function calls.
            None => return Ok(false),
        };

        let gid = GlobalId { instance, promoted: None };

        let place = self.eval_to_allocation(gid)?;

        self.copy_op(place.into(), dest)?;

        self.return_to_block(ret.map(|r| r.1))?;
        trace!("{:?}", self.dump_place(*dest));
        Ok(true)
    }
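
    // A minimal sketch of the memoization idea above, assuming a hypothetical
    // `eval_uncached` worker (the real implementation goes through the query system via
    // `eval_to_allocation`). Keying the cache purely on `instance` is only sound because
    // every argument is a ZST and therefore carries no data:
    //
    //     fn eval_memoized<'tcx>(
    //         cache: &mut FxHashMap<ty::Instance<'tcx>, ConstValue<'tcx>>,
    //         instance: ty::Instance<'tcx>,
    //     ) -> ConstValue<'tcx> {
    //         *cache.entry(instance).or_insert_with(|| eval_uncached(instance))
    //     }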
72 /// "Intercept" a function call to a panic-related function
73 /// because we have something special to do for it.
74 /// If this returns successfully (`Ok`), the function should just be evaluated normally.
77 instance: ty::Instance<'tcx>,
79 ) -> InterpResult<'tcx> {
80 let def_id = instance.def_id();
81 if Some(def_id) == self.tcx.lang_items().panic_fn()
82 || Some(def_id) == self.tcx.lang_items().panic_str()
83 || Some(def_id) == self.tcx.lang_items().begin_panic_fn()
86 assert!(args.len() == 1);
88 let msg_place = self.deref_operand(args[0])?;
89 let msg = Symbol::intern(self.read_str(msg_place)?);
90 let span = self.find_closest_untracked_caller_location();
91 let (file, line, col) = self.location_triple_for_span(span);
92 Err(ConstEvalErrKind::Panic { msg, file, line, col }.into())
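
// A panic reached during CTFE is reported as a structured error instead of actually
// unwinding. A minimal example that takes the `hook_panic_fn` path, assuming a toolchain
// where panicking in constants is permitted (e.g. the unstable `const_panic` feature):
//
//     const _: () = panic!("oops"); // rejected with the message, file, line, and column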

/// Extra machine state for CTFE, and the Machine instance.
pub struct CompileTimeInterpreter<'mir, 'tcx> {
    /// For now, the number of terminators that can be evaluated before we throw a resource
    /// exhaustion error.
    ///
    /// Setting this to `0` disables the limit and allows the interpreter to run forever.
    pub steps_remaining: usize,

    /// The virtual call stack.
    pub(crate) stack: Vec<Frame<'mir, 'tcx, (), ()>>,
}

#[derive(Copy, Clone, Debug)]
pub struct MemoryExtra {
    /// We need to make sure consts never point to anything mutable, even recursively. That is
    /// relied on for pattern matching on consts with references.
    /// To achieve this, two pieces have to work together:
    /// * Interning makes everything outside of statics immutable.
    /// * Pointers to allocations inside of statics can never leak outside, to a non-static global.
    /// This boolean here controls the second part.
    pub(super) can_access_statics: bool,
}
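
// A minimal illustration of the invariant above: when `can_access_statics` is unset
// (as it is while evaluating a `const` initializer), reads of statics are rejected:
//
//     static S: i32 = 0;
//     const C: i32 = S; // error: constants cannot refer to statics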

impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
    pub(super) fn new(const_eval_limit: Limit) -> Self {
        CompileTimeInterpreter { steps_remaining: const_eval_limit.0, stack: Vec::new() }
    }
}

impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
    fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
    where
        K: Borrow<Q>,
    {
        FxHashMap::contains_key(self, k)
    }

    fn insert(&mut self, k: K, v: V) -> Option<V> {
        FxHashMap::insert(self, k, v)
    }

    fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
    where
        K: Borrow<Q>,
    {
        FxHashMap::remove(self, k)
    }

    fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
        self.iter().filter_map(move |(k, v)| f(k, &*v)).collect()
    }

    fn get_or<E>(&self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&V, E> {
        match self.get(&k) {
            Some(v) => Ok(v),
            None => {
                vacant()?;
                bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
            }
        }
    }

    fn get_mut_or<E>(&mut self, k: K, vacant: impl FnOnce() -> Result<V, E>) -> Result<&mut V, E> {
        match self.entry(k) {
            Entry::Occupied(e) => Ok(e.into_mut()),
            Entry::Vacant(e) => {
                let v = vacant()?;
                Ok(e.insert(v))
            }
        }
    }
}

crate type CompileTimeEvalContext<'mir, 'tcx> =
    InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
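
// Note on `get_or` above: reads should only ever hit allocations that are already in the
// map, so a vacant entry on the read path is a compiler bug (after `vacant` has had a
// chance to surface an error); only `get_mut_or` may create an entry on demand.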

impl interpret::MayLeak for ! {
    fn may_leak(self) -> bool {
        // `self` is uninhabited, so this can never be called; returning `self` coerces `!` to `bool`.
        self
    }
}

impl<'mir, 'tcx: 'mir> CompileTimeEvalContext<'mir, 'tcx> {
    fn guaranteed_eq(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int { .. }, Scalar::Int { .. }) => a == b,
            // Equality with integers can never be known for sure.
            (Scalar::Int { .. }, Scalar::Ptr(_)) | (Scalar::Ptr(_), Scalar::Int { .. }) => false,
            // FIXME: return `true` when both sides are the same pointer, *except* that
            // some things (like functions and vtables) do not have stable addresses,
            // so we need to be careful around them (see e.g. #73722).
            (Scalar::Ptr(_), Scalar::Ptr(_)) => false,
        }
    }

    fn guaranteed_ne(&mut self, a: Scalar, b: Scalar) -> bool {
        match (a, b) {
            // Comparisons between integers are always known.
            (Scalar::Int(_), Scalar::Int(_)) => a != b,
            // Comparisons of abstract pointers with null pointers are known if the pointer
            // is in bounds: an in-bounds pointer can never be null.
            // Inequality with integers other than null can never be known for sure.
            (Scalar::Int(int), Scalar::Ptr(ptr)) | (Scalar::Ptr(ptr), Scalar::Int(int)) => {
                int.is_null() && !self.memory.ptr_may_be_null(ptr)
            }
            // FIXME: return `true` for at least some comparisons where we can reliably
            // determine the result of runtime inequality tests at compile-time.
            // Examples include comparison of addresses in different static items.
            (Scalar::Ptr(_), Scalar::Ptr(_)) => false,
        }
    }
}
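
// These two helpers back the `ptr_guaranteed_eq`/`ptr_guaranteed_ne` intrinsics handled
// in `call_intrinsic` below. A minimal sketch exercising the "null vs. in-bounds pointer"
// arm, assuming the unstable `const_raw_ptr_comparison` feature (which exposes the
// intrinsics as `<*const T>::guaranteed_eq`/`guaranteed_ne`):
//
//     const NE: bool = {
//         let x = 0u8;
//         (&x as *const u8).guaranteed_ne(core::ptr::null())
//     };
//
// CTFE can soundly answer `true` here because an in-bounds pointer is never null; two
// distinct abstract pointers, by contrast, conservatively yield `false` ("not known").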

impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
    compile_time_machine!(<'mir, 'tcx>);

    type MemoryExtra = MemoryExtra;

    fn find_mir_or_eval_fn(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: Option<mir::BasicBlock>, // unwinding is not supported in consts
    ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
        debug!("find_mir_or_eval_fn: {:?}", instance);

        // Only check non-glue functions
        if let ty::InstanceDef::Item(def) = instance.def {
            // Execution might have wandered off into other crates, so we cannot do a stability-
            // sensitive check here. But we can at least rule out functions that are not const
            // at all.
            if ecx.tcx.is_const_fn_raw(def.did) {
                // If this function is a `const fn`, then under certain circumstances we
                // can evaluate the call via the query system, thus memoizing all future calls.
                if ecx.try_eval_const_fn_call(instance, ret, args)? {
                    return Ok(None);
                }
            } else {
                // Some functions we support even if they are non-const -- but avoid testing
                // that for const fn!
                ecx.hook_panic_fn(instance, args)?;
                // We certainly do *not* want to actually call the fn
                // though, so be sure we return here.
                throw_unsup_format!("calling non-const function `{}`", instance)
            }
        }
        // This is a const fn. Call it.
        Ok(Some(match ecx.load_mir(instance.def, None) {
            Ok(body) => body,
            Err(err) => {
                if let err_unsup!(NoMirFor(did)) = err.kind {
                    let path = ecx.tcx.def_path_str(did);
                    return Err(ConstEvalErrKind::NeedsRfc(format!(
                        "calling extern function `{}`",
                        path
                    ))
                    .into());
                }
                return Err(err);
            }
        }))
    }

    fn call_intrinsic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        instance: ty::Instance<'tcx>,
        args: &[OpTy<'tcx>],
        ret: Option<(PlaceTy<'tcx>, mir::BasicBlock)>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        // Shared intrinsics.
        if ecx.emulate_intrinsic(instance, args, ret)? {
            return Ok(());
        }
        let intrinsic_name = ecx.tcx.item_name(instance.def_id());

        // CTFE-specific intrinsics.
        let (dest, ret) = match ret {
            None => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
            Some(p) => p,
        };
        match intrinsic_name {
            sym::ptr_guaranteed_eq | sym::ptr_guaranteed_ne => {
                let a = ecx.read_immediate(args[0])?.to_scalar()?;
                let b = ecx.read_immediate(args[1])?.to_scalar()?;
                let cmp = if intrinsic_name == sym::ptr_guaranteed_eq {
                    ecx.guaranteed_eq(a, b)
                } else {
                    ecx.guaranteed_ne(a, b)
                };
                ecx.write_scalar(Scalar::from_bool(cmp), dest)?;
            }
            _ => {
                return Err(ConstEvalErrKind::NeedsRfc(format!(
                    "calling intrinsic `{}`",
                    intrinsic_name
                ))
                .into());
            }
        }

        ecx.go_to_block(ret);
        Ok(())
    }

    fn assert_panic(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        msg: &AssertMessage<'tcx>,
        _unwind: Option<mir::BasicBlock>,
    ) -> InterpResult<'tcx> {
        use rustc_middle::mir::AssertKind::*;
        // Convert `AssertKind<Operand>` to `AssertKind<ConstInt>`.
        let eval_to_int =
            |op| ecx.read_immediate(ecx.eval_operand(op, None)?).map(|x| x.to_const_int());
        let err = match msg {
            BoundsCheck { ref len, ref index } => {
                let len = eval_to_int(len)?;
                let index = eval_to_int(index)?;
                BoundsCheck { len, index }
            }
            Overflow(op, l, r) => Overflow(*op, eval_to_int(l)?, eval_to_int(r)?),
            OverflowNeg(op) => OverflowNeg(eval_to_int(op)?),
            DivisionByZero(op) => DivisionByZero(eval_to_int(op)?),
            RemainderByZero(op) => RemainderByZero(eval_to_int(op)?),
            ResumedAfterReturn(generator_kind) => ResumedAfterReturn(*generator_kind),
            ResumedAfterPanic(generator_kind) => ResumedAfterPanic(*generator_kind),
        };
        Err(ConstEvalErrKind::AssertFailure(err).into())
    }
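
    // As a minimal illustration, a failing MIR assertion such as an out-of-bounds index
    // arrives here as a `BoundsCheck` message and is reported with the concrete values:
    //
    //     const _: i32 = [1, 2, 3][4]; // error: index out of bounds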

    fn ptr_to_int(_mem: &Memory<'mir, 'tcx, Self>, _ptr: Pointer) -> InterpResult<'tcx, u64> {
        Err(ConstEvalErrKind::NeedsRfc("pointer-to-integer cast".to_string()).into())
    }

    fn binary_ptr_op(
        _ecx: &InterpCx<'mir, 'tcx, Self>,
        _bin_op: mir::BinOp,
        _left: ImmTy<'tcx>,
        _right: ImmTy<'tcx>,
    ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
        Err(ConstEvalErrKind::NeedsRfc("pointer arithmetic or comparison".to_string()).into())
    }

    fn box_alloc(
        _ecx: &mut InterpCx<'mir, 'tcx, Self>,
        _dest: PlaceTy<'tcx>,
    ) -> InterpResult<'tcx> {
        Err(ConstEvalErrKind::NeedsRfc("heap allocations via `box` keyword".to_string()).into())
    }

    fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
        // The step limit has already been hit in a previous call to `before_terminator`.
        if ecx.machine.steps_remaining == 0 {
            return Ok(());
        }

        ecx.machine.steps_remaining -= 1;
        if ecx.machine.steps_remaining == 0 {
            throw_exhaust!(StepLimitReached)
        }

        Ok(())
    }
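
    // A minimal illustration of the step limit, assuming the unstable `const_eval_limit`
    // attribute (which controls the value `steps_remaining` starts from):
    //
    //     #![feature(const_eval_limit)]
    //     #![const_eval_limit = "100"]
    //
    //     const _: u32 = { let mut n = 0; while n < 1_000_000 { n += 1; } n };
    //     // CTFE aborts with `StepLimitReached` long before the loop finishes.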

    fn init_frame_extra(
        ecx: &mut InterpCx<'mir, 'tcx, Self>,
        frame: Frame<'mir, 'tcx>,
    ) -> InterpResult<'tcx, Frame<'mir, 'tcx>> {
        // Enforce the stack size limit. Add 1 because this is run before the new frame is pushed.
        if !ecx.tcx.sess.recursion_limit().value_within_limit(ecx.stack().len() + 1) {
            throw_exhaust!(StackFrameLimitReached)
        } else {
            Ok(frame)
        }
    }
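
    // Correspondingly, deep recursion exhausts the frame limit rather than the step limit;
    // a minimal sketch (the default `recursion_limit` is 128):
    //
    //     const fn rec(n: u64) -> u64 { rec(n + 1) }
    //     const _: u64 = rec(0); // CTFE aborts with `StackFrameLimitReached`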

    fn stack(
        ecx: &'a InterpCx<'mir, 'tcx, Self>,
    ) -> &'a [Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>] {
        &ecx.machine.stack
    }

    fn stack_mut(
        ecx: &'a mut InterpCx<'mir, 'tcx, Self>,
    ) -> &'a mut Vec<Frame<'mir, 'tcx, Self::PointerTag, Self::FrameExtra>> {
        &mut ecx.machine.stack
    }

    fn before_access_global(
        memory_extra: &MemoryExtra,
        alloc_id: AllocId,
        allocation: &Allocation,
        static_def_id: Option<DefId>,
        is_write: bool,
    ) -> InterpResult<'tcx> {
        if is_write {
            // Write access. These are never allowed, but we give a targeted error message.
            if allocation.mutability == Mutability::Not {
                Err(err_ub!(WriteToReadOnly(alloc_id)).into())
            } else {
                Err(ConstEvalErrKind::ModifiedGlobal.into())
            }
        } else {
            // Read access. These are usually allowed, with some exceptions.
            if memory_extra.can_access_statics {
                // Machine configuration allows us to read from anything (e.g., `static` initializer).
                Ok(())
            } else if static_def_id.is_some() {
                // Machine configuration does not allow us to read statics
                // (e.g., `const` initializer).
                // See `const_eval::machine::MemoryExtra::can_access_statics` for why
                // this check is so important: if we could read statics, we could read pointers
                // to mutable allocations *inside* statics. These allocations are not themselves
                // statics, so pointers to them can get around the check in `validity.rs`.
                Err(ConstEvalErrKind::ConstAccessesStatic.into())
            } else {
                // Immutable global, this read is fine.
                // But make sure we never accept a read from something mutable; that would be
                // unsound. The content of this allocation may differ between now and run-time,
                // so if we permitted reading now we might return the wrong value.
                assert_eq!(allocation.mutability, Mutability::Not);
                Ok(())
            }
        }
    }
}

// Please do not add any code below the above `Machine` trait impl. I (oli-obk) plan more cleanups
// so we can end up having a file with just that impl, but for now, let's keep the impl discoverable
// at the bottom of this file.