//! This module specifies the type-based interner for constants.
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
//!
//! In principle, this is not very complicated: we recursively walk the final value, follow all the
//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
//! is picking the right mutability for the allocations in a `static` initializer: we want to make
//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
//! same time, we need to make memory that could be mutated by the program mutable to avoid
//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
//! (In principle, we could skip this type-based part for `const` and promoteds, as they need to be
//! always immutable. At least for `const`, however, we use this opportunity to reject any `const`
//! that contains allocations whose mutability we cannot identify.)
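//!
//! As an illustrative sketch (a hypothetical input, not code from this module), consider:
//!
//! ```ignore (illustrative)
//! use std::sync::atomic::AtomicI32;
//! static FOO: (&i32, &AtomicI32) = (&42, &AtomicI32::new(0));
//! ```
//!
//! The allocation behind `&42` is frozen, so it can be interned read-only, while the allocation
//! behind the `AtomicI32` contains an `UnsafeCell` and must be interned as mutable.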

use super::validity::RefTracking;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};

use rustc_ast::Mutability;

use super::{
    AllocId, Allocation, ConstAllocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy,
    ValueVisitor,
};
use crate::const_eval;

pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine<
    'mir,
    'tcx,
    MemoryKind = T,
    Provenance = AllocId,
    ExtraFnVal = !,
    FrameExtra = (),
    AllocExtra = (),
    MemoryMap = FxHashMap<AllocId, (MemoryKind<T>, Allocation)>,
>;

struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>> {
    /// The ecx from which we intern.
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    /// Previously encountered safe references.
    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, InternMode)>,
    /// A list of all encountered allocations. After type-based interning, we traverse this list
    /// to also intern allocations that are only referenced by a raw pointer or inside a union.
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    /// The root kind of the value that we're looking at. This field is never mutated for a
    /// particular allocation. It is primarily used to make as many allocations as possible
    /// read-only so LLVM can place them in const memory.
    mode: InternMode,
    /// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
    /// the intern mode of references we encounter.
    inside_unsafe_cell: bool,
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
enum InternMode {
    /// A static and its current mutability. Below shared references inside a `static mut`,
    /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
    /// is *mutable*.
    Static(hir::Mutability),
    /// A `const`.
    Const,
}
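
// Illustrative sketch (ours, not from the original source): how the mode evolves while
// interning `static mut S: &&mut u8 = ...;`. The root allocation is visited in
// `InternMode::Static(Mutability::Mut)`; crossing the shared `&` switches to
// `InternMode::Static(Mutability::Not)`; the inner `&mut` then inherits that outer
// mutability, so `&&mut u8` is interned exactly like `&&u8` -- read-only.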

/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics.
struct IsStaticOrFn;

/// Intern an allocation without looking at its children.
/// `mode` is the mode of the environment where we found this pointer.
/// `mutability` is the mutability of the place to be interned; even if that says
/// `immutable` things might become mutable if `ty` is not frozen.
/// `ty` can be `None` if there is no potential interior mutability
/// to account for (e.g. for vtables).
fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>(
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    alloc_id: AllocId,
    mode: InternMode,
    ty: Option<Ty<'tcx>>,
) -> Option<IsStaticOrFn> {
    trace!("intern_shallow {:?} with {:?}", alloc_id, mode);
    // Remove the allocation from the local memory map.
    let tcx = ecx.tcx;
    let Some((kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
        // Pointer not found in local memory map. It is either a pointer to the global
        // memory (`tcx`) or dangling.
        // If the pointer is dangling (neither in local nor global memory), we leave it
        // to validation to error -- it has the much better error messages, pointing out where
        // in the value the dangling reference lies.
        // The `delay_span_bug` ensures that we don't forget such a check in validation.
        if tcx.try_get_global_alloc(alloc_id).is_none() {
            tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
        }
        // Treat dangling pointers like other statics, just to stop trying to recurse into them.
        return Some(IsStaticOrFn);
    };
    // This match is just a canary for future changes to `MemoryKind`, which most likely need
    // changes in this function.
    match kind {
        MemoryKind::Stack
        | MemoryKind::Machine(const_eval::MemoryKind::Heap)
        | MemoryKind::CallerLocation => {}
    }
    // Set allocation mutability as appropriate. This is used by LLVM to put things into
    // read-only memory, and also by Miri when evaluating other globals that
    // access this one.
    if let InternMode::Static(mutability) = mode {
        // For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume
        // no interior mutability.
        let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx, ecx.param_env));
        // For statics, allocation mutability is the combination of place mutability and
        // type mutability.
        // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
        let immutable = mutability == Mutability::Not && frozen;
        if immutable {
            alloc.mutability = Mutability::Not;
        } else {
            // Just making sure we are not "upgrading" an immutable allocation to mutable.
            assert_eq!(alloc.mutability, Mutability::Mut);
        }
    } else {
        // No matter what, *constants are never mutable*. Mutating them is UB.
        // See const_eval::machine::MemoryExtra::can_access_statics for why
        // immutability is so important.

        // Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
        alloc.mutability = Mutability::Not;
    };
    // Link the alloc id to the actual allocation.
    leftover_allocations.extend(alloc.relocations().iter().map(|&(_, alloc_id)| alloc_id));
    let alloc = tcx.intern_const_alloc(alloc);
    tcx.set_alloc_id_memory(alloc_id, alloc);
    None
}

impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
    InternVisitor<'rt, 'mir, 'tcx, M>
{
    fn intern_shallow(
        &mut self,
        alloc_id: AllocId,
        mode: InternMode,
        ty: Option<Ty<'tcx>>,
    ) -> Option<IsStaticOrFn> {
        intern_shallow(self.ecx, self.leftover_allocations, alloc_id, mode, ty)
    }
}

impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
    ValueVisitor<'mir, 'tcx, M> for InternVisitor<'rt, 'mir, 'tcx, M>
{
    type V = MPlaceTy<'tcx>;

    #[inline(always)]
    fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
        &self.ecx
    }

    fn visit_aggregate(
        &mut self,
        mplace: &MPlaceTy<'tcx>,
        fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
    ) -> InterpResult<'tcx> {
        // We want to walk the aggregate to look for references to intern. While doing that we
        // also need to take special care of interior mutability.
        //
        // As an optimization, however, if the allocation does not contain any references, we
        // don't need to do the walk. It can be costly for big arrays for example (e.g. issue
        // #93215).
        let is_walk_needed = |mplace: &MPlaceTy<'tcx>| -> InterpResult<'tcx, bool> {
            // ZSTs cannot contain pointers, so we can avoid the interning walk.
            if mplace.layout.is_zst() {
                return Ok(false);
            }

            // Now, check whether this allocation could contain references.
            //
            // Note that this check may sometimes not be cheap, so we only do it when the walk
            // we'd like to avoid could be expensive: on the potentially larger types, arrays and
            // slices, rather than on all aggregates unconditionally.
            if matches!(mplace.layout.ty.kind(), ty::Array(..) | ty::Slice(..)) {
                let Some((size, align)) = self.ecx.size_and_align_of_mplace(&mplace)? else {
                    // We do the walk if we can't determine the size of the mplace: we may be
                    // dealing with extern types here in the future.
                    return Ok(true);
                };

                // If there are no relocations in this allocation, it does not contain references
                // that point to another allocation, and we can avoid the interning walk.
                if let Some(alloc) = self.ecx.get_ptr_alloc(mplace.ptr, size, align)? {
                    if !alloc.has_relocations() {
                        return Ok(false);
                    }
                } else {
                    // We're encountering a ZST here, and can avoid the walk as well.
                    return Ok(false);
                }
            }

            // In the general case, we do the walk.
            Ok(true)
        };

        // If this allocation contains no references to intern, we avoid the potentially costly
        // walk.
        //
        // We can do this before the checks for interior mutability below, because only references
        // are relevant in that situation, and we're checking if there are any here.
        if !is_walk_needed(mplace)? {
            return Ok(());
        }

        if let Some(def) = mplace.layout.ty.ty_adt_def() {
            if def.is_unsafe_cell() {
                // We are crossing over an `UnsafeCell`, so we can mutate again. This means that
                // references we encounter inside here are interned as pointing to mutable
                // allocations.
                // Remember the `old` value to handle nested `UnsafeCell`.
                let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
                let walked = self.walk_aggregate(mplace, fields);
                self.inside_unsafe_cell = old;
                return walked;
            }
        }

        self.walk_aggregate(mplace, fields)
    }
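
    // Illustrative sketch (ours): for `static S: &AtomicI32 = &AtomicI32::new(0);`, walking the
    // `AtomicI32` eventually visits its inner `UnsafeCell<i32>`, so `inside_unsafe_cell` is set
    // while the cell's contents are traversed, and any reference found in there would be
    // interned as pointing to a mutable allocation.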

    fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
        // Handle reference types, as these are the only relocations supported by const eval.
        // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
        let tcx = self.ecx.tcx;
        let ty = mplace.layout.ty;
        if let ty::Ref(_, referenced_ty, ref_mutability) = *ty.kind() {
            let value = self.ecx.read_immediate(&mplace.into())?;
            let mplace = self.ecx.ref_to_mplace(&value)?;
            assert_eq!(mplace.layout.ty, referenced_ty);

            // Handle trait object vtables.
            if let ty::Dynamic(..) =
                tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
            {
                let ptr = mplace.meta.unwrap_meta().to_pointer(&tcx)?;
                if let Some(alloc_id) = ptr.provenance {
                    // Explicitly choose const mode here, since vtables are immutable, even
                    // if the reference of the fat pointer is mutable.
                    self.intern_shallow(alloc_id, InternMode::Const, None);
                } else {
                    // Validation will error (with a better message) on an invalid vtable pointer.
                    // Let validation show the error message, but make sure it *does* error.
                    tcx.sess
                        .delay_span_bug(tcx.span, "vtable pointers cannot be integer pointers");
                }
            }

            // Check if we have encountered this pointer+layout combination before.
            // Only recurse for allocation-backed pointers.
            if let Some(alloc_id) = mplace.ptr.provenance {
                // Compute the mode with which we intern this. Our goal here is to make as many
                // statics as we can immutable so they can be placed in read-only memory by LLVM.
                let ref_mode = match self.mode {
                    InternMode::Static(mutbl) => {
                        // In statics, merge outer mutability with reference mutability and
                        // take into account whether we are in an `UnsafeCell`.
                        //
                        // The only way a mutable reference actually works as a mutable reference
                        // is by being in a `static mut` directly or behind another mutable
                        // reference. If there's an immutable reference or we are inside a
                        // `static`, then our mutable reference is equivalent to an immutable one.
                        // As an example: `&&mut Foo` is semantically equivalent to `&&Foo`.
                        match ref_mutability {
                            _ if self.inside_unsafe_cell => {
                                // Inside an `UnsafeCell`, we are effectively inside a
                                // `static mut`; the "outer" mutability does not matter.
                                InternMode::Static(ref_mutability)
                            }
                            Mutability::Not => {
                                // A shared reference, things become immutable.
                                // We do *not* consider `freeze` here: `intern_shallow` considers
                                // `freeze` for the actual mutability of this allocation; the
                                // intern mode for references contained in this allocation is
                                // tracked more precisely when traversing the referenced data
                                // (by tracking `UnsafeCell`). This makes sure that
                                // `&(&i32, &Cell<i32>)` still has the left inner reference
                                // interned into a read-only allocation.
                                InternMode::Static(Mutability::Not)
                            }
                            Mutability::Mut => {
                                // Mutable reference.
                                InternMode::Static(mutbl)
                            }
                        }
                    }
                    InternMode::Const => {
                        // Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
                        // checking for mutable references that we encounter -- they must all be
                        // ZST.
                        InternMode::Const
                    }
                };
                match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
                    // No need to recurse: these are interned already and statics may have
                    // cycles, so we don't want to recurse there.
                    Some(IsStaticOrFn) => {}
                    // Intern everything referenced by this value. The mutability is taken from
                    // the reference. It is checked above that mutable references only happen
                    // in `static mut`.
                    None => self.ref_tracking.track((mplace, ref_mode), || ()),
                }
            }
            Ok(())
        } else {
            // Not a reference -- proceed recursively.
            self.walk_value(mplace)
        }
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
pub enum InternKind {
    /// The `mutability` of the static, ignoring the type which may have interior mutability.
    Static(hir::Mutability),
    Constant,
    Promoted,
}
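
// Illustrative mapping (ours, not from the original source):
//
//     static S: i32 = 0;          // InternKind::Static(Mutability::Not)
//     static mut M: i32 = 0;      // InternKind::Static(Mutability::Mut)
//     const C: i32 = 0;           // InternKind::Constant (array lengths use this too)
//     let r: &'static i32 = &0;   // the promoted `&0` uses InternKind::Promoted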

/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
/// Any errors here would anyway be turned into `const_err` lints, whereas validation failures
/// are hard errors.
#[tracing::instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<
    'mir,
    'tcx: 'mir,
    M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
>(
    ecx: &mut InterpCx<'mir, 'tcx, M>,
    intern_kind: InternKind,
    ret: &MPlaceTy<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    let tcx = ecx.tcx;
    let base_intern_mode = match intern_kind {
        InternKind::Static(mutbl) => InternMode::Static(mutbl),
        // `Constant` includes array lengths.
        InternKind::Constant | InternKind::Promoted => InternMode::Const,
    };

    // Type based interning.
    // `ref_tracking` tracks typed references we have already interned and still need to crawl
    // for more typed information inside them.
    // `leftover_allocations` collects *all* allocations we see, because some might not
    // be available in a typed way. They get interned at the end.
    let mut ref_tracking = RefTracking::empty();
    let leftover_allocations = &mut FxHashSet::default();

    // Start with the outermost allocation.
    intern_shallow(
        ecx,
        leftover_allocations,
        // The outermost allocation must exist, because we allocated it with
        // `Memory::allocate`.
        ret.ptr.provenance.unwrap(),
        base_intern_mode,
        Some(ret.layout.ty),
    );

    ref_tracking.track((*ret, base_intern_mode), || ());

    while let Some(((mplace, mode), _)) = ref_tracking.todo.pop() {
        let res = InternVisitor {
            ref_tracking: &mut ref_tracking,
            ecx,
            mode,
            leftover_allocations,
            inside_unsafe_cell: false,
        }
        .visit_value(&mplace);
        // We deliberately *ignore* interpreter errors here. When there is a problem, the
        // remaining references are "leftover"-interned, and later validation will show a proper
        // error and point at the right part of the value causing the problem.
        match res {
            Ok(()) => {}
            Err(error) => {
                ecx.tcx.sess.delay_span_bug(
                    ecx.tcx.span,
                    &format!(
                        "error during interning should later cause validation failure: {}",
                        error
                    ),
                );
            }
        }
    }

    // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
    // pointers, etc., so we can't intern them according to their type rules.

    let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
    debug!(?todo);
    debug!("dead_alloc_map: {:#?}", ecx.memory.dead_alloc_map);
    while let Some(alloc_id) = todo.pop() {
        if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) {
            // We can't call the `intern_shallow` method here, as its logic is tailored to safe
            // references and a `leftover_allocations` set (where we only have a todo-list here).
            // So we hand-roll the interning logic here again.
            match intern_kind {
                // Statics may contain mutable allocations even behind relocations.
                // Even for immutable statics it would be ok to have mutable allocations behind
                // raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`.
                InternKind::Static(_) => {}
                // Raw pointers in promoteds may only point to immutable things, so we mark
                // everything as immutable.
                // It is UB to mutate through a raw pointer obtained via an immutable reference:
                // since all references and pointers inside a promoted must by their very
                // definition be created from an immutable reference (and promotion also excludes
                // interior mutability), mutating through them would be UB.
                // There's no way we can check whether the user is using raw pointers correctly,
                // so all we can do is mark this as immutable here.
                InternKind::Promoted => {
                    // See const_eval::machine::MemoryExtra::can_access_statics for why
                    // immutability is so important.
                    alloc.mutability = Mutability::Not;
                }
                InternKind::Constant => {
                    // If it's a constant, we should not have any "leftovers" as everything
                    // is tracked by const-checking.
                    // FIXME: downgrade this to a warning? It rejects some legitimate consts,
                    // such as `const CONST_RAW: *const Vec<i32> = &Vec::new() as *const _;`.
                    ecx.tcx
                        .sess
                        .span_err(ecx.tcx.span, "untyped pointers are not allowed in constant");
                    // For better errors later, mark the allocation as immutable.
                    alloc.mutability = Mutability::Not;
                }
            }
            let alloc = tcx.intern_const_alloc(alloc);
            tcx.set_alloc_id_memory(alloc_id, alloc);
            for &(_, alloc_id) in alloc.inner().relocations().iter() {
                if leftover_allocations.insert(alloc_id) {
                    todo.push(alloc_id);
                }
            }
        } else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) {
            // Codegen does not like dangling pointers, and generally `tcx` assumes that
            // all allocations referenced anywhere actually exist. So, make sure we error here.
            let reported = ecx
                .tcx
                .sess
                .span_err(ecx.tcx.span, "encountered dangling pointer in final constant");
            return Err(reported);
        } else if ecx.tcx.try_get_global_alloc(alloc_id).is_none() {
            // We have hit an `AllocId` that is neither in local nor global memory and isn't
            // marked as dangling by local memory. That should be impossible.
            span_bug!(ecx.tcx.span, "encountered unknown alloc id {:?}", alloc_id);
        }
    }
    Ok(())
}

impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
    InterpCx<'mir, 'tcx, M>
{
    /// A helper function that allocates memory for the layout given and gives you access to
    /// mutate it. Once your own mutation code is done, the backing `Allocation` is removed from
    /// the current `Memory` and returned.
    pub fn intern_with_temp_alloc(
        &mut self,
        layout: TyAndLayout<'tcx>,
        f: impl FnOnce(
            &mut InterpCx<'mir, 'tcx, M>,
            &PlaceTy<'tcx, M::Provenance>,
        ) -> InterpResult<'tcx, ()>,
    ) -> InterpResult<'tcx, ConstAllocation<'tcx>> {
        let dest = self.allocate(layout, MemoryKind::Stack)?;
        f(self, &dest.into())?;
        // Take the allocation back out of the interpreter's memory and intern it read-only.
        let mut alloc = self.memory.alloc_map.remove(&dest.ptr.provenance.unwrap()).unwrap().1;
        alloc.mutability = Mutability::Not;
        Ok(self.tcx.intern_const_alloc(alloc))
    }
}
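
// A minimal usage sketch (ours; `write_scalar` and `Scalar::from_u32` are existing interpreter
// APIs, the surrounding setup is hypothetical): build a one-off constant allocation by writing
// through the temporary place, then let the helper intern it read-only.
//
//     let alloc = ecx.intern_with_temp_alloc(layout, |ecx, dest| {
//         ecx.write_scalar(Scalar::from_u32(42), dest)
//     })?;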