//! This module specifies the type based interner for constants.
//!
//! After a const evaluation has computed a value, before we destroy the const evaluator's session
//! memory, we need to extract all memory allocations to the global memory pool so they stay around.
//!
//! In principle, this is not very complicated: we recursively walk the final value, follow all the
//! pointers, and move all reachable allocations to the global `tcx` memory. The only complication
//! is picking the right mutability for the allocations in a `static` initializer: we want to make
//! as many allocations as possible immutable so LLVM can put them into read-only memory. At the
//! same time, we need to make memory that could be mutated by the program mutable to avoid
//! incorrect compilations. To achieve this, we do a type-based traversal of the final value,
//! tracking mutable and shared references and `UnsafeCell` to determine the current mutability.
//! (In principle, we could skip this type-based part for `const`s and promoteds, as they always
//! need to be immutable. At least for `const`, however, we use this opportunity to reject any
//! `const` that contains allocations whose mutability we cannot identify.)
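//!
//! For illustration only (these items are not part of this module), consider two hypothetical
//! statics whose pointees end up with different mutability after interning:
//!
//! ```rust
//! use std::sync::atomic::AtomicU32;
//!
//! // Frozen data: the allocation behind the reference can be interned read-only.
//! static FROZEN: &i32 = &42;
//! // Interior mutability: the `AtomicU32` allocation must stay mutable, since the
//! // program may legitimately mutate it through the shared reference.
//! static COUNTER: &AtomicU32 = &AtomicU32::new(0);
//! ```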

use super::validity::RefTracking;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_errors::ErrorGuaranteed;
use rustc_hir as hir;
use rustc_middle::mir::interpret::InterpResult;
use rustc_middle::ty::{self, layout::TyAndLayout, Ty};

use rustc_ast::Mutability;

use super::{
    AllocId, Allocation, ConstAllocation, InterpCx, MPlaceTy, Machine, MemoryKind, PlaceTy,
    ValueVisitor,
};
use crate::const_eval;

pub trait CompileTimeMachine<'mir, 'tcx, T> = Machine<
    'mir,
    'tcx,
    MemoryKind = T,
    PointerTag = AllocId,
    ExtraFnVal = !,
    FrameExtra = (),
    AllocExtra = (),
    MemoryMap = FxHashMap<AllocId, (MemoryKind<T>, Allocation)>,
>;

struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>> {
    /// The ecx from which we intern.
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    /// Previously encountered safe references.
    ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, InternMode)>,
    /// A list of all encountered allocations. After type-based interning, we traverse this list to
    /// also intern allocations that are only referenced by a raw pointer or inside a union.
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    /// The root kind of the value that we're looking at. This field is never mutated for a
    /// particular allocation. It is primarily used to make as many allocations as possible
    /// read-only so LLVM can place them in const memory.
    mode: InternMode,
    /// This field stores whether we are *currently* inside an `UnsafeCell`. This can affect
    /// the intern mode of references we encounter.
    inside_unsafe_cell: bool,
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
enum InternMode {
    /// A static and its current mutability. Below shared references inside a `static mut`,
    /// this is *immutable*, and below mutable references inside an `UnsafeCell`, this
    /// is *mutable*.
    Static(hir::Mutability),
    /// A `const`.
    Const,
}

/// Signalling data structure to ensure we don't recurse
/// into the memory of other constants or statics.
struct IsStaticOrFn;

/// Intern an allocation without looking at its children.
/// `mode` is the mode of the environment where we found this pointer.
/// `mutability` is the mutability of the place to be interned; even if that says
/// `immutable`, things might become mutable if `ty` is not frozen.
/// `ty` can be `None` if there is no potential interior mutability
/// to account for (e.g. for vtables).
fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>(
    ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
    leftover_allocations: &'rt mut FxHashSet<AllocId>,
    alloc_id: AllocId,
    mode: InternMode,
    ty: Option<Ty<'tcx>>,
) -> Option<IsStaticOrFn> {
    trace!("intern_shallow {:?} with {:?}", alloc_id, mode);
    // remove allocation
    let tcx = ecx.tcx;
    let Some((kind, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) else {
        // Pointer not found in local memory map. It is either a pointer to the global
        // memory (`tcx`) or dangling.
        // If the pointer is dangling (neither in local nor global memory), we leave it
        // to validation to error -- it has the much better error messages, pointing out where
        // in the value the dangling reference lies.
        // The `delay_span_bug` ensures that we don't forget such a check in validation.
        if tcx.get_global_alloc(alloc_id).is_none() {
            tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
        }
        // Treat dangling pointers like other statics,
        // just to stop trying to recurse into them.
        return Some(IsStaticOrFn);
    };
    // This match is just a canary for future changes to `MemoryKind`, which most likely need
    // changes in this function.
    match kind {
        MemoryKind::Stack
        | MemoryKind::Machine(const_eval::MemoryKind::Heap)
        | MemoryKind::CallerLocation => {}
    }
    // Set allocation mutability as appropriate. This is used by LLVM to put things into
    // read-only memory, and also by Miri when evaluating other globals that
    // access this one.
    if let InternMode::Static(mutability) = mode {
        // For this, we need to take into account `UnsafeCell`. When `ty` is `None`, we assume
        // no interior mutability.
        let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx, ecx.param_env));
        // For statics, allocation mutability is the combination of place mutability and
        // type mutability.
        // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
        let immutable = mutability == Mutability::Not && frozen;
        if immutable {
            alloc.mutability = Mutability::Not;
        } else {
            // Just making sure we are not "upgrading" an immutable allocation to mutable.
            assert_eq!(alloc.mutability, Mutability::Mut);
        }
    } else {
        // No matter what, *constants are never mutable*. Mutating them is UB.
        // See const_eval::machine::MemoryExtra::can_access_statics for why
        // immutability is so important.

        // Validation will ensure that there is no `UnsafeCell` on an immutable allocation.
        alloc.mutability = Mutability::Not;
    };
    // link the alloc id to the actual allocation
    leftover_allocations.extend(alloc.relocations().iter().map(|&(_, alloc_id)| alloc_id));
    let alloc = tcx.intern_const_alloc(alloc);
    tcx.set_alloc_id_memory(alloc_id, alloc);
    None
}

impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
    InternVisitor<'rt, 'mir, 'tcx, M>
{
    fn intern_shallow(
        &mut self,
        alloc_id: AllocId,
        mode: InternMode,
        ty: Option<Ty<'tcx>>,
    ) -> Option<IsStaticOrFn> {
        intern_shallow(self.ecx, self.leftover_allocations, alloc_id, mode, ty)
    }
}

impl<'rt, 'mir, 'tcx: 'mir, M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>>
    ValueVisitor<'mir, 'tcx, M> for InternVisitor<'rt, 'mir, 'tcx, M>
{
    type V = MPlaceTy<'tcx>;

    #[inline(always)]
    fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
        &self.ecx
    }

    fn visit_aggregate(
        &mut self,
        mplace: &MPlaceTy<'tcx>,
        fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
    ) -> InterpResult<'tcx> {
        // ZSTs cannot contain pointers, so we can skip them.
        if mplace.layout.is_zst() {
            return Ok(());
        }

        if let Some(def) = mplace.layout.ty.ty_adt_def() {
            if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
                // We are crossing over an `UnsafeCell`, so we can mutate again. This means that
                // references we encounter inside here are interned as pointing to mutable
                // allocations.
                // Remember the `old` value to handle nested `UnsafeCell`.
                let old = std::mem::replace(&mut self.inside_unsafe_cell, true);
                let walked = self.walk_aggregate(mplace, fields);
                self.inside_unsafe_cell = old;
                return walked;
            }
        }

        self.walk_aggregate(mplace, fields)
    }

    fn visit_value(&mut self, mplace: &MPlaceTy<'tcx>) -> InterpResult<'tcx> {
        // Handle Reference types, as these are the only relocations supported by const eval.
        // Raw pointers (and boxes) are handled by the `leftover_allocations` logic.
        let tcx = self.ecx.tcx;
        let ty = mplace.layout.ty;
        if let ty::Ref(_, referenced_ty, ref_mutability) = *ty.kind() {
            let value = self.ecx.read_immediate(&(*mplace).into())?;
            let mplace = self.ecx.ref_to_mplace(&value)?;
            assert_eq!(mplace.layout.ty, referenced_ty);
            // Handle trait object vtables.
            if let ty::Dynamic(..) =
                tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind()
            {
                let ptr = self.ecx.scalar_to_ptr(mplace.meta.unwrap_meta());
                if let Some(alloc_id) = ptr.provenance {
                    // Explicitly choose const mode here, since vtables are immutable, even
                    // if the reference of the fat pointer is mutable.
                    self.intern_shallow(alloc_id, InternMode::Const, None);
                } else {
                    // Validation will error (with a better message) on an invalid vtable pointer.
                    // Let validation show the error message, but make sure it *does* error.
                    tcx.sess
                        .delay_span_bug(tcx.span, "vtable pointers cannot be integer pointers");
                }
            }
            // Check if we have encountered this pointer+layout combination before.
            // Only recurse for allocation-backed pointers.
            if let Some(alloc_id) = mplace.ptr.provenance {
                // Compute the mode with which we intern this. Our goal here is to make as many
                // statics as we can immutable so they can be placed in read-only memory by LLVM.
                let ref_mode = match self.mode {
                    InternMode::Static(mutbl) => {
                        // In statics, merge outer mutability with reference mutability and
                        // take into account whether we are in an `UnsafeCell`.

                        // The only way a mutable reference actually works as a mutable reference is
                        // by being in a `static mut` directly or behind another mutable reference.
                        // If there's an immutable reference or we are inside a `static`, then our
                        // mutable reference is equivalent to an immutable one. As an example:
                        // `&&mut Foo` is semantically equivalent to `&&Foo`.
                        match ref_mutability {
                            _ if self.inside_unsafe_cell => {
                                // Inside an `UnsafeCell` is like inside a `static mut`, the "outer"
                                // mutability does not matter.
                                InternMode::Static(ref_mutability)
                            }
                            Mutability::Not => {
                                // A shared reference, things become immutable.
                                // We do *not* consider `freeze` here: `intern_shallow` considers
                                // `freeze` for the actual mutability of this allocation; the intern
                                // mode for references contained in this allocation is tracked more
                                // precisely when traversing the referenced data (by tracking
                                // `UnsafeCell`). This makes sure that `&(&i32, &Cell<i32>)` still
                                // has the left inner reference interned into a read-only
                                // allocation.
                                InternMode::Static(Mutability::Not)
                            }
                            Mutability::Mut => {
                                // Mutable reference.
                                InternMode::Static(mutbl)
                            }
                        }
                    }
                    InternMode::Const => {
                        // Ignore `UnsafeCell`, everything is immutable. Validity does some sanity
                        // checking for mutable references that we encounter -- they must all be
                        // ZST.
                        InternMode::Const
                    }
                };
                match self.intern_shallow(alloc_id, ref_mode, Some(referenced_ty)) {
                    // No need to recurse, these are interned already and statics may have
                    // cycles, so we don't want to recurse there.
                    Some(IsStaticOrFn) => {}
                    // Intern everything referenced by this value. The mutability is taken from the
                    // reference. It is checked above that mutable references only happen in
                    // `static mut`.
                    None => self.ref_tracking.track((mplace, ref_mode), || ()),
                }
            }
            Ok(())
        } else {
            // Not a reference -- proceed recursively.
            self.walk_value(mplace)
        }
    }
}

#[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
pub enum InternKind {
    /// The `mutability` of the static, ignoring the type which may have interior mutability.
    Static(hir::Mutability),
    Constant,
    Promoted,
}

/// Intern `ret` and everything it references.
///
/// This *cannot raise an interpreter error*. Doing so is left to validation, which
/// tracks where in the value we are and thus can show much better error messages.
/// Any errors here would anyway be turned into `const_err` lints, whereas validation failures
/// are hard errors.
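///
/// An illustrative call site (sketch only; `ecx` and `ret` are assumed to come from a finished
/// const evaluation):
///
/// ```ignore (requires an interpreter context)
/// // Move the evaluated value's allocations into the global `tcx` memory.
/// intern_const_alloc_recursive(&mut ecx, InternKind::Constant, &ret)?;
/// ```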
#[tracing::instrument(level = "debug", skip(ecx))]
pub fn intern_const_alloc_recursive<
    'mir,
    'tcx: 'mir,
    M: CompileTimeMachine<'mir, 'tcx, const_eval::MemoryKind>,
>(
    ecx: &mut InterpCx<'mir, 'tcx, M>,
    intern_kind: InternKind,
    ret: &MPlaceTy<'tcx>,
) -> Result<(), ErrorGuaranteed> {
    let tcx = ecx.tcx;
    let base_intern_mode = match intern_kind {
        InternKind::Static(mutbl) => InternMode::Static(mutbl),
        // `Constant` includes array lengths.
        InternKind::Constant | InternKind::Promoted => InternMode::Const,
    };

    // Type based interning.
    // `ref_tracking` tracks typed references we have already interned and still need to crawl for
    // more typed information inside them.
    // `leftover_allocations` collects *all* allocations we see, because some might not
    // be available in a typed way. They get interned at the end.
    let mut ref_tracking = RefTracking::empty();
    let leftover_allocations = &mut FxHashSet::default();

    // start with the outermost allocation
    intern_shallow(
        ecx,
        leftover_allocations,
        // The outermost allocation must exist, because we allocated it with
        // `Memory::allocate`.
        ret.ptr.provenance.unwrap(),
        base_intern_mode,
        Some(ret.layout.ty),
    );

    ref_tracking.track((*ret, base_intern_mode), || ());

    while let Some(((mplace, mode), _)) = ref_tracking.todo.pop() {
        let res = InternVisitor {
            ref_tracking: &mut ref_tracking,
            ecx,
            mode,
            leftover_allocations,
            inside_unsafe_cell: false,
        }
        .visit_value(&mplace);
        // We deliberately *ignore* interpreter errors here. When there is a problem, the remaining
        // references are "leftover"-interned, and later validation will show a proper error
        // and point at the right part of the value causing the problem.
        match res {
            Ok(()) => {}
            Err(error) => {
                ecx.tcx.sess.delay_span_bug(
                    ecx.tcx.span,
                    &format!(
                        "error during interning should later cause validation failure: {}",
                        error
                    ),
                );
            }
        }
    }

    // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
    // pointers, ... So we can't intern them according to their type rules.

    let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
    while let Some(alloc_id) = todo.pop() {
        if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) {
            // We can't call the `intern_shallow` method here, as its logic is tailored to safe
            // references and a `leftover_allocations` set (where we only have a todo-list here).
            // So we hand-roll the interning logic here again.
            match intern_kind {
                // Statics may contain mutable allocations even behind relocations.
                // Even for immutable statics it would be ok to have mutable allocations behind
                // raw pointers, e.g. for `static FOO: *const AtomicUsize = &AtomicUsize::new(42)`.
                InternKind::Static(_) => {}
                // Raw pointers in promoteds may only point to immutable things so we mark
                // everything as immutable.
                // It is UB to mutate through a raw pointer obtained via an immutable reference:
                // Since all references and pointers inside a promoted must by their very definition
                // be created from an immutable reference (and promotion also excludes interior
                // mutability), mutating through them would be UB.
                // There's no way we can check whether the user is using raw pointers correctly,
                // so all we can do is mark this as immutable here.
                InternKind::Promoted => {
                    // See const_eval::machine::MemoryExtra::can_access_statics for why
                    // immutability is so important.
                    alloc.mutability = Mutability::Not;
                }
                InternKind::Constant => {
                    // If it's a constant, we should not have any "leftovers" as everything
                    // is tracked by const-checking.
                    // FIXME: downgrade this to a warning? It rejects some legitimate consts,
                    // such as `const CONST_RAW: *const Vec<i32> = &Vec::new() as *const _;`.
                    ecx.tcx
                        .sess
                        .span_err(ecx.tcx.span, "untyped pointers are not allowed in constant");
                    // For better errors later, mark the allocation as immutable.
                    alloc.mutability = Mutability::Not;
                }
            }
            let alloc = tcx.intern_const_alloc(alloc);
            tcx.set_alloc_id_memory(alloc_id, alloc);
            for &(_, alloc_id) in alloc.inner().relocations().iter() {
                if leftover_allocations.insert(alloc_id) {
                    todo.push(alloc_id);
                }
            }
        } else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) {
            // Codegen does not like dangling pointers, and generally `tcx` assumes that
            // all allocations referenced anywhere actually exist. So, make sure we error here.
            ecx.tcx.sess.span_err(ecx.tcx.span, "encountered dangling pointer in final constant");
            return Err(ErrorGuaranteed);
        } else if ecx.tcx.get_global_alloc(alloc_id).is_none() {
            // We have hit an `AllocId` that is neither in local nor global memory and isn't
            // marked as dangling by local memory. That should be impossible.
            span_bug!(ecx.tcx.span, "encountered unknown alloc id {:?}", alloc_id);
        }
    }
    Ok(())
}

impl<'mir, 'tcx: 'mir, M: super::intern::CompileTimeMachine<'mir, 'tcx, !>>
    InterpCx<'mir, 'tcx, M>
{
    /// A helper function that allocates memory for the layout given and gives you access to mutate
    /// it. Once your own mutation code is done, the backing `Allocation` is removed from the
    /// current `Memory` and returned.
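    ///
    /// A minimal usage sketch (illustrative only; it assumes an interpreter context `ecx`, a
    /// `layout` for a `u32`, and the interpreter's `write_scalar` helper):
    ///
    /// ```ignore (requires an interpreter context)
    /// let alloc = ecx.intern_with_temp_alloc(layout, |ecx, dest| {
    ///     // Write a value into the temporary allocation before it is interned.
    ///     ecx.write_scalar(Scalar::from_u32(42), dest)
    /// })?;
    /// ```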
    pub fn intern_with_temp_alloc(
        &mut self,
        layout: TyAndLayout<'tcx>,
        f: impl FnOnce(
            &mut InterpCx<'mir, 'tcx, M>,
            &PlaceTy<'tcx, M::PointerTag>,
        ) -> InterpResult<'tcx, ()>,
    ) -> InterpResult<'tcx, ConstAllocation<'tcx>> {
        let dest = self.allocate(layout, MemoryKind::Stack)?;
        f(self, &dest.into())?;
        let mut alloc = self.memory.alloc_map.remove(&dest.ptr.provenance.unwrap()).unwrap().1;
        alloc.mutability = Mutability::Not;
        Ok(self.tcx.intern_const_alloc(alloc))
    }
}