1 //! This module specifies the type based interner for constants.
3 //! After a const evaluation has computed a value, before we destroy the const evaluator's session
4 //! memory, we need to extract all memory allocations to the global memory pool so they stay around.
6 use super::validity::RefTracking;
8 use rustc::mir::interpret::{ErrorHandled, InterpResult};
9 use rustc::ty::{self, Ty};
10 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
12 use syntax::ast::Mutability;
14 use super::{AllocId, Allocation, InterpCx, MPlaceTy, Machine, MemoryKind, Scalar, ValueVisitor};
// Trait alias pinning down the `Machine` instantiation used for compile-time
// evaluation: allocations live in a plain `FxHashMap`, and `MemoryKind<!>`
// (with the never type as the machine-specific kind) means no extra
// machine-defined memory kinds exist during const eval.
// NOTE(review): additional associated-type bindings of this alias appear to be
// elided from this excerpt — confirm against the full file.
16 pub trait CompileTimeMachine<'mir, 'tcx> = Machine<
24 MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>,
// Value visitor that walks a const-eval result and moves every reachable
// allocation from the evaluator's local memory into the global (tcx) memory.
27 struct InternVisitor<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> {
28 /// The ectx from which we intern.
29 ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
30 /// Previously encountered safe references.
31 ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>,
32 /// A list of all encountered allocations. After type-based interning, we traverse this list to
33 /// also intern allocations that are only referenced by a raw pointer or inside a union.
34 leftover_allocations: &'rt mut FxHashSet<AllocId>,
35 /// The root node of the value that we're looking at. This field is never mutated and only used
36 /// for sanity assertions that will ICE when `const_qualif` screws up.
// NOTE(review): the field documented above (presumably `mode: InternMode`) is
// elided from this excerpt — it is read as `self.mode` in `visit_primitive`.
38 /// This field stores the mutability of the value *currently* being checked.
39 /// When encountering a mutable reference, we determine the pointee mutability
40 /// taking into account the mutability of the context: `& &mut i32` is entirely immutable,
41 /// despite the nested mutable reference!
42 /// The field gets updated when an `UnsafeCell` is encountered.
43 mutability: Mutability,
// Mode of the interning pass, distinguishing statics (which may stay mutable)
// from constants (which must end up fully immutable).
// NOTE(review): the `enum InternMode { ... }` line and the variant identifiers
// themselves are elided from this excerpt; only the variant doc comments remain.
46 #[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
48 /// Mutable references must in fact be immutable due to their surrounding immutability in a
49 /// `static`. In a `static mut` we start out as mutable and thus can also contain further `&mut`
50 /// that will actually be treated as mutable.
52 /// UnsafeCell is OK in the value of a constant: `const FOO = Cell::new(0)` creates
53 /// a new cell every time it is used.
55 /// `UnsafeCell` ICEs.
59 /// Signalling data structure to ensure we don't recurse
60 /// into the memory of other constants or statics
63 /// Intern an allocation without looking at its children.
64 /// `mode` is the mode of the environment where we found this pointer.
65 /// `mutability` is the mutability of the place to be interned; even if that says
66 /// `immutable` things might become mutable if `ty` is not frozen.
67 /// `ty` can be `None` if there is no potential interior mutability
68 /// to account for (e.g. for vtables).
///
/// Returns `Ok(Some(IsStaticOrFn))` when the allocation was already global (or
/// dangling) and must not be recursed into; `Ok(None)` when it was freshly
/// moved from the local memory map into the global alloc map.
69 fn intern_shallow<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>>(
70 ecx: &'rt mut InterpCx<'mir, 'tcx, M>,
71 leftover_allocations: &'rt mut FxHashSet<AllocId>,
// NOTE(review): the `mode: InternMode` and `alloc_id: AllocId` parameters (and
// the `ty: Option<Ty<'tcx>>` one) appear to be elided from this excerpt.
74 mutability: Mutability,
76 ) -> InterpResult<'tcx, Option<IsStaticOrFn>> {
77 trace!("InternVisitor::intern {:?} with {:?}", alloc_id, mutability,);
// Removing the allocation from the local map transfers ownership to us; if it
// is absent, the id already refers to global memory (or is dangling).
80 let (kind, mut alloc) = match ecx.memory.alloc_map.remove(&alloc_id) {
83 // Pointer not found in local memory map. It is either a pointer to the global
85 // If the pointer is dangling (neither in local nor global memory), we leave it
86 // to validation to error. The `delay_span_bug` ensures that we don't forget such
87 // a check in validation.
88 if tcx.alloc_map.lock().get(alloc_id).is_none() {
89 tcx.sess.delay_span_bug(ecx.tcx.span, "tried to intern dangling pointer");
91 // treat dangling pointers like other statics
92 // just to stop trying to recurse into them
93 return Ok(Some(IsStaticOrFn));
// NOTE(review): the match arms between here and the `MemoryKind` canary below
// are elided from this excerpt.
96 // This match is just a canary for future changes to `MemoryKind`, which most likely need
97 // changes in this function.
99 MemoryKind::Stack | MemoryKind::Vtable | MemoryKind::CallerLocation => {}
101 // Set allocation mutability as appropriate. This is used by LLVM to put things into
102 // read-only memory, and also by Miri when evaluating other constants/statics that
104 if mode == InternMode::Static {
105 // When `ty` is `None`, we assume no interior mutability.
106 let frozen = ty.map_or(true, |ty| ty.is_freeze(ecx.tcx.tcx, ecx.param_env, ecx.tcx.span));
107 // For statics, allocation mutability is the combination of the place mutability and
108 // the type mutability.
109 // The entire allocation needs to be mutable if it contains an `UnsafeCell` anywhere.
110 if mutability == Mutability::Not && frozen {
111 alloc.mutability = Mutability::Not;
// NOTE(review): the `else` branch header is elided here.
113 // Just making sure we are not "upgrading" an immutable allocation to mutable.
114 assert_eq!(alloc.mutability, Mutability::Mut);
117 // We *could* be non-frozen at `ConstBase`, for constants like `Cell::new(0)`.
118 // But we still intern that as immutable as the memory cannot be changed once the
119 // initial value was computed.
120 // Constants are never mutable.
124 "Something went very wrong: mutability requested for a constant"
126 alloc.mutability = Mutability::Not;
128 // link the alloc id to the actual allocation
129 let alloc = tcx.intern_const_alloc(alloc);
// Relocations (pointers stored inside this allocation) are queued so they get
// interned too, even if no typed path reaches them.
130 leftover_allocations.extend(alloc.relocations().iter().map(|&(_, ((), reloc))| reloc));
131 tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
135 impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> InternVisitor<'rt, 'mir, 'tcx, M> {
// Convenience wrapper: forwards to the free `intern_shallow` with this
// visitor's ecx, leftover set, and current mode.
// NOTE(review): the `fn intern_shallow(&mut self, alloc_id: AllocId,` signature
// line appears to be elided from this excerpt.
139 mutability: Mutability,
140 ty: Option<Ty<'tcx>>,
141 ) -> InterpResult<'tcx, Option<IsStaticOrFn>> {
142 intern_shallow(self.ecx, self.leftover_allocations, self.mode, alloc_id, mutability, ty)
// Type-driven traversal of the value: aggregates are walked field by field
// (tracking `UnsafeCell` crossings), and references trigger shallow interning
// plus queueing of the pointee for later traversal.
146 impl<'rt, 'mir, 'tcx, M: CompileTimeMachine<'mir, 'tcx>> ValueVisitor<'mir, 'tcx, M>
147 for InternVisitor<'rt, 'mir, 'tcx, M>
149 type V = MPlaceTy<'tcx>;
152 fn ecx(&self) -> &InterpCx<'mir, 'tcx, M> {
// NOTE(review): the body of `ecx` and the `fn visit_aggregate(&mut self,`
// signature line are elided from this excerpt.
158 mplace: MPlaceTy<'tcx>,
159 fields: impl Iterator<Item = InterpResult<'tcx, Self::V>>,
160 ) -> InterpResult<'tcx> {
161 if let Some(def) = mplace.layout.ty.ty_adt_def() {
162 if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
163 // We are crossing over an `UnsafeCell`, we can mutate again. This means that
164 // References we encounter inside here are interned as pointing to mutable
166 let old = std::mem::replace(&mut self.mutability, Mutability::Mut);
170 "UnsafeCells are not allowed behind references in constants. This should have \
171 been prevented statically by const qualification. If this were allowed one \
172 would be able to change a constant at one use site and other use sites could \
173 observe that mutation.",
// Walk the cell's contents with mutability temporarily set to `Mut`, then
// restore the outer mutability regardless of the walk's result.
175 let walked = self.walk_aggregate(mplace, fields);
176 self.mutability = old;
180 self.walk_aggregate(mplace, fields)
183 fn visit_primitive(&mut self, mplace: MPlaceTy<'tcx>) -> InterpResult<'tcx> {
184 // Handle Reference types, as these are the only relocations supported by const eval.
185 // Raw pointers (and boxes) are handled by the `leftover_relocations` logic.
186 let ty = mplace.layout.ty;
187 if let ty::Ref(_, referenced_ty, mutability) = ty.kind {
188 let value = self.ecx.read_immediate(mplace.into())?;
189 let mplace = self.ecx.ref_to_mplace(value)?;
190 // Handle trait object vtables
191 if let ty::Dynamic(..) =
192 self.ecx.tcx.struct_tail_erasing_lifetimes(referenced_ty, self.ecx.param_env).kind
194 // Validation has already errored on an invalid vtable pointer so we can safely not
195 // do anything if this is not a real pointer
196 if let Scalar::Ptr(vtable) = mplace.meta.unwrap() {
197 // explicitly choose `Immutable` here, since vtables are immutable, even
198 // if the reference of the fat pointer is mutable
199 self.intern_shallow(vtable.alloc_id, Mutability::Not, None)?;
// NOTE(review): the `else` branch header is elided here; this delayed bug
// fires for integer-valued vtable "pointers".
201 self.ecx().tcx.sess.delay_span_bug(
202 syntax_pos::DUMMY_SP,
203 "vtables pointers cannot be integer pointers",
207 // Check if we have encountered this pointer+layout combination before.
208 // Only recurse for allocation-backed pointers.
209 if let Scalar::Ptr(ptr) = mplace.ptr {
210 // We do not have any `frozen` logic here, because it's essentially equivalent to
211 // the mutability except for the outermost item. Only `UnsafeCell` can "unfreeze",
212 // and we check that in `visit_aggregate`.
213 // This is not an inherent limitation, but one that we know to be true, because
214 // const qualification enforces it. We can lift it in the future.
215 match (self.mode, mutability) {
216 // immutable references are fine everywhere
217 (_, hir::Mutability::Not) => {}
218 // all is "good and well" in the unsoundness of `static mut`
220 // mutable references are ok in `static`. Either they are treated as immutable
221 // because they are behind an immutable one, or they are behind an `UnsafeCell`
223 (InternMode::Static, hir::Mutability::Mut) => {}
224 // we statically prevent `&mut T` via `const_qualif` and double check this here
225 (InternMode::ConstBase, hir::Mutability::Mut)
226 | (InternMode::Const, hir::Mutability::Mut) => match referenced_ty.kind {
// Zero-length arrays/slices behind `&mut` carry no memory, so they are the
// one allowed exception in constants.
228 if n.eval_usize(self.ecx.tcx.tcx, self.ecx.param_env) == 0 => {}
229 ty::Slice(_) if mplace.meta.unwrap().to_machine_usize(self.ecx)? == 0 => {}
230 _ => bug!("const qualif failed to prevent mutable references"),
233 // Compute the mutability with which we'll start visiting the allocation. This is
234 // what gets changed when we encounter an `UnsafeCell`.
236 // The only way a mutable reference actually works as a mutable reference is
237 // by being in a `static mut` directly or behind another mutable reference.
238 // If there's an immutable reference or we are inside a static, then our
239 // mutable reference is equivalent to an immutable one. As an example:
240 // `&&mut Foo` is semantically equivalent to `&&Foo`
241 let mutability = self.mutability.and(mutability);
242 // Recursing behind references changes the intern mode for constants in order to
243 // cause assertions to trigger if we encounter any `UnsafeCell`s.
244 let mode = match self.mode {
245 InternMode::ConstBase => InternMode::Const,
// NOTE(review): the remaining arms (identity cases for `Const`/`Static`/
// `StaticMut`) are elided from this excerpt.
248 match self.intern_shallow(ptr.alloc_id, mutability, Some(mplace.layout.ty))? {
249 // No need to recurse, these are interned already and statics may have
250 // cycles, so we don't want to recurse there
251 Some(IsStaticOrFn) => {}
252 // intern everything referenced by this value. The mutability is taken from the
253 // reference. It is checked above that mutable references only happen in
255 None => self.ref_tracking.track((mplace, mutability, mode), || ()),
// Entry point of this module: after const evaluation finished, intern the
// result `ret` and everything reachable from it into the global alloc map.
// Phase 1 walks the value type-directedly; phase 2 sweeps up every allocation
// that was only reachable through raw pointers, unions, or padding.
263 pub fn intern_const_alloc_recursive<M: CompileTimeMachine<'mir, 'tcx>>(
264 ecx: &mut InterpCx<'mir, 'tcx, M>,
265 // The `mutability` of the place, ignoring the type.
266 place_mut: Option<hir::Mutability>,
// NOTE(review): the `ret: MPlaceTy<'tcx>` parameter appears to be elided from
// this excerpt (it is used below as `ret.ptr`).
268 ) -> InterpResult<'tcx> {
// `Some(...)` means we are interning a static (its declared mutability);
// `None` means a const or promoted, which always starts out immutable.
270 let (base_mutability, base_intern_mode) = match place_mut {
271 // `static mut` doesn't care about interior mutability, it's mutable anyway
272 Some(mutbl) => (mutbl, InternMode::Static),
273 // consts, promoteds. FIXME: what about array lengths, array initializers?
274 None => (Mutability::Not, InternMode::ConstBase),
277 // Type based interning.
278 // `ref_tracking` tracks typed references we have seen and still need to crawl for
279 // more typed information inside them.
280 // `leftover_allocations` collects *all* allocations we see, because some might not
281 // be available in a typed way. They get interned at the end.
282 let mut ref_tracking = RefTracking::new((ret, base_mutability, base_intern_mode));
283 let leftover_allocations = &mut FxHashSet::default();
285 // start with the outermost allocation
// NOTE(review): the `intern_shallow(...)` call header around these arguments is
// elided from this excerpt.
288 leftover_allocations,
290 // The outermost allocation must exist, because we allocated it with
291 // `Memory::allocate`.
292 ret.ptr.assert_ptr().alloc_id,
// Drain the worklist of typed references discovered so far; visiting each one
// may push further references onto `ref_tracking.todo`.
297 while let Some(((mplace, mutability, mode), _)) = ref_tracking.todo.pop() {
298 let interned = InternVisitor {
299 ref_tracking: &mut ref_tracking,
302 leftover_allocations,
305 .visit_value(mplace);
306 if let Err(error) = interned {
307 // This can happen when e.g. the tag of an enum is not a valid discriminant. We do have
308 // to read enum discriminants in order to find references in enum variant fields.
309 if let err_unsup!(ValidationFailure(_)) = error.kind {
310 let err = crate::const_eval::error_to_const_error(&ecx, error);
311 match err.struct_error(ecx.tcx, "it is undefined behavior to use this value") {
313 diag.note(crate::const_eval::note_on_undefined_behavior_error());
316 Err(ErrorHandled::TooGeneric) | Err(ErrorHandled::Reported) => {}
322 // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
323 // pointers, ... So we can't intern them according to their type rules
325 let mut todo: Vec<_> = leftover_allocations.iter().cloned().collect();
326 while let Some(alloc_id) = todo.pop() {
327 if let Some((_, mut alloc)) = ecx.memory.alloc_map.remove(&alloc_id) {
328 // We can't call the `intern_shallow` method here, as its logic is tailored to safe
329 // references and a `leftover_allocations` set (where we only have a todo-list here).
330 // So we hand-roll the interning logic here again.
331 match base_intern_mode {
332 InternMode::Static => {}
333 InternMode::Const | InternMode::ConstBase => {
334 // If it's not a static, it *must* be immutable.
335 // We cannot have mutable memory inside a constant.
336 // We use `delay_span_bug` here, because this can be reached in the presence
337 // of fancy transmutes.
338 if alloc.mutability == Mutability::Mut {
339 // For better errors later, mark the allocation as immutable
340 // (on top of the delayed ICE).
341 alloc.mutability = Mutability::Not;
342 ecx.tcx.sess.delay_span_bug(ecx.tcx.span, "mutable allocation in constant");
// Newly discovered relocations go onto the todo list so that transitively
// reachable allocations get interned as well.
346 let alloc = tcx.intern_const_alloc(alloc);
347 tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
348 for &(_, ((), reloc)) in alloc.relocations().iter() {
349 if leftover_allocations.insert(reloc) {
353 } else if ecx.memory.dead_alloc_map.contains_key(&alloc_id) {
// A pointer into an allocation that was deallocated during evaluation: the
// final value would dangle, so reject it as a validation failure.
355 throw_unsup!(ValidationFailure("encountered dangling pointer in final constant".into()))
356 } else if ecx.tcx.alloc_map.lock().get(alloc_id).is_none() {
// Not local, not dead, not global: this alloc id came from nowhere — ICE.
357 span_bug!(ecx.tcx.span, "encountered unknown alloc id {:?}", alloc_id);
// NOTE(review): the function's tail (remaining braces / return) runs past the
// end of this excerpt.