1 //! This module specifies the type-based interner for constants.
3 //! After a const evaluation has computed a value, before we destroy the const evaluator's session
4 //! memory, we need to extract all memory allocations to the global memory pool so they stay around.
6 use rustc::ty::layout::LayoutOf;
7 use rustc::ty::{Ty, TyCtxt, ParamEnv, self};
8 use rustc::mir::interpret::{
9 InterpResult, ErrorHandled,
12 use rustc::hir::def_id::DefId;
13 use super::validity::RefTracking;
14 use rustc_data_structures::fx::FxHashSet;
16 use syntax::ast::Mutability;
20 ValueVisitor, MemoryKind, Pointer, AllocId, MPlaceTy, InterpError, Scalar,
22 use crate::const_eval::{CompileTimeInterpreter, CompileTimeEvalContext};
/// Visitor state for type-based interning of a constant's final value.
/// Walks the value, moving each allocation it reaches into the global
/// allocation map while tracking the mutability it must be interned at.
24 struct InternVisitor<'rt, 'mir, 'tcx> {
25 /// previously encountered safe references
26 ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>,
27 ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>,
28 param_env: ParamEnv<'tcx>,
29 /// The root node of the value that we're looking at. This field is never mutated and only used
30 /// for sanity assertions that will ICE when `const_qualif` screws up.
// NOTE(review): the field declaration this doc comment belongs to (the
// `mode: InternMode` field, which is read as `self.mode` below) is not
// visible in this view — it appears to have been dropped during extraction.
32 /// This field stores the mutability of the value *currently* being checked.
33 /// It is set to mutable when an `UnsafeCell` is encountered
34 /// When recursing across a reference, we don't recurse but store the
35 /// value to be checked in `ref_tracking` together with the mutability at which we are checking
37 /// When encountering an immutable reference, we treat everything as immutable that is behind
39 mutability: Mutability,
40 /// A list of all encountered relocations. After type-based interning, we traverse this list to
41 /// also intern allocations that are only referenced by a raw pointer or inside a union.
42 leftover_relocations: &'rt mut FxHashSet<AllocId>,
// NOTE(review): the struct's closing brace line is not visible in this view.
// NOTE(review): the `enum InternMode` declaration and its variant lines are
// not visible in this view; the derive attribute and the per-variant doc
// comments below have lost the item lines they were attached to. From usage
// elsewhere in the file, the variants referenced are `Static`, `ConstBase`,
// and `Const` — TODO confirm against the full source.
45 #[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
47 /// Mutable references must in fact be immutable due to their surrounding immutability in a
48 /// `static`. In a `static mut` we start out as mutable and thus can also contain further `&mut`
49 /// that will actually be treated as mutable.
51 /// UnsafeCell is OK in the value of a constant, but not behind references in a constant
// NOTE(review): the following doc comment belongs to the `IsStaticOrFn`
// marker type (used as `Ok(Some(IsStaticOrFn))` below); its declaration
// line is also missing from this view.
57 /// Signalling data structure to ensure we don't recurse
58 /// into the memory of other constants or statics
61 impl<'rt, 'mir, 'tcx> InternVisitor<'rt, 'mir, 'tcx> {
62 /// Intern an allocation without looking at its children
// NOTE(review): the `fn intern_shallow` signature line and its `ptr`
// parameter are not visible in this view; only the trailing `mutability`
// parameter and return type survive below.
66 mutability: Mutability,
67 ) -> InterpResult<'tcx, Option<IsStaticOrFn>> {
69 "InternVisitor::intern {:?} with {:?}",
73 let tcx = self.ecx.tcx;
74 let memory = self.ecx.memory_mut();
// Remove the allocation from the interpreter's local memory so it can be
// moved into the global (tcx) allocation map below.
75 let (kind, mut alloc) = match memory.alloc_map.remove(&ptr.alloc_id) {
78 // if the pointer is dangling (neither in local nor global memory), we leave it
79 // to validation to error. The `delay_span_bug` ensures that we don't forget such
80 // a check in validation.
81 if tcx.alloc_map.lock().get(ptr.alloc_id).is_none() {
82 tcx.sess.delay_span_bug(self.ecx.tcx.span, "tried to intern dangling pointer");
84 // treat dangling pointers like other statics
85 // just to stop trying to recurse into them
86 return Ok(Some(IsStaticOrFn));
89 // This match is just a canary for future changes to `MemoryKind`, which most likely need
90 // changes in this function.
92 MemoryKind::Stack | MemoryKind::Vtable => {},
94 // Ensure llvm knows to only put this into immutable memory if the value is immutable either
95 // by being behind a reference or by being part of a static or const without interior
97 alloc.mutability = mutability;
98 // link the alloc id to the actual allocation
99 let alloc = tcx.intern_const_alloc(alloc);
// Queue every relocation (pointer stored inside this allocation) so those
// allocations get interned too, even if no type-based path reaches them.
100 self.leftover_relocations.extend(alloc.relocations.iter().map(|&(_, ((), reloc))| reloc));
101 tcx.alloc_map.lock().set_alloc_id_memory(ptr.alloc_id, alloc);
// NOTE(review): the function's return expression and the closing braces of
// the function and the impl block are not visible in this view.
// Type-based traversal: the `ValueVisitor` trait drives a walk over the
// constant's value; this impl hooks `UnsafeCell` detection (aggregates) and
// reference-following (primitives) into that walk.
106 impl<'rt, 'mir, 'tcx>
107 ValueVisitor<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>
109 InternVisitor<'rt, 'mir, 'tcx>
111 type V = MPlaceTy<'tcx>;
114 fn ecx(&self) -> &CompileTimeEvalContext<'mir, 'tcx> {
// NOTE(review): the `fn visit_aggregate` signature line is not visible in
// this view; the parameter list below belongs to it.
120 mplace: MPlaceTy<'tcx>,
121 fields: impl Iterator<Item=InterpResult<'tcx, Self::V>>,
122 ) -> InterpResult<'tcx> {
123 if let Some(def) = mplace.layout.ty.ty_adt_def() {
124 if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
125 // We are crossing over an `UnsafeCell`, we can mutate again
126 let old = std::mem::replace(&mut self.mutability, Mutability::Mutable);
// Sanity check: const qualification must have rejected `UnsafeCell`
// behind a reference in a `const` before we ever get here.
128 self.mode, InternMode::Const,
129 "UnsafeCells are not allowed behind references in constants. This should have \
130 been prevented statically by const qualification. If this were allowed one \
131 would be able to change a constant at one use site and other use sites may \
132 arbitrarily decide to change, too.",
134 let walked = self.walk_aggregate(mplace, fields);
// Restore the pre-`UnsafeCell` mutability once the subtree is walked.
135 self.mutability = old;
139 self.walk_aggregate(mplace, fields)
142 fn visit_primitive(&mut self, mplace: MPlaceTy<'tcx>) -> InterpResult<'tcx> {
143 // Handle Reference types, as these are the only relocations supported by const eval.
144 // Raw pointers (and boxes) are handled by the `leftover_relocations` logic.
145 let ty = mplace.layout.ty;
146 if let ty::Ref(_, _, mutability) = ty.sty {
147 let value = self.ecx.read_immediate(mplace.into())?;
148 // Handle trait object vtables
149 if let Ok(meta) = value.to_meta() {
150 let layout = self.ecx.layout_of(ty.builtin_deref(true).unwrap().ty)?;
151 if layout.is_unsized() {
152 if let ty::Dynamic(..) = self.ecx.tcx.struct_tail(layout.ty).sty {
153 if let Ok(vtable) = meta.unwrap().to_ptr() {
154 // explicitly choose `Immutable` here, since vtables are immutable, even
155 // if the reference of the fat pointer is mutable
156 self.intern_shallow(vtable, Mutability::Immutable)?;
161 let mplace = self.ecx.ref_to_mplace(value)?;
162 // Check if we have encountered this pointer+layout combination before.
163 // Only recurse for allocation-backed pointers.
164 if let Scalar::Ptr(ptr) = mplace.ptr {
165 // We do not have any `frozen` logic here, because it's essentially equivalent to
166 // the mutability except for the outermost item. Only `UnsafeCell` can "unfreeze",
167 // and we check that in `visit_aggregate`.
168 // This is not an inherent limitation, but one that we know to be true, because
169 // const qualification enforces it. We can lift it in the future.
170 match (self.mode, mutability) {
171 // immutable references are fine everywhere
172 (_, hir::Mutability::MutImmutable) => {},
173 // all is "good and well" in the unsoundness of `static mut`
175 // mutable references are ok in `static`. Either they are treated as immutable
176 // because they are behind an immutable one, or they are behind an `UnsafeCell`
178 (InternMode::Static, hir::Mutability::MutMutable) => {},
179 // we statically prevent `&mut T` via `const_qualif` and double check this here
180 (InternMode::ConstBase, hir::Mutability::MutMutable) |
181 (InternMode::Const, hir::Mutability::MutMutable) =>
182 bug!("const qualif failed to prevent mutable references"),
184 // Compute the mutability with which we'll start visiting the allocation. This is
185 // what gets changed when we encounter an `UnsafeCell`
186 let mutability = match (self.mutability, mutability) {
187 // The only way a mutable reference actually works as a mutable reference is
188 // by being in a `static mut` directly or behind another mutable reference.
189 // If there's an immutable reference or we are inside a static, then our
190 // mutable reference is equivalent to an immutable one. As an example:
191 // `&&mut Foo` is semantically equivalent to `&&Foo`
192 (Mutability::Mutable, hir::Mutability::MutMutable) => Mutability::Mutable,
193 _ => Mutability::Immutable,
195 // Compute the mutability of the allocation
196 let intern_mutability = intern_mutability(
// NOTE(review): the argument lines of this `intern_mutability` call are
// not visible in this view.
203 // Recursing behind references changes the intern mode for constants in order to
204 // cause assertions to trigger if we encounter any `UnsafeCell`s.
205 let mode = match self.mode {
206 InternMode::ConstBase => InternMode::Const,
209 match self.intern_shallow(ptr, intern_mutability)? {
210 // No need to recurse, these are interned already and statics may have
211 // cycles, so we don't want to recurse there
212 Some(IsStaticOrFn) => {},
213 // intern everything referenced by this value. The mutability is taken from the
214 // reference. It is checked above that mutable references only happen in
216 None => self.ref_tracking.track((mplace, mutability, mode), || ()),
// NOTE(review): the closing braces of `visit_primitive` and of this impl
// block are not visible in this view.
224 /// Figure out the mutability of the allocation.
225 /// Mutable if it has interior mutability *anywhere* in the type.
// NOTE(review): several parameter lines (a `TyCtxt`, the `ty` being checked,
// and a `span`, judging by the `is_freeze(tcx, param_env, span)` call below)
// and the function's return path are not visible in this view.
226 fn intern_mutability<'tcx>(
228 param_env: ParamEnv<'tcx>,
231 mutability: Mutability,
// `is_freeze` is false exactly when the type contains an `UnsafeCell`
// somewhere, i.e. when it has interior mutability.
233 let has_interior_mutability = !ty.is_freeze(tcx, param_env, span);
234 if has_interior_mutability {
/// Entry point: interns the final value of a const evaluation. Moves every
/// reachable allocation out of the interpreter's local memory into the
/// global `tcx` allocation map, first by a type-driven walk (safe
/// references), then by a raw sweep over `leftover_relocations`.
// NOTE(review): some signature lines (notably the `def_id` and `ret`
// parameters used below) and several closing-brace lines are not visible in
// this view.
241 pub fn intern_const_alloc_recursive(
242 ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
245 // FIXME(oli-obk): can we scrap the param env? I think we can, the final value of a const eval
246 // must always be monomorphic, right?
247 param_env: ty::ParamEnv<'tcx>,
248 ) -> InterpResult<'tcx> {
250 // this `mutability` is the mutability of the place, ignoring the type
251 let (mutability, base_intern_mode) = match tcx.static_mutability(def_id) {
252 Some(hir::Mutability::MutImmutable) => (Mutability::Immutable, InternMode::Static),
253 None => (Mutability::Immutable, InternMode::ConstBase),
254 // `static mut` doesn't care about interior mutability, it's mutable anyway
255 Some(hir::Mutability::MutMutable) => (Mutability::Mutable, InternMode::Static),
258 // type based interning
259 let mut ref_tracking = RefTracking::new((ret, mutability, base_intern_mode));
260 let leftover_relocations = &mut FxHashSet::default();
262 // This mutability is the combination of the place mutability and the type mutability. If either
263 // is mutable, `alloc_mutability` is mutable. This exists because the entire allocation needs
264 // to be mutable if it contains an `UnsafeCell` anywhere. The other `mutability` exists so that
265 // the visitor does not treat everything outside the `UnsafeCell` as mutable.
266 let alloc_mutability = intern_mutability(
267 tcx.tcx, param_env, ret.layout.ty, tcx.span, mutability,
270 // start with the outermost allocation
272 ref_tracking: &mut ref_tracking,
274 mode: base_intern_mode,
275 leftover_relocations,
278 }.intern_shallow(ret.ptr.to_ptr()?, alloc_mutability)?;
// Drain the worklist of references discovered during the walk; each entry
// spawns a fresh visitor carrying the mutability/mode recorded for it.
280 while let Some(((mplace, mutability, mode), _)) = ref_tracking.todo.pop() {
281 let interned = InternVisitor {
282 ref_tracking: &mut ref_tracking,
285 leftover_relocations,
288 }.visit_value(mplace);
289 if let Err(error) = interned {
290 // This can happen when e.g. the tag of an enum is not a valid discriminant. We do have
291 // to read enum discriminants in order to find references in enum variant fields.
292 if let InterpError::ValidationFailure(_) = error.kind {
293 let err = crate::const_eval::error_to_const_error(&ecx, error);
294 match err.struct_error(ecx.tcx, "it is undefined behavior to use this value") {
296 diag.note("The rules on what exactly is undefined behavior aren't clear, \
297 so this check might be overzealous. Please open an issue on the rust \
298 compiler repository if you believe it should not be considered \
303 Err(ErrorHandled::TooGeneric) |
304 Err(ErrorHandled::Reported) => {},
310 // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
311 // pointers, ... So we can't intern them according to their type rules
313 let mut todo: Vec<_> = leftover_relocations.iter().cloned().collect();
314 while let Some(alloc_id) = todo.pop() {
315 if let Some((_, alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) {
316 // We can't call the `intern` method here, as its logic is tailored to safe references.
317 // So we hand-roll the interning logic here again
318 let alloc = tcx.intern_const_alloc(alloc);
319 tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
// New relocations found inside this allocation feed back into the
// worklist, so the sweep is transitive.
320 for &(_, ((), reloc)) in alloc.relocations.iter() {
321 if leftover_relocations.insert(reloc) {
325 } else if ecx.memory().dead_alloc_map.contains_key(&alloc_id) {
// A relocation pointing at a deallocated allocation means the final
// constant would contain a dangling pointer — reject it.
327 return err!(ValidationFailure(
328 "encountered dangling pointer in final constant".into(),