1 //! This module specifies the type-based interner for constants.
3 //! After a const evaluation has computed a value, before we destroy the const evaluator's session
4 //! memory, we need to extract all memory allocations to the global memory pool so they stay around.
6 use rustc::ty::{Ty, TyCtxt, ParamEnv, self};
7 use rustc::mir::interpret::{InterpResult, ErrorHandled};
9 use rustc::hir::def_id::DefId;
10 use super::validity::RefTracking;
11 use rustc_data_structures::fx::FxHashSet;
13 use syntax::ast::Mutability;
17 ValueVisitor, MemoryKind, Pointer, AllocId, MPlaceTy, Scalar,
19 use crate::const_eval::{CompileTimeInterpreter, CompileTimeEvalContext};
/// Visitor that walks a const's final value and moves every allocation it references
/// out of the interpreter-local memory into the global (`tcx`) allocation map.
21 struct InternVisitor<'rt, 'mir, 'tcx> {
22 /// previously encountered safe references
23 ref_tracking: &'rt mut RefTracking<(MPlaceTy<'tcx>, Mutability, InternMode)>,
24 ecx: &'rt mut CompileTimeEvalContext<'mir, 'tcx>,
25 param_env: ParamEnv<'tcx>,
26 /// The root node of the value that we're looking at. This field is never mutated and only used
27 /// for sanity assertions that will ICE when `const_qualif` screws up.
// NOTE(review): a `mode: InternMode` field appears to be elided from this view — `self.mode`
// is read later in this file (e.g. in `visit_primitive`); confirm against the full source.
29 /// This field stores the mutability of the value *currently* being checked.
30 /// It is set to mutable when an `UnsafeCell` is encountered
31 /// When recursing across a reference, we don't recurse but store the
32 /// value to be checked in `ref_tracking` together with the mutability at which we are checking
34 /// When encountering an immutable reference, we treat everything as immutable that is behind
36 mutability: Mutability,
37 /// A list of all encountered relocations. After type-based interning, we traverse this list to
38 /// also intern allocations that are only referenced by a raw pointer or inside a union.
39 leftover_relocations: &'rt mut FxHashSet<AllocId>,
42 #[derive(Copy, Clone, Debug, PartialEq, Hash, Eq)]
// NOTE(review): the `enum InternMode { ... }` declaration itself is elided from this view;
// the doc comments below describe its variants. `Static`, `ConstBase` and `Const` are the
// variants used later in this file — confirm against the full source.
44 /// Mutable references must in fact be immutable due to their surrounding immutability in a
45 /// `static`. In a `static mut` we start out as mutable and thus can also contain further `&mut`
46 /// that will actually be treated as mutable.
48 /// UnsafeCell is OK in the value of a constant, but not behind references in a constant
54 /// Signalling data structure to ensure we don't recurse
55 /// into the memory of other constants or statics
58 impl<'rt, 'mir, 'tcx> InternVisitor<'rt, 'mir, 'tcx> {
59 /// Intern an allocation without looking at its children
// Moves the allocation behind `ptr` from the interpreter-local `alloc_map` into the global
// `tcx.alloc_map` with the given mutability, and queues its relocations for the leftover
// sweep. Returns `Ok(Some(IsStaticOrFn))` when there is nothing (more) to recurse into.
// NOTE(review): the `fn intern_shallow(...)` signature line carrying the `ptr` parameter is
// elided from this view; the name is inferred from the call sites later in this file.
63 mutability: Mutability,
64 ) -> InterpResult<'tcx, Option<IsStaticOrFn>> {
66 "InternVisitor::intern {:?} with {:?}",
// Remove the allocation from local memory; interning only applies to locally-owned allocs.
70 let tcx = self.ecx.tcx;
71 let memory = self.ecx.memory_mut();
72 let (kind, mut alloc) = match memory.alloc_map.remove(&ptr.alloc_id) {
75 // if the pointer is dangling (neither in local nor global memory), we leave it
76 // to validation to error. The `delay_span_bug` ensures that we don't forget such
77 // a check in validation.
78 if tcx.alloc_map.lock().get(ptr.alloc_id).is_none() {
79 tcx.sess.delay_span_bug(self.ecx.tcx.span, "tried to intern dangling pointer");
81 // treat dangling pointers like other statics
82 // just to stop trying to recurse into them
83 return Ok(Some(IsStaticOrFn));
86 // This match is just a canary for future changes to `MemoryKind`, which most likely need
87 // changes in this function.
89 MemoryKind::Stack | MemoryKind::Vtable => {},
91 // Ensure llvm knows to only put this into immutable memory if the value is immutable either
92 // by being behind a reference or by being part of a static or const without interior
94 alloc.mutability = mutability;
95 // link the alloc id to the actual allocation
96 let alloc = tcx.intern_const_alloc(alloc);
// Queue every alloc id this allocation points to; anything the type-based walk does not
// reach is interned as mutable in `intern_const_alloc_recursive`'s leftover loop.
97 self.leftover_relocations.extend(alloc.relocations.iter().map(|&(_, ((), reloc))| reloc));
98 tcx.alloc_map.lock().set_alloc_id_memory(ptr.alloc_id, alloc);
103 impl<'rt, 'mir, 'tcx>
104 ValueVisitor<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>
106 InternVisitor<'rt, 'mir, 'tcx>
108 type V = MPlaceTy<'tcx>;
111 fn ecx(&self) -> &CompileTimeEvalContext<'mir, 'tcx> {
// Visit an aggregate's fields. The only special case is `UnsafeCell`: crossing into one
// makes the interior mutable again, and must never happen behind a reference in a const.
// NOTE(review): the `fn visit_aggregate(...)` signature line is elided from this view;
// the parameter list below and the `walk_aggregate` calls identify the method.
117 mplace: MPlaceTy<'tcx>,
118 fields: impl Iterator<Item=InterpResult<'tcx, Self::V>>,
119 ) -> InterpResult<'tcx> {
120 if let Some(def) = mplace.layout.ty.ty_adt_def() {
121 if Some(def.did) == self.ecx.tcx.lang_items().unsafe_cell_type() {
122 // We are crossing over an `UnsafeCell`, we can mutate again
123 let old = std::mem::replace(&mut self.mutability, Mutability::Mutable);
// `const_qualif` must already have rejected `UnsafeCell` behind a reference in a
// const; reaching here in `InternMode::Const` is a compiler bug (assertion below).
125 self.mode, InternMode::Const,
126 "UnsafeCells are not allowed behind references in constants. This should have \
127 been prevented statically by const qualification. If this were allowed one \
128 would be able to change a constant at one use site and other use sites may \
129 arbitrarily decide to change, too.",
// Walk the fields with the relaxed (mutable) mutability, then restore the old one.
131 let walked = self.walk_aggregate(mplace, fields);
132 self.mutability = old;
136 self.walk_aggregate(mplace, fields)
139 fn visit_primitive(&mut self, mplace: MPlaceTy<'tcx>) -> InterpResult<'tcx> {
140 // Handle Reference types, as these are the only relocations supported by const eval.
141 // Raw pointers (and boxes) are handled by the `leftover_relocations` logic.
142 let ty = mplace.layout.ty;
143 if let ty::Ref(_, referenced_ty, mutability) = ty.sty {
144 let value = self.ecx.read_immediate(mplace.into())?;
145 // Handle trait object vtables
146 if let Ok(meta) = value.to_meta() {
147 if let ty::Dynamic(..) =
148 self.ecx.tcx.struct_tail_erasing_lifetimes(referenced_ty, self.param_env).sty
150 if let Ok(vtable) = meta.unwrap().to_ptr() {
151 // explicitly choose `Immutable` here, since vtables are immutable, even
152 // if the reference of the fat pointer is mutable
153 self.intern_shallow(vtable, Mutability::Immutable)?;
157 let mplace = self.ecx.ref_to_mplace(value)?;
158 // Check if we have encountered this pointer+layout combination before.
159 // Only recurse for allocation-backed pointers.
160 if let Scalar::Ptr(ptr) = mplace.ptr {
161 // We do not have any `frozen` logic here, because it's essentially equivalent to
162 // the mutability except for the outermost item. Only `UnsafeCell` can "unfreeze",
163 // and we check that in `visit_aggregate`.
164 // This is not an inherent limitation, but one that we know to be true, because
165 // const qualification enforces it. We can lift it in the future.
166 match (self.mode, mutability) {
167 // immutable references are fine everywhere
168 (_, hir::Mutability::MutImmutable) => {},
169 // all is "good and well" in the unsoundness of `static mut`
171 // mutable references are ok in `static`. Either they are treated as immutable
172 // because they are behind an immutable one, or they are behind an `UnsafeCell`
174 (InternMode::Static, hir::Mutability::MutMutable) => {},
175 // we statically prevent `&mut T` via `const_qualif` and double check this here
176 (InternMode::ConstBase, hir::Mutability::MutMutable) |
177 (InternMode::Const, hir::Mutability::MutMutable) => {
// The only `&mut` tolerated in a const points at zero-sized data: a zero-length
// array (length checked via `eval_usize`) or an empty slice (length read from
// the fat pointer's metadata). Anything else is a const-qualif bug.
178 match referenced_ty.sty {
180 if n.eval_usize(self.ecx.tcx.tcx, self.param_env) == 0 => {}
182 if value.to_meta().unwrap().unwrap().to_usize(self.ecx)? == 0 => {}
183 _ => bug!("const qualif failed to prevent mutable references"),
187 // Compute the mutability with which we'll start visiting the allocation. This is
188 // what gets changed when we encounter an `UnsafeCell`
189 let mutability = match (self.mutability, mutability) {
190 // The only way a mutable reference actually works as a mutable reference is
191 // by being in a `static mut` directly or behind another mutable reference.
192 // If there's an immutable reference or we are inside a static, then our
193 // mutable reference is equivalent to an immutable one. As an example:
194 // `&&mut Foo` is semantically equivalent to `&&Foo`
195 (Mutability::Mutable, hir::Mutability::MutMutable) => Mutability::Mutable,
196 _ => Mutability::Immutable,
198 // Compute the mutability of the allocation
199 let intern_mutability = intern_mutability(
206 // Recursing behind references changes the intern mode for constants in order to
207 // cause assertions to trigger if we encounter any `UnsafeCell`s.
208 let mode = match self.mode {
209 InternMode::ConstBase => InternMode::Const,
212 match self.intern_shallow(ptr, intern_mutability)? {
213 // No need to recurse, these are interned already and statics may have
214 // cycles, so we don't want to recurse there
215 Some(IsStaticOrFn) => {},
216 // intern everything referenced by this value. The mutability is taken from the
217 // reference. It is checked above that mutable references only happen in
// `ref_tracking` deduplicates (place, mutability, mode) triples, so reference
// cycles terminate instead of recursing forever.
219 None => self.ref_tracking.track((mplace, mutability, mode), || ()),
227 /// Figure out the mutability of the allocation.
228 /// Mutable if it has interior mutability *anywhere* in the type.
229 fn intern_mutability<'tcx>(
// NOTE(review): the `tcx`, `ty` and `span` parameter lines are elided from this view; the
// call site in `intern_const_alloc_recursive` passes `(tcx, param_env, ty, span, mutability)`.
231 param_env: ParamEnv<'tcx>,
234 mutability: Mutability,
// `is_freeze` is false when the type contains an `UnsafeCell` anywhere; then the whole
// allocation must be interned as mutable regardless of the place's own `mutability`.
236 let has_interior_mutability = !ty.is_freeze(tcx, param_env, span);
237 if has_interior_mutability {
/// Entry point: interns the final value of a const/static (type-based walk over the root
/// value), then sweeps up all leftover allocations that are only reachable through raw
/// pointers, unions or padding, interning those as mutable.
// NOTE(review): the `def_id` and `ret` (root MPlaceTy) parameter lines are elided from this
// view; both are used below — confirm against the full source.
244 pub fn intern_const_alloc_recursive(
245 ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
248 // FIXME(oli-obk): can we scrap the param env? I think we can, the final value of a const eval
249 // must always be monomorphic, right?
250 param_env: ty::ParamEnv<'tcx>,
251 ) -> InterpResult<'tcx> {
253 // this `mutability` is the mutability of the place, ignoring the type
254 let (mutability, base_intern_mode) = match tcx.static_mutability(def_id) {
255 Some(hir::Mutability::MutImmutable) => (Mutability::Immutable, InternMode::Static),
// `static_mutability` returns `None` when `def_id` is not a static, i.e. for consts.
256 None => (Mutability::Immutable, InternMode::ConstBase),
257 // `static mut` doesn't care about interior mutability, it's mutable anyway
258 Some(hir::Mutability::MutMutable) => (Mutability::Mutable, InternMode::Static),
261 // type based interning
262 let mut ref_tracking = RefTracking::new((ret, mutability, base_intern_mode));
263 let leftover_relocations = &mut FxHashSet::default();
265 // This mutability is the combination of the place mutability and the type mutability. If either
266 // is mutable, `alloc_mutability` is mutable. This exists because the entire allocation needs
267 // to be mutable if it contains an `UnsafeCell` anywhere. The other `mutability` exists so that
268 // the visitor does not treat everything outside the `UnsafeCell` as mutable.
269 let alloc_mutability = intern_mutability(
270 tcx.tcx, param_env, ret.layout.ty, tcx.span, mutability,
273 // start with the outermost allocation
275 ref_tracking: &mut ref_tracking,
277 mode: base_intern_mode,
278 leftover_relocations,
281 }.intern_shallow(ret.ptr.to_ptr()?, alloc_mutability)?;
// Drain the worklist of references discovered so far; each visit may push more entries.
// `RefTracking` deduplicates, so cycles (possible through statics) terminate.
283 while let Some(((mplace, mutability, mode), _)) = ref_tracking.todo.pop() {
284 let interned = InternVisitor {
285 ref_tracking: &mut ref_tracking,
288 leftover_relocations,
291 }.visit_value(mplace);
292 if let Err(error) = interned {
293 // This can happen when e.g. the tag of an enum is not a valid discriminant. We do have
294 // to read enum discriminants in order to find references in enum variant fields.
295 if let err_unsup!(ValidationFailure(_)) = error.kind {
296 let err = crate::const_eval::error_to_const_error(&ecx, error);
297 match err.struct_error(ecx.tcx, "it is undefined behavior to use this value") {
299 diag.note(crate::const_eval::note_on_undefined_behavior_error());
// Already-reported (or too-generic) errors need no additional diagnostics here.
302 Err(ErrorHandled::TooGeneric) |
303 Err(ErrorHandled::Reported) => {},
309 // Intern the rest of the allocations as mutable. These might be inside unions, padding, raw
310 // pointers, ... So we can't intern them according to their type rules
312 let mut todo: Vec<_> = leftover_relocations.iter().cloned().collect();
313 while let Some(alloc_id) = todo.pop() {
314 if let Some((_, alloc)) = ecx.memory_mut().alloc_map.remove(&alloc_id) {
315 // We can't call the `intern` method here, as its logic is tailored to safe references.
316 // So we hand-roll the interning logic here again
317 let alloc = tcx.intern_const_alloc(alloc);
318 tcx.alloc_map.lock().set_alloc_id_memory(alloc_id, alloc);
319 for &(_, ((), reloc)) in alloc.relocations.iter() {
// Queue relocations we haven't seen yet; `insert` returns true exactly when the
// alloc id is new to the set, preventing re-processing.
320 if leftover_relocations.insert(reloc) {
324 } else if ecx.memory().dead_alloc_map.contains_key(&alloc_id) {
// A pointer into deallocated memory can never be part of a valid final constant.
326 throw_unsup!(ValidationFailure("encountered dangling pointer in final constant".into()))