1 // Not in interpret to make sure we do not use private implementation details
5 use std::borrow::{Borrow, Cow};
7 use std::collections::hash_map::Entry;
9 use rustc::hir::{self, def_id::DefId};
10 use rustc::hir::def::Def;
11 use rustc::mir::interpret::{ConstEvalErr, ErrorHandled};
13 use rustc::ty::{self, TyCtxt, Instance, query::TyCtxtAt};
14 use rustc::ty::layout::{self, LayoutOf, TyLayout, VariantIdx};
15 use rustc::ty::subst::Subst;
16 use rustc::traits::Reveal;
17 use rustc_data_structures::indexed_vec::IndexVec;
18 use rustc_data_structures::fx::FxHashMap;
19 use rustc::util::common::ErrorReported;
21 use syntax::ast::Mutability;
22 use syntax::source_map::{Span, DUMMY_SP};
24 use crate::interpret::{self,
25 PlaceTy, MPlaceTy, MemPlace, OpTy, Operand, Immediate, Scalar, RawConst, ConstValue, Pointer,
26 EvalResult, EvalError, EvalErrorKind, GlobalId, EvalContext, StackPopCleanup,
27 Allocation, AllocId, MemoryKind,
28 snapshot, RefTracking,
// Tuning constants for the CTFE infinite-loop detector. They are consumed by
// `Machine::before_terminator` below via `steps_since_detector_enabled`, which
// starts at -STEPS_UNTIL_DETECTOR_ENABLED and counts upward (see the field
// docs on `CompileTimeInterpreter`).
31 /// Number of steps until the detector even starts doing anything.
32 /// Also, a warning is shown to the user when this number is reached.
33 const STEPS_UNTIL_DETECTOR_ENABLED: isize = 1_000_000;
34 /// The number of steps between loop detector snapshots.
35 /// Should be a power of two for performance reasons.
36 const DETECTOR_SNAPSHOT_PERIOD: isize = 256;
38 /// Warning: do not use this function if you expect to start interpreting the given `Mir`.
39 /// The `EvalContext` is only meant to be used to query values from constants and statics.
41 /// This function is used during const propagation. We cannot use `mk_eval_cx`, because copy
42 /// propagation happens *during* the computation of the MIR of the current function. So if we
43 /// tried to call the `optimized_mir` query, we'd get a cycle error because we are (transitively)
44 /// inside the `optimized_mir` query of the `Instance` given.
46 /// Since we are looking at the MIR of the function in an abstract manner, we don't have a
47 /// `ParamEnv` available to us. This function creates a `ParamEnv` for the given instance.
48 pub fn mk_borrowck_eval_cx<'a, 'mir, 'tcx>(
49 tcx: TyCtxt<'a, 'tcx, 'tcx>,
50 instance: Instance<'tcx>,
51 mir: &'mir mir::Mir<'tcx>,
// NOTE(review): a `span: Span` parameter (original line 52) is elided from this
// excerpt — `span` is passed through to `mk_eval_cx_inner` below.
53 ) -> EvalResult<'tcx, CompileTimeEvalContext<'a, 'mir, 'tcx>> {
54 debug!("mk_borrowck_eval_cx: {:?}", instance);
// Build the `ParamEnv` from the instance itself rather than taking one from
// the caller (see the doc comment above for why).
55 let param_env = tcx.param_env(instance.def_id());
56 mk_eval_cx_inner(tcx, instance, mir, span, param_env)
59 /// This is just a helper function to reduce code duplication between `mk_borrowck_eval_cx` and
60 /// `mk_eval_cx`. Do not call this function directly.
61 fn mk_eval_cx_inner<'a, 'mir, 'tcx>(
62 tcx: TyCtxt<'a, 'tcx, 'tcx>,
63 instance: Instance<'tcx>,
64 mir: &'mir mir::Mir<'tcx>,
// NOTE(review): a `span: Span` parameter (original line 65) is elided from this
// excerpt — `span` is used in `tcx.at(span)` below.
66 param_env: ty::ParamEnv<'tcx>,
67 ) -> EvalResult<'tcx, CompileTimeEvalContext<'a, 'mir, 'tcx>> {
68 let mut ecx = EvalContext::new(tcx.at(span), param_env, CompileTimeInterpreter::new());
69 // Insert a stack frame so any queries have the correct substs.
70 // We also avoid all the extra work performed by push_stack_frame,
71 // like initializing local variables
// Hand-construct a minimal frame instead of calling `push_stack_frame`: locals
// and local_layouts are left empty because this context is never stepped.
// NOTE(review): several `Frame` fields (original lines 76-79) are elided here.
72 ecx.stack.push(interpret::Frame {
73 block: mir::START_BLOCK,
74 locals: IndexVec::new(),
75 local_layouts: IndexVec::new(),
80 return_to_block: StackPopCleanup::Goto(None), // never pop
87 /// Warning: do not use this function if you expect to start interpreting the given `Mir`.
88 /// The `EvalContext` is only meant to be used to do field and index projections into constants for
89 /// `simd_shuffle` and const patterns in match arms.
91 /// The function containing the `match` that is currently being analyzed may have generic bounds
92 /// that inform us about the generic bounds of the constant. E.g. using an associated constant
93 /// of a function's generic parameter will require knowledge about the bounds on the generic
94 /// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument.
95 fn mk_eval_cx<'a, 'tcx>(
96 tcx: TyCtxt<'a, 'tcx, 'tcx>,
97 instance: Instance<'tcx>,
98 param_env: ty::ParamEnv<'tcx>,
99 ) -> EvalResult<'tcx, CompileTimeEvalContext<'a, 'tcx, 'tcx>> {
100 debug!("mk_eval_cx: {:?}, {:?}", instance, param_env);
101 let span = tcx.def_span(instance.def_id());
// Unlike `mk_borrowck_eval_cx`, it is safe to query `optimized_mir` here:
// this path is not reached from inside MIR construction (see doc above).
102 let mir = tcx.optimized_mir(instance.def.def_id());
103 mk_eval_cx_inner(tcx, instance, mir, span, param_env)
// Evaluate a promoted constant within the given (borrowck-phase) MIR.
// NOTE(review): a `cid: GlobalId<'tcx>` parameter (original line 108) is elided
// from this excerpt — `cid` is used below.
106 pub(crate) fn eval_promoted<'a, 'mir, 'tcx>(
107 tcx: TyCtxt<'a, 'tcx, 'tcx>,
109 mir: &'mir mir::Mir<'tcx>,
110 param_env: ty::ParamEnv<'tcx>,
111 ) -> EvalResult<'tcx, MPlaceTy<'tcx>> {
// `unwrap` here relies on `mk_borrowck_eval_cx` being infallible in practice;
// its only fallible parts are elided from this excerpt, so treat with care.
112 let mut ecx = mk_borrowck_eval_cx(tcx, cid.instance, mir, DUMMY_SP).unwrap();
113 eval_body_using_ecx(&mut ecx, cid, Some(mir), param_env)
116 // FIXME: These two conversion functions are bad hacks. We should just always use allocations.
// Convert an interpreter operand into a `ty::Const`. When normalization is
// requested and the layout permits it (scalar or fat-pointer ScalarPair), the
// value is read out as an immediate; otherwise the backing allocation is
// interned and returned by reference.
// NOTE(review): the parameters `op: OpTy` and `may_normalize: bool` (original
// lines 119-120) are elided from this excerpt but are referenced below.
117 pub fn op_to_const<'tcx>(
118 ecx: &CompileTimeEvalContext<'_, '_, 'tcx>,
121 ) -> EvalResult<'tcx, ty::Const<'tcx>> {
122 // We do not normalize just any data. Only scalar layout and fat pointers.
123 let normalize = may_normalize
124 && match op.layout.abi {
125 layout::Abi::Scalar(..) => true,
126 layout::Abi::ScalarPair(..) => {
127 // Must be a fat pointer
128 op.layout.ty.builtin_deref(true).is_some()
// NOTE(review): the closing arms of this match and the `else` branch of the
// `if` below (original lines 129-131, 134-135, 138-139) are elided here.
132 let normalized_op = if normalize {
133 ecx.try_read_immediate(op)?
136 Operand::Indirect(mplace) => Err(mplace),
137 Operand::Immediate(val) => Ok(val)
140 let val = match normalized_op {
141 Err(MemPlace { ptr, align, meta }) => {
142 // extract alloc-offset pair
143 assert!(meta.is_none());
144 let ptr = ptr.to_ptr()?;
145 let alloc = ecx.memory.get(ptr.alloc_id)?;
// Sanity: the allocation must be at least as aligned as the place claims, and
// the value must fit entirely inside it.
146 assert!(alloc.align >= align);
147 assert!(alloc.bytes.len() as u64 - ptr.offset.bytes() >= op.layout.size.bytes());
148 let mut alloc = alloc.clone();
150 // FIXME shouldn't it be the case that `mark_static_initialized` has already
151 // interned this? I thought that is the entire point of that `FinishStatic` stuff?
152 let alloc = ecx.tcx.intern_const_alloc(alloc);
153 ConstValue::ByRef(ptr.alloc_id, alloc, ptr.offset)
// Immediates must not contain undef bytes — `not_undef` turns undef into an error.
155 Ok(Immediate::Scalar(x)) =>
156 ConstValue::Scalar(x.not_undef()?),
157 Ok(Immediate::ScalarPair(a, b)) =>
158 ConstValue::ScalarPair(a.not_undef()?, b.not_undef()?),
160 Ok(ty::Const { val, ty: op.layout.ty })
// Turn a `LazyConst` into an interpreter operand with the layout of `ty`.
// NOTE(review): a `ty: Ty<'tcx>` parameter (original line 166) is elided from
// this excerpt — `ty` is used in `layout_of` below.
163 pub fn lazy_const_to_op<'tcx>(
164 ecx: &CompileTimeEvalContext<'_, '_, 'tcx>,
165 cnst: ty::LazyConst<'tcx>,
167 ) -> EvalResult<'tcx, OpTy<'tcx>> {
168 let op = ecx.const_value_to_op(cnst)?;
169 Ok(OpTy { op, layout: ecx.layout_of(ty)? })
// Evaluate a constant body and return both the result and the `EvalContext`
// used, so callers can render diagnostics (stacktraces) from the context.
// NOTE(review): a `cid: GlobalId<'tcx>` parameter (original line 174) and the
// tail returning `(r, ecx)` (original lines 184-185) are elided here.
172 fn eval_body_and_ecx<'a, 'mir, 'tcx>(
173 tcx: TyCtxt<'a, 'tcx, 'tcx>,
175 mir: Option<&'mir mir::Mir<'tcx>>,
176 param_env: ty::ParamEnv<'tcx>,
177 ) -> (EvalResult<'tcx, MPlaceTy<'tcx>>, CompileTimeEvalContext<'a, 'mir, 'tcx>) {
178 // we start out with the best span we have
179 // and try improving it down the road when more information is available
180 let span = tcx.def_span(cid.instance.def_id());
181 let span = mir.map(|mir| mir.span).unwrap_or(span);
182 let mut ecx = EvalContext::new(tcx.at(span), param_env, CompileTimeInterpreter::new());
183 let r = eval_body_using_ecx(&mut ecx, cid, mir, param_env);
187 // Returns a pointer to where the result lives
// NOTE(review): a `cid: GlobalId<'tcx>` parameter (original line 190), the
// `Some(mir)` match arm, several `push_stack_frame` arguments, and the
// interpreter-stepping loop (around original lines 219-222) are elided from
// this excerpt.
188 fn eval_body_using_ecx<'mir, 'tcx>(
189 ecx: &mut CompileTimeEvalContext<'_, 'mir, 'tcx>,
191 mir: Option<&'mir mir::Mir<'tcx>>,
192 param_env: ty::ParamEnv<'tcx>,
193 ) -> EvalResult<'tcx, MPlaceTy<'tcx>> {
194 debug!("eval_body_using_ecx: {:?}, {:?}", cid, param_env);
195 let tcx = ecx.tcx.tcx;
196 let mut mir = match mir {
198 None => ecx.load_mir(cid.instance.def)?,
// If we are evaluating a promoted, substitute in its MIR body.
200 if let Some(index) = cid.promoted {
201 mir = &mir.promoted[index];
// The return place of a const/static must be sized, so we can allocate it up front.
203 let layout = ecx.layout_of(mir.return_ty().subst(tcx, cid.instance.substs))?;
204 assert!(!layout.is_unsized());
205 let ret = ecx.allocate(layout, MemoryKind::Stack);
207 let name = ty::tls::with(|tcx| tcx.item_path_str(cid.instance.def_id()));
208 let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
209 trace!("eval_body_using_ecx: pushing stack frame for global: {}{}", name, prom);
// Globals take no arguments.
210 assert!(mir.arg_count == 0);
211 ecx.push_stack_frame(
216 StackPopCleanup::None { cleanup: false },
219 // The main interpreter loop.
// Decide the mutability of the interned result: `static mut` and types with
// interior mutability stay mutable; everything else is frozen.
223 let internally_mutable = !layout.ty.is_freeze(tcx, param_env, mir.span);
224 let is_static = tcx.is_static(cid.instance.def_id());
225 let mutability = if is_static == Some(hir::Mutability::MutMutable) || internally_mutable {
228 Mutability::Immutable
230 ecx.memory.intern_static(ret.ptr.to_ptr()?.alloc_id, mutability)?;
232 debug!("eval_body_using_ecx done: {:?}", *ret);
// Machine-specific error type for CTFE, surfaced as a `MachineError` string.
// NOTE(review): implementing `From<ConstEvalError> for EvalError` would be the
// conventional direction (it gives `Into` for free); left as-is here.
236 impl<'tcx> Into<EvalError<'tcx>> for ConstEvalError {
237 fn into(self) -> EvalError<'tcx> {
238 EvalErrorKind::MachineError(self.to_string()).into()
// NOTE(review): the enum's variant list (original lines 244-246) is elided from
// this excerpt; `NeedsRfc(String)` is the variant used throughout this file.
242 #[derive(Clone, Debug)]
243 enum ConstEvalError {
247 impl fmt::Display for ConstEvalError {
248 fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
249 use self::ConstEvalError::*;
251 NeedsRfc(ref msg) => {
254 "\"{}\" needs an rfc before being allowed inside constants",
// `description`/`cause` are the pre-1.33 `std::error::Error` API (since
// deprecated in favor of `Display` and `source`).
262 impl Error for ConstEvalError {
263 fn description(&self) -> &str {
264 use self::ConstEvalError::*;
266 NeedsRfc(_) => "this feature needs an rfc before being allowed inside constants",
270 fn cause(&self) -> Option<&dyn Error> {
275 // Extra machine state for CTFE, and the Machine instance
276 pub struct CompileTimeInterpreter<'a, 'mir, 'tcx: 'a+'mir> {
277 /// When this value is negative, it indicates the number of interpreter
278 /// steps *until* the loop detector is enabled. When it is positive, it is
279 /// the number of steps after the detector has been enabled modulo the loop
281 pub(super) steps_since_detector_enabled: isize,
283 /// Extra state to detect loops.
284 pub(super) loop_detector: snapshot::InfiniteLoopDetector<'a, 'mir, 'tcx>,
287 impl<'a, 'mir, 'tcx> CompileTimeInterpreter<'a, 'mir, 'tcx> {
// Fresh machine state: detector off, counter starts at the negated threshold
// so it crosses zero exactly when the detector should switch on.
289 CompileTimeInterpreter {
290 loop_detector: Default::default(),
291 steps_since_detector_enabled: -STEPS_UNTIL_DETECTOR_ENABLED,
// Back the interpreter's allocation map with a plain `FxHashMap`: each trait
// method delegates directly to the corresponding map operation.
296 impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
298 fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
301 FxHashMap::contains_key(self, k)
305 fn insert(&mut self, k: K, v: V) -> Option<V>
307 FxHashMap::insert(self, k, v)
311 fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
314 FxHashMap::remove(self, k)
318 fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
320 .filter_map(move |(k, v)| f(k, &*v))
// NOTE(review): two `get_or`-style method headers (around original lines
// 324-346) are partially elided here; the first (read path) must never insert,
// hence the `bug!`, while the second (mutable path) uses the entry API.
328 vacant: impl FnOnce() -> Result<V, E>
335 bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
344 vacant: impl FnOnce() -> Result<V, E>
345 ) -> Result<&mut V, E>
347 match self.entry(k) {
348 Entry::Occupied(e) => Ok(e.into_mut()),
349 Entry::Vacant(e) => {
// Shorthand for an `EvalContext` driven by the CTFE machine above.
357 type CompileTimeEvalContext<'a, 'mir, 'tcx> =
358 EvalContext<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>;
// `!` (never) has no values, so this method body can never actually run.
360 impl interpret::MayLeak for ! {
362 fn may_leak(self) -> bool {
363 // `self` is uninhabited
// The `Machine` implementation that defines CTFE semantics: which calls are
// allowed, how intrinsics/binops on pointers are handled, and the loop
// detector hook. NOTE(review): several method headers and bodies in this impl
// are elided from this excerpt (gaps in the embedded line numbers).
368 impl<'a, 'mir, 'tcx> interpret::Machine<'a, 'mir, 'tcx>
369 for CompileTimeInterpreter<'a, 'mir, 'tcx>
// CTFE needs no extra memory kinds, tags, or per-frame/alloc state.
371 type MemoryKinds = !;
372 type PointerTag = ();
374 type FrameExtra = ();
375 type MemoryExtra = ();
376 type AllocExtra = ();
378 type MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>;
380 const STATIC_KIND: Option<!> = None; // no copying of statics allowed
383 fn enforce_validity(_ecx: &EvalContext<'a, 'mir, 'tcx, Self>) -> bool {
384 false // for now, we don't enforce validity
// find_fn / eval_fn_call: only const fn may be called; a few non-const
// functions are emulated via `hook_fn` instead of being executed.
388 ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
389 instance: ty::Instance<'tcx>,
391 dest: Option<PlaceTy<'tcx>>,
392 ret: Option<mir::BasicBlock>,
393 ) -> EvalResult<'tcx, Option<&'mir mir::Mir<'tcx>>> {
394 debug!("eval_fn_call: {:?}", instance);
395 // Execution might have wandered off into other crates, so we cannot to a stability-
396 // sensitive check here. But we can at least rule out functions that are not const
398 if !ecx.tcx.is_const_fn_raw(instance.def_id()) {
399 // Some functions we support even if they are non-const -- but avoid testing
400 // that for const fn! We certainly do *not* want to actually call the fn
401 // though, so be sure we return here.
402 return if ecx.hook_fn(instance, args, dest)? {
403 ecx.goto_block(ret)?; // fully evaluated and done
406 err!(MachineError(format!("calling non-const function `{}`", instance)))
409 // This is a const fn. Call it.
410 Ok(Some(match ecx.load_mir(instance.def) {
// A missing MIR body (extern function) is reported as needing an RFC.
413 if let EvalErrorKind::NoMirFor(ref path) = err.kind {
415 ConstEvalError::NeedsRfc(format!("calling extern function `{}`", path))
// call_intrinsic: try the shared emulation first; anything unsupported is an error.
425 ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
426 instance: ty::Instance<'tcx>,
429 ) -> EvalResult<'tcx> {
430 if ecx.emulate_intrinsic(instance, args, dest)? {
433 // An intrinsic that we do not support
434 let intrinsic_name = &ecx.tcx.item_name(instance.def_id()).as_str()[..];
436 ConstEvalError::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into()
// ptr_op: pointer arithmetic/comparison is rejected in const contexts.
441 _ecx: &EvalContext<'a, 'mir, 'tcx, Self>,
444 _left_layout: TyLayout<'tcx>,
446 _right_layout: TyLayout<'tcx>,
447 ) -> EvalResult<'tcx, (Scalar, bool)> {
449 ConstEvalError::NeedsRfc("pointer arithmetic or comparison".to_string()).into(),
// Foreign (extern) statics cannot be read at compile time.
453 fn find_foreign_static(
455 _tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
457 ) -> EvalResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag>>> {
458 err!(ReadForeignStatic)
462 fn adjust_static_allocation<'b>(
463 alloc: &'b Allocation,
465 ) -> Cow<'b, Allocation<Self::PointerTag>> {
466 // We do not use a tag so we can just cheaply forward the reference
// box_alloc: heap allocation via `box` is not const-eval'able.
471 _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
472 _dest: PlaceTy<'tcx>,
473 ) -> EvalResult<'tcx> {
475 ConstEvalError::NeedsRfc("heap allocations via `box` keyword".to_string()).into(),
// Infinite-loop detection: count steps, and every DETECTOR_SNAPSHOT_PERIOD
// steps (once enabled) snapshot the machine state for comparison.
479 fn before_terminator(ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>) -> EvalResult<'tcx> {
481 let steps = &mut ecx.machine.steps_since_detector_enabled;
488 *steps %= DETECTOR_SNAPSHOT_PERIOD;
494 let span = ecx.frame().span;
495 ecx.machine.loop_detector.observe_and_analyze(
// Remaining hooks (tagging, frame push/pop) are no-ops for CTFE.
504 fn tag_new_allocation(
505 _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
507 _kind: MemoryKind<Self::MemoryKinds>,
514 _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
515 ) -> EvalResult<'tcx> {
519 /// Called immediately before a stack frame gets popped
522 _ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
524 ) -> EvalResult<'tcx> {
529 /// Project to a field of a (variant of a) const
// NOTE(review): a `field: mir::Field` parameter (original line 535) is elided
// from this excerpt — `field` is used in the trace and projection below.
530 pub fn const_field<'a, 'tcx>(
531 tcx: TyCtxt<'a, 'tcx, 'tcx>,
532 param_env: ty::ParamEnv<'tcx>,
533 instance: ty::Instance<'tcx>,
534 variant: Option<VariantIdx>,
536 value: ty::Const<'tcx>,
537 ) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
538 trace!("const_field: {:?}, {:?}, {:?}", instance, field, value);
539 let ecx = mk_eval_cx(tcx, instance, param_env).unwrap();
541 // get the operand again
542 let op = lazy_const_to_op(&ecx, ty::LazyConst::Evaluated(value), value.ty)?;
// Downcast to the requested variant first (for enums), then project the field.
544 let down = match variant {
546 Some(variant) => ecx.operand_downcast(op, variant)?
549 let field = ecx.operand_field(down, field.index() as u64)?;
550 // and finally move back to the const world, always normalizing because
551 // this is not called for statics.
552 op_to_const(&ecx, field, true)
// Any failure is reported once and collapsed into `ErrorHandled::Reported`.
554 result.map_err(|error| {
555 let err = error_to_const_error(&ecx, error);
556 err.report_as_error(ecx.tcx, "could not access field of constant");
557 ErrorHandled::Reported
// Read the discriminant of an (enum-typed) constant and return its variant index.
561 pub fn const_variant_index<'a, 'tcx>(
562 tcx: TyCtxt<'a, 'tcx, 'tcx>,
563 param_env: ty::ParamEnv<'tcx>,
564 instance: ty::Instance<'tcx>,
565 val: ty::Const<'tcx>,
566 ) -> EvalResult<'tcx, VariantIdx> {
567 trace!("const_variant_index: {:?}, {:?}", instance, val);
568 let ecx = mk_eval_cx(tcx, instance, param_env).unwrap();
569 let op = lazy_const_to_op(&ecx, ty::LazyConst::Evaluated(val), val.ty)?;
// `read_discriminant` returns (discriminant value, variant index); we want the index.
570 Ok(ecx.read_discriminant(op)?.1)
// Convert an interpreter `EvalError` into a reportable `ConstEvalErr`,
// attaching a stacktrace generated from the evaluation context.
573 pub fn error_to_const_error<'a, 'mir, 'tcx>(
574 ecx: &EvalContext<'a, 'mir, 'tcx, CompileTimeInterpreter<'a, 'mir, 'tcx>>,
575 mut error: EvalError<'tcx>
576 ) -> ConstEvalErr<'tcx> {
// Print the (debug) backtrace eagerly before the error is decomposed.
577 error.print_backtrace();
578 let stacktrace = ecx.generate_stacktrace(None);
579 ConstEvalErr { error: error.kind, stacktrace, span: ecx.tcx.span }
// Validate a raw evaluation result (checking for undefined behavior, following
// references transitively via `RefTracking`) and convert it into a `ty::Const`.
// NOTE(review): a `let cid = key.value;` binding (around original line 587) and
// the closure wrapping `val` appear to be elided from this excerpt.
582 fn validate_and_turn_into_const<'a, 'tcx>(
583 tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
584 constant: RawConst<'tcx>,
585 key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
586 ) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
588 let ecx = mk_eval_cx(tcx, cid.instance, key.param_env).unwrap();
590 let op = ecx.raw_const_to_mplace(constant)?.into();
591 // FIXME: Once the visitor infrastructure landed, change validation to
592 // work directly on `MPlaceTy`.
593 let mut ref_tracking = RefTracking::new(op);
// Worklist loop: validating one operand may enqueue the operands it references.
594 while let Some((op, path)) = ref_tracking.todo.pop() {
595 ecx.validate_operand(
598 Some(&mut ref_tracking),
599 /* const_mode */ true,
602 // Now that we validated, turn this into a proper constant
// Statics and promoteds keep their allocation form; everything else may be
// normalized to an immediate by `op_to_const`.
603 let def_id = cid.instance.def.def_id();
604 let normalize = tcx.is_static(def_id).is_none() && cid.promoted.is_none();
605 op_to_const(&ecx, op, normalize)
// Validation failures are reported as "undefined behavior" diagnostics.
608 val.map_err(|error| {
609 let err = error_to_const_error(&ecx, error);
610 match err.struct_error(ecx.tcx, "it is undefined behavior to use this value") {
612 diag.note("The rules on what exactly is undefined behavior aren't clear, \
613 so this check might be overzealous. Please open an issue on the rust compiler \
614 repository if you believe it should not be considered undefined behavior",
617 ErrorHandled::Reported
// Query provider: evaluate a constant, then validate the result.
624 pub fn const_eval_provider<'a, 'tcx>(
625 tcx: TyCtxt<'a, 'tcx, 'tcx>,
626 key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
627 ) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
628 // see comment in const_eval_raw_provider for what we're doing here
629 if key.param_env.reveal == Reveal::All {
630 let mut key = key.clone();
631 key.param_env.reveal = Reveal::UserFacing;
632 match tcx.const_eval(key) {
633 // try again with reveal all as requested
634 Err(ErrorHandled::TooGeneric) => {
635 // Promoteds should never be "too generic" when getting evaluated.
636 // They either don't get evaluated, or we are in a monomorphic context
637 assert!(key.value.promoted.is_none());
640 other => return other,
// Raw evaluation first, then UB validation + conversion to `ty::Const`.
643 tcx.const_eval_raw(key).and_then(|val| {
644 validate_and_turn_into_const(tcx, val, key)
// Query provider: evaluate a constant WITHOUT validation, returning the raw
// allocation. Also responsible for how evaluation errors are reported
// (hard error for statics, lint for local consts/promoteds, plain error otherwise).
// NOTE(review): a `let cid = key.value;` binding (around original line 679) and
// various lint-emission lines are elided from this excerpt.
648 pub fn const_eval_raw_provider<'a, 'tcx>(
649 tcx: TyCtxt<'a, 'tcx, 'tcx>,
650 key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
651 ) -> ::rustc::mir::interpret::ConstEvalRawResult<'tcx> {
652 // Because the constant is computed twice (once per value of `Reveal`), we are at risk of
653 // reporting the same error twice here. To resolve this, we check whether we can evaluate the
654 // constant in the more restrictive `Reveal::UserFacing`, which most likely already was
655 // computed. For a large percentage of constants that will already have succeeded. Only
656 // associated constants of generic functions will fail due to not enough monomorphization
657 // information being available.
659 // In case we fail in the `UserFacing` variant, we just do the real computation.
660 if key.param_env.reveal == Reveal::All {
661 let mut key = key.clone();
662 key.param_env.reveal = Reveal::UserFacing;
663 match tcx.const_eval_raw(key) {
664 // try again with reveal all as requested
665 Err(ErrorHandled::TooGeneric) => {},
667 other => return other,
670 // the first trace is for replicating an ice
671 // There's no tracking issue, but the next two lines concatenated link to the discussion on
672 // zulip. It's not really possible to test this, because it doesn't show up in diagnostics
674 // https://rust-lang.zulipchat.com/#narrow/stream/146212-t-compiler.2Fconst-eval/
675 // subject/anon_const_instance_printing/near/135980032
676 trace!("const eval: {}", key.value.instance);
677 trace!("const eval: {:?}", key);
680 let def_id = cid.instance.def.def_id();
// For local definitions, run pre-evaluation checks (match checking, const
// qualification) and bail out early on existing type errors.
682 if let Some(id) = tcx.hir().as_local_node_id(def_id) {
683 let tables = tcx.typeck_tables_of(def_id);
685 // Do match-check before building MIR
686 if let Err(ErrorReported) = tcx.check_match(def_id) {
687 return Err(ErrorHandled::Reported)
690 if let hir::BodyOwnerKind::Const = tcx.hir().body_owner_kind(id) {
691 tcx.mir_const_qualif(def_id);
694 // Do not continue into miri if typeck errors occurred; it will fail horribly
695 if tables.tainted_by_errors {
696 return Err(ErrorHandled::Reported)
// Actual evaluation; on success the result is the allocation we created.
700 let (res, ecx) = eval_body_and_ecx(tcx, cid, None, key.param_env);
701 res.and_then(|place| {
703 alloc_id: place.to_ptr().expect("we allocated this ptr!").alloc_id,
707 let err = error_to_const_error(&ecx, error);
708 // errors in statics are always emitted as fatal errors
709 if tcx.is_static(def_id).is_some() {
710 let reported_err = err.report_as_error(ecx.tcx,
711 "could not evaluate static initializer");
712 // Ensure that if the above error was either `TooGeneric` or `Reported`
713 // an error must be reported.
714 if tcx.sess.err_count() == 0 {
715 tcx.sess.delay_span_bug(err.span,
716 &format!("static eval failure did not emit an error: {:#?}",
720 } else if def_id.is_local() {
721 // constant defined in this crate, we can figure out a lint level!
722 match tcx.describe_def(def_id) {
723 // constants never produce a hard error at the definition site. Anything else is
724 // a backwards compatibility hazard (and will break old versions of winapi for sure)
726 // note that validation may still cause a hard error on this very same constant,
727 // because any code that existed before validation could not have failed validation
728 // thus preventing such a hard error from being a backwards compatibility hazard
729 Some(Def::Const(_)) | Some(Def::AssociatedConst(_)) => {
730 let node_id = tcx.hir().as_local_node_id(def_id).unwrap();
732 tcx.at(tcx.def_span(def_id)),
733 "any use of this value will cause an error",
737 // promoting runtime code is only allowed to error if it references broken constants
738 // any other kind of error will be reported to the user as a deny-by-default lint
739 _ => if let Some(p) = cid.promoted {
740 let span = tcx.optimized_mir(def_id).promoted[p].span;
741 if let EvalErrorKind::ReferencedConstant = err.error {
744 "evaluation of constant expression failed",
749 "reaching this expression at runtime will panic or abort",
750 tcx.hir().as_local_node_id(def_id).unwrap(),
753 // anything else (array lengths, enum initializers, constant patterns) are reported
758 "evaluation of constant value failed",
763 // use of broken constant from other crate
764 err.report_as_error(ecx.tcx, "could not evaluate constant")