1 // Not in interpret to make sure we do not use private implementation details
5 use std::borrow::{Borrow, Cow};
7 use std::collections::hash_map::Entry;
8 use std::convert::TryInto;
10 use rustc::hir::def::DefKind;
11 use rustc::hir::def_id::DefId;
12 use rustc::middle::lang_items::PanicLocationLangItem;
13 use rustc::mir::interpret::{ConstEvalErr, ErrorHandled, ScalarMaybeUndef};
15 use rustc::ty::{self, Ty, TyCtxt, subst::Subst};
16 use rustc::ty::layout::{self, LayoutOf, VariantIdx};
17 use rustc::traits::Reveal;
18 use rustc_data_structures::fx::FxHashMap;
19 use crate::interpret::eval_nullary_intrinsic;
21 use syntax::{source_map::{Span, DUMMY_SP}, symbol::Symbol};
23 use crate::interpret::{self,
24 PlaceTy, MPlaceTy, OpTy, ImmTy, Immediate, Scalar, Pointer,
25 RawConst, ConstValue, Machine,
26 InterpResult, InterpErrorInfo, GlobalId, InterpCx, StackPopCleanup,
27 Allocation, AllocId, MemoryKind, Memory, StackPopInfo,
28 snapshot, RefTracking, intern_const_alloc_recursive,
// Tuning knobs for the CTFE infinite-loop detector (see `CompileTimeInterpreter`
// and `before_terminator` below, which consume these constants).
31 /// Number of steps until the detector even starts doing anything.
32 /// Also, a warning is shown to the user when this number is reached.
33 const STEPS_UNTIL_DETECTOR_ENABLED: isize = 1_000_000;
34 /// The number of steps between loop detector snapshots.
35 /// Should be a power of two for performance reasons.
// The period is applied via `*steps %= DETECTOR_SNAPSHOT_PERIOD` in `before_terminator`.
36 const DETECTOR_SNAPSHOT_PERIOD: isize = 256;
38 /// The `InterpCx` is only meant to be used to do field and index projections into constants for
39 /// `simd_shuffle` and const patterns in match arms.
41 /// The function containing the `match` that is currently being analyzed may have generic bounds
42 /// that inform us about the generic bounds of the constant. E.g., using an associated constant
43 /// of a function's generic parameter will require knowledge about the bounds on the generic
44 /// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument.
// NOTE(review): this extraction is missing source lines — the `tcx` and `span`
// parameters referenced in the body are not visible in the signature below.
45 fn mk_eval_cx<'mir, 'tcx>(
48 param_env: ty::ParamEnv<'tcx>,
49 ) -> CompileTimeEvalContext<'mir, 'tcx> {
50 debug!("mk_eval_cx: {:?}", param_env);
// Build a fresh interpreter context with the CTFE machine and default machine state.
51 InterpCx::new(tcx.at(span), param_env, CompileTimeInterpreter::new(), Default::default())
// Converts an interpreter operand (`op`) into a `ty::Const` for the rest of the
// compiler, preferring the compact `ConstValue::Scalar`/`Slice` forms and falling
// back to `ConstValue::ByRef` for everything else (including undef scalars).
// NOTE(review): the `fn op_to_const` signature line and several body lines are
// missing from this extraction; `op` is presumably an `OpTy` parameter — confirm.
55 ecx: &CompileTimeEvalContext<'_, 'tcx>,
57 ) -> &'tcx ty::Const<'tcx> {
58 // We do not have value optimizations for everything.
59 // Only scalars and slices, since they are very common.
60 // Note that further down we turn scalars of undefined bits back to `ByRef`. These can result
61 // from scalar unions that are initialized with one of their zero sized variants. We could
62 // instead allow `ConstValue::Scalar` to store `ScalarMaybeUndef`, but that would affect all
63 // the usual cases of extracting e.g. a `usize`, without there being a real use case for the
// Decide up front whether this layout is worth reading as an immediate:
// plain scalars, and scalar pairs that are `&[u8]`-like slices.
65 let try_as_immediate = match op.layout.abi {
66 layout::Abi::Scalar(..) => true,
67 layout::Abi::ScalarPair(..) => match op.layout.ty.kind {
68 ty::Ref(_, inner, _) => match inner.kind {
69 ty::Slice(elem) => elem == ecx.tcx.types.u8,
// `Err` carries the immediate here (inverted use of `Result` as an either-type).
77 let immediate = if try_as_immediate {
78 Err(ecx.read_immediate(op).expect("normalization works on validated constants"))
80 // It is guaranteed that any non-slice scalar pair is actually ByRef here.
81 // When we come back from raw const eval, we are always by-ref. The only way our op here is
82 // by-val is if we are in const_field, i.e., if this is (a field of) something that we
83 // "tried to make immediate" before. We wouldn't do that for non-slice scalar pairs or
84 // structs containing such.
87 let val = match immediate {
// By-ref case: hand out the underlying allocation directly.
89 let ptr = mplace.ptr.to_ptr().unwrap();
90 let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
91 ConstValue::ByRef { alloc, offset: ptr.offset }
93 // see comment on `let try_as_immediate` above
94 Err(ImmTy { imm: Immediate::Scalar(x), .. }) => match x {
95 ScalarMaybeUndef::Scalar(s) => ConstValue::Scalar(s),
96 ScalarMaybeUndef::Undef => {
97 // When coming out of "normal CTFE", we'll always have an `Indirect` operand as
98 // argument and we will not need this. The only way we can already have an
99 // `Immediate` is when we are called from `const_field`, and that `Immediate`
100 // comes from a constant so it can happen to have `Undef`, because the indirect
101 // memory that was read had undefined bytes.
102 let mplace = op.assert_mem_place();
103 let ptr = mplace.ptr.to_ptr().unwrap();
104 let alloc = ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id);
105 ConstValue::ByRef { alloc, offset: ptr.offset }
// Scalar-pair case: a `&[u8]`-like slice — split into backing data + start + len.
108 Err(ImmTy { imm: Immediate::ScalarPair(a, b), .. }) => {
109 let (data, start) = match a.not_undef().unwrap() {
110 Scalar::Ptr(ptr) => (
111 ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
// An integer "pointer" (e.g. a dangling-but-valid empty slice) gets a
// synthetic allocation so it still fits `ConstValue::Slice`.
114 Scalar::Raw { .. } => (
115 ecx.tcx.intern_const_alloc(Allocation::from_byte_aligned_bytes(
121 let len = b.to_machine_usize(&ecx.tcx.tcx).unwrap();
122 let start = start.try_into().unwrap();
123 let len: usize = len.try_into().unwrap();
// Intern the final constant; `unwrap`s above are justified because the input
// is an already-validated constant (see callers such as `const_field`).
131 ecx.tcx.mk_const(ty::Const { val, ty: op.layout.ty })
134 // Returns a pointer to where the result lives
// Evaluates the given MIR `body` for constant `cid` inside `ecx`: allocates the
// return place, pushes a stack frame, runs the interpreter loop, then interns
// the result allocations. NOTE(review): several body lines (e.g. the `cid`
// parameter and the run loop itself) are missing from this extraction.
135 fn eval_body_using_ecx<'mir, 'tcx>(
136 ecx: &mut CompileTimeEvalContext<'mir, 'tcx>,
138 body: &'mir mir::Body<'tcx>,
139 ) -> InterpResult<'tcx, MPlaceTy<'tcx>> {
140 debug!("eval_body_using_ecx: {:?}, {:?}", cid, ecx.param_env);
141 let tcx = ecx.tcx.tcx;
// Substitute the instance's generics into the declared return type and lay it out.
142 let layout = ecx.layout_of(body.return_ty().subst(tcx, cid.instance.substs))?;
143 assert!(!layout.is_unsized());
144 let ret = ecx.allocate(layout, MemoryKind::Stack);
145 let name = ty::tls::with(|tcx| tcx.def_path_str(cid.instance.def_id()));
146 let prom = cid.promoted.map_or(String::new(), |p| format!("::promoted[{:?}]", p));
147 trace!("eval_body_using_ecx: pushing stack frame for global: {}{}", name, prom);
// Globals take no arguments; anything else is a compiler bug.
149 assert!(body.arg_count == 0);
150 ecx.push_stack_frame(
// No locals to clean up when the frame pops — interning below takes ownership.
155 StackPopCleanup::None { cleanup: false },
158 // The main interpreter loop.
// Intern the result so it outlives the interpreter; statics keep their mutability.
162 intern_const_alloc_recursive(ecx, tcx.static_mutability(cid.instance.def_id()), ret)?;
164 debug!("eval_body_using_ecx done: {:?}", *ret);
// Machine-specific error for CTFE. NOTE(review): variants are missing from this
// extraction; `NeedsRfc(String)` is used by the impls below — confirm full set.
168 #[derive(Clone, Debug)]
169 pub enum ConstEvalError {
// Bridge into the interpreter's error type: every `ConstEvalError` surfaces as
// an "unsupported" interpreter error carrying its `Display` text.
173 impl<'tcx> Into<InterpErrorInfo<'tcx>> for ConstEvalError {
174 fn into(self) -> InterpErrorInfo<'tcx> {
175 err_unsup!(Unsupported(self.to_string())).into()
// User-facing rendering of CTFE errors.
179 impl fmt::Display for ConstEvalError {
180 fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
181 use self::ConstEvalError::*;
183 NeedsRfc(ref msg) => {
186 "\"{}\" needs an rfc before being allowed inside constants",
// `std::error::Error` plumbing; `description` is the legacy static-text variant
// of the `Display` message above.
194 impl Error for ConstEvalError {
195 fn description(&self) -> &str {
196 use self::ConstEvalError::*;
198 NeedsRfc(_) => "this feature needs an rfc before being allowed inside constants",
// No underlying cause to report for these errors.
202 fn cause(&self) -> Option<&dyn Error> {
207 // Extra machine state for CTFE, and the Machine instance
208 pub struct CompileTimeInterpreter<'mir, 'tcx> {
209 /// When this value is negative, it indicates the number of interpreter
210 /// steps *until* the loop detector is enabled. When it is positive, it is
211 /// the number of steps after the detector has been enabled modulo the loop
// Initialized to `-STEPS_UNTIL_DETECTOR_ENABLED` in `new()`, advanced and
// wrapped by `DETECTOR_SNAPSHOT_PERIOD` in `before_terminator`.
213 pub(super) steps_since_detector_enabled: isize,
215 /// Extra state to detect loops.
216 pub(super) loop_detector: snapshot::InfiniteLoopDetector<'mir, 'tcx>,
// Constructor: empty loop detector, with the step counter negative so the
// detector stays dormant for the first `STEPS_UNTIL_DETECTOR_ENABLED` steps.
// NOTE(review): the `fn new()` line itself is missing from this extraction.
219 impl<'mir, 'tcx> CompileTimeInterpreter<'mir, 'tcx> {
221 CompileTimeInterpreter {
222 loop_detector: Default::default(),
223 steps_since_detector_enabled: -STEPS_UNTIL_DETECTOR_ENABLED,
// The CTFE machine stores its allocations in a plain `FxHashMap`; this impl
// adapts the hash-map API to the `interpret::AllocMap` trait the engine expects.
228 impl<K: Hash + Eq, V> interpret::AllocMap<K, V> for FxHashMap<K, V> {
// Takes `&mut self` per the trait even though the hash-map lookup is read-only.
230 fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
233 FxHashMap::contains_key(self, k)
237 fn insert(&mut self, k: K, v: V) -> Option<V>
239 FxHashMap::insert(self, k, v)
243 fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
246 FxHashMap::remove(self, k)
250 fn filter_map_collect<T>(&self, mut f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T> {
252 .filter_map(move |(k, v)| f(k, &*v))
// Read-path `get_or`: CTFE never lazily creates allocations on reads, so the
// `vacant` closure must never run here.
260 vacant: impl FnOnce() -> Result<V, E>
267 bug!("The CTFE machine shouldn't ever need to extend the alloc_map when reading")
// Write-path `get_mut_or`: use the entry API so the key is hashed only once.
276 vacant: impl FnOnce() -> Result<V, E>
277 ) -> Result<&mut V, E>
279 match self.entry(k) {
280 Entry::Occupied(e) => Ok(e.into_mut()),
281 Entry::Vacant(e) => {
// Shorthand for an interpreter context driven by the CTFE machine.
289 crate type CompileTimeEvalContext<'mir, 'tcx> =
290 InterpCx<'mir, 'tcx, CompileTimeInterpreter<'mir, 'tcx>>;
// `!` is the machine's `MemoryKinds` type; since it has no values, `may_leak`
// can never actually be called.
292 impl interpret::MayLeak for ! {
294 fn may_leak(self) -> bool {
295 // `self` is uninhabited
// The Machine hooks that specialize the generic interpreter for compile-time
// evaluation: no pointer tags, no alignment checks, no unwinding, and most
// "dynamic" operations (int/ptr casts, heap allocation, foreign statics)
// rejected with `ConstEvalError`. NOTE(review): many lines are missing from
// this extraction; several hook bodies below are visibly truncated.
300 impl<'mir, 'tcx> interpret::Machine<'mir, 'tcx> for CompileTimeInterpreter<'mir, 'tcx> {
301 type MemoryKinds = !;
302 type PointerTag = ();
305 type FrameExtra = ();
306 type MemoryExtra = ();
307 type AllocExtra = ();
309 type MemoryMap = FxHashMap<AllocId, (MemoryKind<!>, Allocation)>;
311 const STATIC_KIND: Option<!> = None; // no copying of statics allowed
313 // We do not check for alignment to avoid having to carry an `Align`
314 // in `ConstValue::ByRef`.
315 const CHECK_ALIGN: bool = false;
318 fn enforce_validity(_ecx: &InterpCx<'mir, 'tcx, Self>) -> bool {
319 false // for now, we don't enforce validity
// Resolve a function call: returns the MIR body to execute, or `None` if the
// call was handled here (e.g. by `hook_fn`).
323 ecx: &mut InterpCx<'mir, 'tcx, Self>,
324 instance: ty::Instance<'tcx>,
326 dest: Option<PlaceTy<'tcx>>,
327 ret: Option<mir::BasicBlock>,
328 _unwind: Option<mir::BasicBlock> // unwinding is not supported in consts
329 ) -> InterpResult<'tcx, Option<&'mir mir::Body<'tcx>>> {
330 debug!("eval_fn_call: {:?}", instance);
331 // Only check non-glue functions
332 if let ty::InstanceDef::Item(def_id) = instance.def {
333 // Execution might have wandered off into other crates, so we cannot do a stability-
334 // sensitive check here. But we can at least rule out functions that are not const
336 if !ecx.tcx.is_const_fn_raw(def_id) {
337 // Some functions we support even if they are non-const -- but avoid testing
338 // that for const fn! We certainly do *not* want to actually call the fn
339 // though, so be sure we return here.
340 return if ecx.hook_fn(instance, args, dest)? {
341 ecx.goto_block(ret)?; // fully evaluated and done
344 throw_unsup_format!("calling non-const function `{}`", instance)
348 // This is a const fn. Call it.
349 Ok(Some(match ecx.load_mir(instance.def, None) {
// Missing MIR for an extern function gets the friendlier "needs an rfc" error.
352 if let err_unsup!(NoMirFor(ref path)) = err.kind {
354 ConstEvalError::NeedsRfc(format!("calling extern function `{}`", path))
// Hook for assert-style terminators; body not visible in this extraction.
364 _ecx: &mut InterpCx<'mir, 'tcx, Self>,
366 _args: &[OpTy<'tcx>],
367 _dest: Option<PlaceTy<'tcx>>,
368 _ret: Option<mir::BasicBlock>,
369 ) -> InterpResult<'tcx> {
// Intrinsic calls: delegate to the shared emulation, error out on anything else.
374 ecx: &mut InterpCx<'mir, 'tcx, Self>,
376 instance: ty::Instance<'tcx>,
378 dest: Option<PlaceTy<'tcx>>,
379 ) -> InterpResult<'tcx> {
380 if ecx.emulate_intrinsic(span, instance, args, dest)? {
383 // An intrinsic that we do not support
384 let intrinsic_name = ecx.tcx.item_name(instance.def_id());
386 ConstEvalError::NeedsRfc(format!("calling intrinsic `{}`", intrinsic_name)).into()
// Pointer-to-int casts are not observable at compile time — reject them.
391 _mem: &Memory<'mir, 'tcx, Self>,
393 ) -> InterpResult<'tcx, u64> {
395 ConstEvalError::NeedsRfc("pointer-to-integer cast".to_string()).into(),
// Likewise arithmetic/comparison on raw pointers.
400 _ecx: &InterpCx<'mir, 'tcx, Self>,
404 ) -> InterpResult<'tcx, (Scalar, bool, Ty<'tcx>)> {
406 ConstEvalError::NeedsRfc("pointer arithmetic or comparison".to_string()).into(),
// Foreign (extern) statics have no known value at compile time.
410 fn find_foreign_static(
413 ) -> InterpResult<'tcx, Cow<'tcx, Allocation<Self::PointerTag>>> {
414 throw_unsup!(ReadForeignStatic)
418 fn tag_allocation<'b>(
421 alloc: Cow<'b, Allocation>,
422 _kind: Option<MemoryKind<!>>,
423 ) -> (Cow<'b, Allocation<Self::PointerTag>>, Self::PointerTag) {
424 // We do not use a tag so we can just cheaply forward the allocation
429 fn tag_static_base_pointer(
432 ) -> Self::PointerTag {
// `box` expressions would require a runtime heap — rejected.
437 _ecx: &mut InterpCx<'mir, 'tcx, Self>,
438 _dest: PlaceTy<'tcx>,
439 ) -> InterpResult<'tcx> {
441 ConstEvalError::NeedsRfc("heap allocations via `box` keyword".to_string()).into(),
// Loop-detection hook, run before every terminator: counts steps and,
// once the threshold is crossed, snapshots the machine periodically.
445 fn before_terminator(ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
447 let steps = &mut ecx.machine.steps_since_detector_enabled;
// Keep the positive counter within one snapshot period (period is a power of 2).
454 *steps %= DETECTOR_SNAPSHOT_PERIOD;
460 let span = ecx.frame().span;
461 ecx.machine.loop_detector.observe_and_analyze(
470 fn stack_push(_ecx: &mut InterpCx<'mir, 'tcx, Self>) -> InterpResult<'tcx> {
474 /// Called immediately before a stack frame gets popped.
477 _ecx: &mut InterpCx<'mir, 'tcx, Self>,
479 ) -> InterpResult<'tcx, StackPopInfo> {
480 // Const-eval mode does not support unwinding from panics
481 Ok(StackPopInfo::Normal)
485 /// Extracts a field of a (variant of a) const.
486 // this function uses `unwrap` copiously, because an already validated constant must have valid
487 // fields and can thus never fail outside of compiler bugs
// NOTE(review): the `tcx` and `field` parameters are not visible here — this
// extraction is missing signature lines; both are referenced in the body.
488 pub fn const_field<'tcx>(
490 param_env: ty::ParamEnv<'tcx>,
491 variant: Option<VariantIdx>,
493 value: &'tcx ty::Const<'tcx>,
494 ) -> &'tcx ty::Const<'tcx> {
495 trace!("const_field: {:?}, {:?}", field, value);
496 let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env);
497 // get the operand again
498 let op = ecx.eval_const_to_op(value, None).unwrap();
// Optionally step into the requested enum variant first...
500 let down = match variant {
502 Some(variant) => ecx.operand_downcast(op, variant).unwrap(),
// ...then project out the requested field.
505 let field = ecx.operand_field(down, field.index() as u64).unwrap();
506 // and finally move back to the const world, always normalizing because
507 // this is not called for statics.
508 op_to_const(&ecx, field)
// Builds the `&'static core::panic::Location` constant for a given
// file/line/column triple, as produced by `#[track_caller]`.
// NOTE(review): the `tcx` parameter line is missing from this extraction.
511 pub fn const_caller_location<'tcx>(
513 (file, line, col): (Symbol, u32, u32),
514 ) -> &'tcx ty::Const<'tcx> {
515 trace!("const_caller_location: {}:{}:{}", file, line, col);
516 let mut ecx = mk_eval_cx(tcx, DUMMY_SP, ty::ParamEnv::reveal_all());
// `&'static Location<'static>` — the lang item substituted with `'static`.
518 let loc_ty = tcx.mk_imm_ref(
519 tcx.lifetimes.re_static,
520 tcx.type_of(tcx.require_lang_item(PanicLocationLangItem, None))
521 .subst(tcx, tcx.mk_substs([tcx.lifetimes.re_static.into()].iter())),
523 let loc_place = ecx.alloc_caller_location(file, line, col).unwrap();
// Intern so the allocation outlives the temporary interpreter.
524 intern_const_alloc_recursive(&mut ecx, None, loc_place).unwrap();
525 let loc_const = ty::Const {
527 val: ConstValue::Scalar(loc_place.ptr.into()),
530 tcx.mk_const(loc_const)
// Reads the enum discriminant of an already-validated constant and returns
// its `VariantIdx`.
533 // this function uses `unwrap` copiously, because an already validated constant must have valid
534 // fields and can thus never fail outside of compiler bugs
// NOTE(review): the `tcx` parameter and return-type lines are missing from
// this extraction (return is presumably `VariantIdx` — confirm).
535 pub fn const_variant_index<'tcx>(
537 param_env: ty::ParamEnv<'tcx>,
538 val: &'tcx ty::Const<'tcx>,
540 trace!("const_variant_index: {:?}", val);
541 let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env);
542 let op = ecx.eval_const_to_op(val, None).unwrap();
// `.1` is the variant index; `.0` would be the raw discriminant value.
543 ecx.read_discriminant(op).unwrap().1
546 /// Turn an interpreter error into something to report to the user.
547 /// As a side-effect, if RUSTC_CTFE_BACKTRACE is set, this prints the backtrace.
548 /// Should be called only if the error is actually going to be reported!
549 pub fn error_to_const_error<'mir, 'tcx, M: Machine<'mir, 'tcx>>(
550 ecx: &InterpCx<'mir, 'tcx, M>,
551 mut error: InterpErrorInfo<'tcx>,
552 ) -> ConstEvalErr<'tcx> {
553 error.print_backtrace();
// Capture the full interpreter call stack for the diagnostic.
554 let stacktrace = ecx.generate_stacktrace(None);
555 ConstEvalErr { error: error.kind, stacktrace, span: ecx.tcx.span }
// Shared diagnostic note attached to undefined-behavior errors (see the
// `struct_error` call in `validate_and_turn_into_const`).
558 pub fn note_on_undefined_behavior_error() -> &'static str {
559 "The rules on what exactly is undefined behavior aren't clear, \
560 so this check might be overzealous. Please open an issue on the rustc \
561 repository if you believe it should not be considered undefined behavior."
// Validates a freshly-evaluated raw constant (checking for UB) and converts it
// into a `ty::Const`: `ByRef` for statics/promoteds, otherwise via `op_to_const`.
// NOTE(review): the `tcx` parameter and several body lines (`cid`, validation
// arguments, error-branch plumbing) are missing from this extraction.
564 fn validate_and_turn_into_const<'tcx>(
566 constant: RawConst<'tcx>,
567 key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
568 ) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
570 let ecx = mk_eval_cx(tcx, tcx.def_span(key.value.instance.def_id()), key.param_env);
572 let mplace = ecx.raw_const_to_mplace(constant)?;
// Walk the value and everything it references, validating each place once.
573 let mut ref_tracking = RefTracking::new(mplace);
574 while let Some((mplace, path)) = ref_tracking.todo.pop() {
575 ecx.validate_operand(
578 Some(&mut ref_tracking),
581 // Now that we validated, turn this into a proper constant.
582 // Statics/promoteds are always `ByRef`, for the rest `op_to_const` decides
583 // whether they become immediates.
584 let def_id = cid.instance.def.def_id();
585 if tcx.is_static(def_id) || cid.promoted.is_some() {
586 let ptr = mplace.ptr.to_ptr()?;
587 Ok(tcx.mk_const(ty::Const {
588 val: ConstValue::ByRef {
589 alloc: ecx.tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id),
592 ty: mplace.layout.ty,
595 Ok(op_to_const(&ecx, mplace.into()))
// Validation failures become "undefined behavior" diagnostics with the shared note.
599 val.map_err(|error| {
600 let err = error_to_const_error(&ecx, error);
601 match err.struct_error(ecx.tcx, "it is undefined behavior to use this value") {
603 diag.note(note_on_undefined_behavior_error());
605 ErrorHandled::Reported
// Query provider for `const_eval`: evaluates + validates a constant.
// Tries the cheaper `Reveal::UserFacing` result first, special-cases nullary
// intrinsics, then delegates to `const_eval_raw` + validation.
// NOTE(review): the `tcx` parameter line is missing from this extraction.
612 pub fn const_eval_provider<'tcx>(
614 key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
615 ) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
616 // see comment in const_eval_raw_provider for what we're doing here
617 if key.param_env.reveal == Reveal::All {
618 let mut key = key.clone();
619 key.param_env.reveal = Reveal::UserFacing;
620 match tcx.const_eval(key) {
621 // try again with reveal all as requested
622 Err(ErrorHandled::TooGeneric) => {
623 // Promoteds should never be "too generic" when getting evaluated.
624 // They either don't get evaluated, or we are in a monomorphic context
625 assert!(key.value.promoted.is_none());
// Any other outcome (success or a reported error) is already the answer.
628 other => return other,
632 // We call `const_eval` for zero arg intrinsics, too, in order to cache their value.
633 // Catch such calls and evaluate them instead of trying to load a constant's MIR.
634 if let ty::InstanceDef::Intrinsic(def_id) = key.value.instance.def {
635 let ty = key.value.instance.ty(tcx);
636 let substs = match ty.kind {
637 ty::FnDef(_, substs) => substs,
638 _ => bug!("intrinsic with type {:?}", ty),
640 return eval_nullary_intrinsic(tcx, key.param_env, def_id, substs)
// Error path for the intrinsic evaluation above (closure body is truncated here).
642 let span = tcx.def_span(def_id);
643 let error = ConstEvalErr { error: error.kind, stacktrace: vec![], span };
644 error.report_as_error(tcx.at(span), "could not evaluate nullary intrinsic")
// Ordinary constants: raw-evaluate, then validate and convert.
648 tcx.const_eval_raw(key).and_then(|val| {
649 validate_and_turn_into_const(tcx, val, key)
// Query provider for `const_eval_raw`: runs the interpreter on a constant's MIR
// without validation, and routes failures into the appropriate diagnostic
// (hard error for statics, lints for runtime-promoted code, etc.).
// NOTE(review): the `tcx` parameter, `cid` binding, and several success/error
// plumbing lines are missing from this extraction.
653 pub fn const_eval_raw_provider<'tcx>(
655 key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
656 ) -> ::rustc::mir::interpret::ConstEvalRawResult<'tcx> {
657 // Because the constant is computed twice (once per value of `Reveal`), we are at risk of
658 // reporting the same error twice here. To resolve this, we check whether we can evaluate the
659 // constant in the more restrictive `Reveal::UserFacing`, which most likely already was
660 // computed. For a large percentage of constants that will already have succeeded. Only
661 // associated constants of generic functions will fail due to not enough monomorphization
662 // information being available.
664 // In case we fail in the `UserFacing` variant, we just do the real computation.
665 if key.param_env.reveal == Reveal::All {
666 let mut key = key.clone();
667 key.param_env.reveal = Reveal::UserFacing;
668 match tcx.const_eval_raw(key) {
669 // try again with reveal all as requested
670 Err(ErrorHandled::TooGeneric) => {},
672 other => return other,
675 if cfg!(debug_assertions) {
676 // Make sure we format the instance even if we do not print it.
677 // This serves as a regression test against an ICE on printing.
678 // The next two lines concatenated contain some discussion:
679 // https://rust-lang.zulipchat.com/#narrow/stream/146212-t-compiler.2Fconst-eval/
680 // subject/anon_const_instance_printing/near/135980032
681 let instance = key.value.instance.to_string();
682 trace!("const eval: {:?} ({})", key, instance);
686 let def_id = cid.instance.def.def_id();
// Don't bother evaluating bodies that already failed type-checking.
688 if def_id.is_local() && tcx.typeck_tables_of(def_id).tainted_by_errors {
689 return Err(ErrorHandled::Reported);
692 let span = tcx.def_span(cid.instance.def_id());
693 let mut ecx = InterpCx::new(
696 CompileTimeInterpreter::new(),
// Load the (possibly promoted) MIR and run the interpreter on it.
700 let res = ecx.load_mir(cid.instance.def, cid.promoted);
702 |body| eval_body_using_ecx(&mut ecx, cid, body)
// Success: package the result place up as a `RawConst`.
705 alloc_id: place.ptr.assert_ptr().alloc_id,
// Failure: decide how loudly to report depending on what kind of item failed.
709 let err = error_to_const_error(&ecx, error);
710 // errors in statics are always emitted as fatal errors
711 if tcx.is_static(def_id) {
712 // Ensure that if the above error was either `TooGeneric` or `Reported`
713 // an error must be reported.
714 let v = err.report_as_error(ecx.tcx, "could not evaluate static initializer");
715 tcx.sess.delay_span_bug(
717 &format!("static eval failure did not emit an error: {:#?}", v)
720 } else if def_id.is_local() {
721 // constant defined in this crate, we can figure out a lint level!
722 match tcx.def_kind(def_id) {
723 // constants never produce a hard error at the definition site. Anything else is
724 // a backwards compatibility hazard (and will break old versions of winapi for sure)
726 // note that validation may still cause a hard error on this very same constant,
727 // because any code that existed before validation could not have failed validation
728 // thus preventing such a hard error from being a backwards compatibility hazard
729 Some(DefKind::Const) | Some(DefKind::AssocConst) => {
730 let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
732 tcx.at(tcx.def_span(def_id)),
733 "any use of this value will cause an error",
738 // promoting runtime code is only allowed to error if it references broken constants
739 // any other kind of error will be reported to the user as a deny-by-default lint
740 _ => if let Some(p) = cid.promoted {
741 let span = tcx.promoted_mir(def_id)[p].span;
742 if let err_inval!(ReferencedConstant) = err.error {
745 "evaluation of constant expression failed",
750 "reaching this expression at runtime will panic or abort",
751 tcx.hir().as_local_hir_id(def_id).unwrap(),
755 // anything else (array lengths, enum initializers, constant patterns) are reported
760 "evaluation of constant value failed",
765 // use of broken constant from other crate
766 err.report_as_error(ecx.tcx, "could not evaluate constant")