2 use std::hash::{Hash, Hasher};
5 use rustc::hir::def_id::DefId;
6 use rustc::hir::def::Def;
7 use rustc::hir::map::definitions::DefPathData;
9 use rustc::ty::layout::{
10 self, Size, Align, HasDataLayout, LayoutOf, TyLayout
12 use rustc::ty::subst::{Subst, Substs};
13 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
14 use rustc::ty::query::TyCtxtAt;
15 use rustc_data_structures::fx::{FxHashSet, FxHasher};
16 use rustc_data_structures::indexed_vec::IndexVec;
17 use rustc::mir::interpret::{
18 GlobalId, Scalar, FrameInfo,
19 EvalResult, EvalErrorKind,
21 truncate, sign_extend,
24 use syntax::source_map::{self, Span};
25 use syntax::ast::Mutability;
28 Value, Operand, MemPlace, MPlaceTy, Place, PlaceExtra,
// NOTE(review): this listing is an excerpt with lines elided — e.g. the field
// documented by "Stores the `Machine` instance." and the struct's closing
// brace are not visible here.
// The complete interpreter state for one (const-)evaluation session:
// type-checking context, parameter environment, virtual memory, the virtual
// call stack, and bookkeeping for the infinite-loop detector.
32 pub struct EvalContext<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
33 /// Stores the `Machine` instance.
36 /// The results of the type checker, from rustc.
37 pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
39 /// Bounds in scope for polymorphic evaluations.
40 pub param_env: ty::ParamEnv<'tcx>,
42 /// The virtual memory system.
43 pub memory: Memory<'a, 'mir, 'tcx, M>,
45 /// The virtual call stack.
46 pub(crate) stack: Vec<Frame<'mir, 'tcx>>,
48 /// The maximum number of stack frames allowed
49 pub(crate) stack_limit: usize,
51 /// When this value is negative, it indicates the number of interpreter
52 /// steps *until* the loop detector is enabled. When it is positive, it is
53 /// the number of steps after the detector has been enabled modulo the loop
// (initialized to -STEPS_UNTIL_DETECTOR_ENABLED in `new`, see below)
55 pub(crate) steps_since_detector_enabled: isize,
57 pub(crate) loop_detector: InfiniteLoopDetector<'a, 'mir, 'tcx, M>,
// A single entry of the virtual call stack: the callee's MIR, its resolved
// instance (def_id + substs), call-site span, return plumbing, the locals
// array, and the current position (basic block + statement index).
// NOTE(review): excerpt — the `stmt` field following its doc comment at the
// end, and the struct's closing brace, are elided from this listing.
62 pub struct Frame<'mir, 'tcx: 'mir> {
63 ////////////////////////////////////////////////////////////////////////////////
64 // Function and callsite information
65 ////////////////////////////////////////////////////////////////////////////////
66 /// The MIR for the function called on this frame.
67 pub mir: &'mir mir::Mir<'tcx>,
69 /// The def_id and substs of the current function
70 pub instance: ty::Instance<'tcx>,
72 /// The span of the call site.
73 pub span: source_map::Span,
75 ////////////////////////////////////////////////////////////////////////////////
76 // Return place and locals
77 ////////////////////////////////////////////////////////////////////////////////
78 /// The block to return to when returning from the current stack frame
79 pub return_to_block: StackPopCleanup,
81 /// The location where the result of the current stack frame should be written to.
82 pub return_place: Place,
84 /// The list of locals for this stack frame, stored in order as
85 /// `[return_ptr, arguments..., variables..., temporaries...]`. The locals are stored as `Option<Value>`s.
86 /// `None` represents a local that is currently dead, while a live local
87 /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
88 pub locals: IndexVec<mir::Local, LocalValue>,
90 ////////////////////////////////////////////////////////////////////////////////
91 // Current position within the function
92 ////////////////////////////////////////////////////////////////////////////////
93 /// The block that is currently executed (or will be executed after the above call stacks
95 pub block: mir::BasicBlock,
97 /// The index of the currently evaluated statement.
101 impl<'mir, 'tcx: 'mir> Eq for Frame<'mir, 'tcx> {}
// Structural equality over the fields relevant for loop detection.
// NOTE(review): excerpt — the destructuring `let` that binds `instance`,
// `return_to_block`, etc. from `self` (and the closing braces) are elided.
// `mir` and `span` are intentionally not compared (derived from `instance`).
103 impl<'mir, 'tcx: 'mir> PartialEq for Frame<'mir, 'tcx> {
104 fn eq(&self, other: &Self) -> bool {
116 // Some of these are constant during evaluation, but are included
117 // anyways for correctness.
118 *instance == other.instance
119 && *return_to_block == other.return_to_block
120 && *return_place == other.return_place
121 && *locals == other.locals
122 && *block == other.block
123 && *stmt == other.stmt
// Hash must agree with the `PartialEq` impl above: only fields that
// participate in `eq` are fed to the hasher.
// NOTE(review): excerpt — the binding of the fields, the hashing of the
// remaining compared fields (`locals`, `block`, `stmt`), and the closing
// braces are elided from this listing.
127 impl<'mir, 'tcx: 'mir> Hash for Frame<'mir, 'tcx> {
128 fn hash<H: Hasher>(&self, state: &mut H) {
140 instance.hash(state);
141 return_to_block.hash(state);
142 return_place.hash(state);
149 // State of a local variable
// NOTE(review): excerpt — the variants themselves are elided; from the
// accessors below they are `Dead` and `Live(Operand)`.
150 #[derive(Copy, Clone, PartialEq, Eq, Hash)]
151 pub enum LocalValue {
153 // Mostly for convenience, we re-use the `Operand` type here.
154 // This is an optimization over just always having a pointer here;
155 // we can thus avoid doing an allocation when the local just stores
156 // immediate values *and* never has its address taken.
// Accessors that turn "local is dead" into a proper `DeadLocal` error
// instead of a panic; `access`/`access_mut` mirror each other.
// NOTE(review): excerpt — the `match` headers and closing braces are elided.
160 impl<'tcx> LocalValue {
161 pub fn access(&self) -> EvalResult<'tcx, &Operand> {
163 LocalValue::Dead => err!(DeadLocal),
164 LocalValue::Live(ref val) => Ok(val),
168 pub fn access_mut(&mut self) -> EvalResult<'tcx, &mut Operand> {
170 LocalValue::Dead => err!(DeadLocal),
171 LocalValue::Live(ref mut val) => Ok(val),
176 /// The virtual machine state during const-evaluation at a given point in time.
// Everything the loop detector needs to decide whether two points in time are
// identical: machine state, the full call stack, and the memory contents.
177 type EvalSnapshot<'a, 'mir, 'tcx, M>
178 = (M, Vec<Frame<'mir, 'tcx>>, Memory<'a, 'mir, 'tcx, M>);
// Two-level loop detection: cheap hashes of every observed snapshot, and full
// snapshot clones only for hashes that collide. See `observe_and_analyze`.
// NOTE(review): excerpt — closing brace elided.
180 pub(crate) struct InfiniteLoopDetector<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'mir, 'tcx>> {
181 /// The set of all `EvalSnapshot` *hashes* observed by this detector.
183 /// When a collision occurs in this table, we store the full snapshot in
185 hashes: FxHashSet<u64>,
187 /// The set of all `EvalSnapshot`s observed by this detector.
189 /// An `EvalSnapshot` will only be fully cloned once it has caused a
190 /// collision in `hashes`. As a result, the detector must observe at least
191 /// *two* full cycles of an infinite loop before it triggers.
192 snapshots: FxHashSet<EvalSnapshot<'a, 'mir, 'tcx, M>>,
// Manual `Default` (rather than `#[derive]`) because `M` itself need not be
// `Default`; both sets simply start empty.
195 impl<'a, 'mir, 'tcx, M> Default for InfiniteLoopDetector<'a, 'mir, 'tcx, M>
196 where M: Machine<'mir, 'tcx>,
199 fn default() -> Self {
200 InfiniteLoopDetector {
201 hashes: FxHashSet::default(),
202 snapshots: FxHashSet::default(),
207 impl<'a, 'mir, 'tcx, M> InfiniteLoopDetector<'a, 'mir, 'tcx, M>
208 where M: Machine<'mir, 'tcx>,
211 /// Returns `true` if the loop detector has not yet observed a snapshot.
// Only `hashes` is checked: every observation inserts a hash, so an empty
// hash set implies no snapshots either.
212 pub fn is_empty(&self) -> bool {
213 self.hashes.is_empty()
// Record the current interpreter state and report `InfiniteLoop` if this
// exact state has been seen before.
// Step 1: hash the borrowed snapshot (cheap, no cloning).
// Step 2: only on a hash collision, clone the full state into `snapshots`;
// a repeated full snapshot means execution can never terminate.
// NOTE(review): excerpt — `&mut self`/`machine` parameters, early returns
// inside the two `if` branches, and closing braces are elided.
216 pub fn observe_and_analyze(
219 stack: &Vec<Frame<'mir, 'tcx>>,
220 memory: &Memory<'a, 'mir, 'tcx, M>,
221 ) -> EvalResult<'tcx, ()> {
222 let snapshot = (machine, stack, memory);
224 let mut fx = FxHasher::default();
225 snapshot.hash(&mut fx);
226 let hash = fx.finish();
228 if self.hashes.insert(hash) {
233 if self.snapshots.insert((machine.clone(), stack.clone(), memory.clone())) {
234 // Spurious collision or first cycle
239 Err(EvalErrorKind::InfiniteLoop.into())
// What to do with the frame (and its return place) once it is popped.
// NOTE(review): excerpt — the `None` variant following the last doc comment,
// and the closing brace, are elided from this listing.
243 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
244 pub enum StackPopCleanup {
245 /// The stackframe existed to compute the initial value of a static/constant, make sure it
246 /// isn't modifyable afterwards in case of constants.
247 /// In case of `static mut`, mark the memory to ensure it's never marked as immutable through
248 /// references or deallocated
249 MarkStatic(Mutability),
250 /// A regular stackframe added due to a function call will need to get forwarded to the next
252 Goto(mir::BasicBlock),
253 /// The main function and diverging functions have nowhere to return to
// Forward the target's data layout from the compiler session, so layout
// queries can be made through a borrowed `EvalContext`.
257 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout for &'a EvalContext<'a, 'mir, 'tcx, M> {
259 fn data_layout(&self) -> &layout::TargetDataLayout {
260 &self.tcx.data_layout
// Same as above, but for the doubly-indirect `&&mut` form used when a mutable
// context is passed around by reference.
264 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> HasDataLayout
265 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
267 fn data_layout(&self) -> &layout::TargetDataLayout {
268 &self.tcx.data_layout
// Expose the underlying `TyCtxt` for layout computations.
// NOTE(review): excerpt — the method body and closing braces are elided.
272 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx> for &'a EvalContext<'a, 'mir, 'tcx, M> {
274 fn tcx<'b>(&'b self) -> TyCtxt<'b, 'tcx, 'tcx> {
// `&&mut` counterpart of the `HasTyCtxt` impl above.
// NOTE(review): excerpt — the method body and closing braces are elided.
279 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> layout::HasTyCtxt<'tcx>
280 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
282 fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
// Layout queries through the interpreter: resolve the type's layout under the
// current `param_env`, converting layout errors into `EvalErrorKind::Layout`.
287 impl<'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf for &'a EvalContext<'a, 'mir, 'tcx, M> {
289 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
291 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
292 self.tcx.layout_of(self.param_env.and(ty))
293 .map_err(|layout| EvalErrorKind::Layout(layout).into())
// `&&mut` counterpart: delegate to the shared-reference impl above by
// reborrowing (`&**self`).
297 impl<'c, 'b, 'a, 'mir, 'tcx, M: Machine<'mir, 'tcx>> LayoutOf
298 for &'c &'b mut EvalContext<'a, 'mir, 'tcx, M> {
300 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
303 fn layout_of(self, ty: Ty<'tcx>) -> Self::TyLayout {
304 (&**self).layout_of(ty)
308 const STEPS_UNTIL_DETECTOR_ENABLED: isize = 1_000_000;
310 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
// Constructor: builds a fresh interpreter with an empty call stack; the loop
// detector starts disabled (negative step counter) and the stack limit comes
// from the compiler session.
// NOTE(review): excerpt — the `pub fn new(` line, the `machine` parameter,
// and several field initializers are elided from this listing.
312 tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
313 param_env: ty::ParamEnv<'tcx>,
315 memory_data: M::MemoryData,
321 memory: Memory::new(tcx, memory_data),
323 stack_limit: tcx.sess.const_eval_stack_frame_limit,
324 loop_detector: Default::default(),
325 steps_since_detector_enabled: -STEPS_UNTIL_DETECTOR_ENABLED,
// Run `f` with a temporarily empty call stack and a reset loop-detector
// counter, then restore the saved state — `mem::replace` both saves the old
// value and installs the fresh one in a single step.
// NOTE(review): excerpt — the call to `f`, the restoration of `stack`, and
// the return of `f`'s result are elided from this listing.
329 pub(crate) fn with_fresh_body<F: FnOnce(&mut Self) -> R, R>(&mut self, f: F) -> R {
330 let stack = mem::replace(&mut self.stack, Vec::new());
331 let steps = mem::replace(&mut self.steps_since_detector_enabled, -STEPS_UNTIL_DETECTOR_ENABLED);
334 self.steps_since_detector_enabled = steps;
// Simple accessors for memory and the call stack.
// NOTE(review): excerpt — the one-line bodies are elided. `cur_frame`
// asserts a non-empty stack; presumably it returns `stack.len() - 1`
// (the index of the innermost frame) — confirm against the full source.
338 pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
342 pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
346 pub fn stack(&self) -> &[Frame<'mir, 'tcx>] {
351 pub fn cur_frame(&self) -> usize {
352 assert!(self.stack.len() > 0);
356 /// Mark a storage as live, killing the previous content and returning it.
357 /// Remember to deallocate that!
// Allocates an uninitialized operand of the local's (monomorphized) layout
// and swaps it in; the previous `LocalValue` is handed back to the caller,
// who is responsible for deallocating any backing allocation.
358 pub fn storage_live(&mut self, local: mir::Local) -> EvalResult<'tcx, LocalValue> {
359 trace!("{:?} is now live", local);
361 let layout = self.layout_of_local(self.cur_frame(), local)?;
362 let init = LocalValue::Live(self.uninit_operand(layout)?);
363 // StorageLive *always* kills the value that's currently stored
364 Ok(mem::replace(&mut self.frame_mut().locals[local], init))
367 /// Returns the old value of the local.
368 /// Remember to deallocate that!
// Counterpart of `storage_live`: swaps in `Dead` and returns whatever was
// stored, leaving deallocation to the caller.
369 pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue {
370 trace!("{:?} is now dead", local);
372 mem::replace(&mut self.frame_mut().locals[local], LocalValue::Dead)
// Allocate the string's bytes in interpreter memory and build a fat-pointer
// slice value (pointer + byte length) referencing them.
375 pub fn str_to_value(&mut self, s: &str) -> EvalResult<'tcx, Value> {
376 let ptr = self.memory.allocate_bytes(s.as_bytes());
377 Ok(Value::new_slice(Scalar::Ptr(ptr), s.len() as u64, self.tcx.tcx))
// Resolve a `DefId` + substs to a concrete `Instance` under the current
// param_env, first substituting/normalizing the substs; if resolution fails
// (still too generic), report `TooGeneric`.
// NOTE(review): excerpt — the arguments to both calls are elided here.
380 pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
381 trace!("resolve: {:?}, {:#?}", def_id, substs);
382 trace!("substs: {:#?}", self.substs());
383 trace!("param_env: {:#?}", self.param_env);
384 let substs = self.tcx.subst_and_normalize_erasing_regions(
389 ty::Instance::resolve(
394 ).ok_or_else(|| EvalErrorKind::TooGeneric.into())
// Whether `ty` is `Sized` in the current parameter environment.
397 pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
398 ty.is_sized(self.tcx, self.param_env)
// Fetch the MIR body to interpret for an instance. Bails out early with
// `TypeckError` if local typeck was tainted by errors; plain items use the
// optimized MIR (reporting `NoMirFor` when unavailable, e.g. cross-crate
// without MIR), all other instance kinds (shims etc.) use `instance_mir`.
// NOTE(review): excerpt — the `fn load_mir(` header line, the `match`
// header, and closing braces are elided from this listing.
403 instance: ty::InstanceDef<'tcx>,
404 ) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
405 // do not continue if typeck errors occurred (can only occur in local crate)
406 let did = instance.def_id();
407 if did.is_local() && self.tcx.has_typeck_tables(did) && self.tcx.typeck_tables_of(did).tainted_by_errors {
408 return err!(TypeckError);
410 trace!("load mir {:?}", instance);
412 ty::InstanceDef::Item(def_id) => {
413 self.tcx.maybe_optimized_mir(def_id).ok_or_else(||
414 EvalErrorKind::NoMirFor(self.tcx.item_path_str(def_id)).into()
417 _ => Ok(self.tcx.instance_mir(instance)),
// Substitute `substs` into `t` and erase regions: the interpreter never
// needs lifetime information, and normalizing under `reveal_all` also
// resolves associated types.
// NOTE(review): excerpt — the `&self`/`t` parameter lines and the closing
// brace are elided from this listing.
421 pub fn monomorphize<T: TypeFoldable<'tcx> + Subst<'tcx>>(
424 substs: &'tcx Substs<'tcx>
426 // miri doesn't care about lifetimes, and will choke on some crazy ones
427 // let's simply get rid of them
428 let substituted = t.subst(*self.tcx, substs);
429 self.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), substituted)
// Layout of a local in a given stack frame: take the declared type from the
// frame's MIR, monomorphize it with the frame's substs, then query layout.
// NOTE(review): excerpt — the `frame`/`local` parameter lines are elided.
432 pub fn layout_of_local(
436 ) -> EvalResult<'tcx, TyLayout<'tcx>> {
437 let local_ty = self.stack[frame].mir.local_decls[local].ty;
438 let local_ty = self.monomorphize(
440 self.stack[frame].instance.substs
442 self.layout_of(local_ty)
445 /// Return the actual dynamic size and alignment of the place at the given type.
446 /// Note that the value does not matter if the type is sized. For unsized types,
447 /// the value has to be a fat pointer, and we only care about the "extra" data in it.
// Sized case: `PlaceExtra::None` means the layout is statically known.
// Unsized cases, by type:
//   - ADT/tuple: statically-sized prefix + recursively computed tail.
//   - trait object: size/align read from the vtable.
//   - slice/str: element size times the length carried in the fat pointer.
// NOTE(review): excerpt — `&self`, the `else` for the sized early-return,
// a `trace!`/`debug!` argument list, and several closing braces are elided.
448 pub fn size_and_align_of_mplace(
450 mplace: MPlaceTy<'tcx>,
451 ) -> EvalResult<'tcx, (Size, Align)> {
452 if let PlaceExtra::None = mplace.extra {
453 assert!(!mplace.layout.is_unsized());
454 Ok(mplace.layout.size_and_align())
456 let layout = mplace.layout;
457 assert!(layout.is_unsized());
458 match layout.ty.sty {
459 ty::TyAdt(..) | ty::TyTuple(..) => {
460 // First get the size of all statically known fields.
461 // Don't use type_of::sizing_type_of because that expects t to be sized,
462 // and it also rounds up to alignment, which we want to avoid,
463 // as the unsized field's alignment could be smaller.
464 assert!(!layout.ty.is_simd());
465 debug!("DST layout: {:?}", layout);
// The unsized field is always the last one; its offset is the size of the
// sized prefix (not rounded up to alignment, see the comment above).
467 let sized_size = layout.fields.offset(layout.fields.count() - 1);
468 let sized_align = layout.align;
470 "DST {} statically sized prefix size: {:?} align: {:?}",
476 // Recurse to get the size of the dynamically sized field (must be
478 let field = self.mplace_field(mplace, layout.fields.count() as u64 - 1)?;
479 let (unsized_size, unsized_align) = self.size_and_align_of_mplace(field)?;
481 // FIXME (#26403, #27023): We should be adding padding
482 // to `sized_size` (to accommodate the `unsized_align`
483 // required of the unsized field that follows) before
484 // summing it with `sized_size`. (Note that since #26403
485 // is unfixed, we do not yet add the necessary padding
486 // here. But this is where the add would go.)
488 // Return the sum of sizes and max of aligns.
489 let size = sized_size + unsized_size;
491 // Choose max of two known alignments (combined value must
492 // be aligned according to more restrictive of the two).
493 let align = sized_align.max(unsized_align);
495 // Issue #27023: must add any necessary padding to `size`
496 // (to make it a multiple of `align`) before returning it.
498 // Namely, the returned size should be, in C notation:
500 // `size + ((size & (align-1)) ? align : 0)`
502 // emulated via the semi-standard fast bit trick:
504 // `(size + (align-1)) & -align`
506 Ok((size.abi_align(align), align))
508 ty::TyDynamic(..) => {
509 let vtable = match mplace.extra {
510 PlaceExtra::Vtable(vtable) => vtable,
511 _ => bug!("Expected vtable"),
513 // the second entry in the vtable is the dynamic size of the object.
514 self.read_size_and_align_from_vtable(vtable)
517 ty::TySlice(_) | ty::TyStr => {
518 let len = match mplace.extra {
519 PlaceExtra::Length(len) => len,
520 _ => bug!("Expected length"),
522 let (elem_size, align) = layout.field(self, 0)?.size_and_align();
523 Ok((elem_size * len, align))
526 _ => bug!("size_of_val::<{:?}> not supported", layout.ty),
// Push a new call frame and initialize its locals:
//   1. push the `Frame` with an empty locals array (so helpers like
//      `layout_of_local` can already see the frame's substs);
//   2. fill locals with a dummy "live undef" marker;
//   3. mark locals dead that have `StorageDead` (unless evaluating a
//      static/const, which has no `Storage*` statements);
//   4. turn the remaining dummies into properly laid-out live locals;
//   5. enforce the stack depth limit.
// NOTE(review): excerpt — the `&mut self`/`return_place` parameters, the
// hook call into the machine, several `Frame` field initializers, the
// `StorageLive` match arm, and closing braces are elided from this listing.
531 pub fn push_stack_frame(
533 instance: ty::Instance<'tcx>,
534 span: source_map::Span,
535 mir: &'mir mir::Mir<'tcx>,
537 return_to_block: StackPopCleanup,
538 ) -> EvalResult<'tcx> {
539 ::log_settings::settings().indentation += 1;
541 // first push a stack frame so we have access to the local substs
542 self.stack.push(Frame {
544 block: mir::START_BLOCK,
547 // empty local array, we fill it in below, after we are inside the stack frame and
548 // all methods actually know about the frame
549 locals: IndexVec::new(),
555 // don't allocate at all for trivial constants
556 if mir.local_decls.len() > 1 {
557 // We put some marker value into the locals that we later want to initialize.
558 // This can be anything except for LocalValue::Dead -- because *that* is the
559 // value we use for things that we know are initially dead.
561 LocalValue::Live(Operand::Immediate(Value::Scalar(ScalarMaybeUndef::Undef)));
562 self.frame_mut().locals = IndexVec::from_elem(dummy, &mir.local_decls);
563 // Now mark those locals as dead that we do not want to initialize
564 match self.tcx.describe_def(instance.def_id()) {
565 // statics and constants don't have `Storage*` statements, no need to look for them
566 Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
568 trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
569 for block in mir.basic_blocks() {
570 for stmt in block.statements.iter() {
571 use rustc::mir::StatementKind::{StorageDead, StorageLive};
574 StorageDead(local) => {
575 // Worst case we are overwriting a dummy, no deallocation needed
576 self.storage_dead(local);
584 // Finally, properly initialize all those that still have the dummy value
585 for local in mir.local_decls.indices() {
586 if self.frame().locals[local] == dummy {
587 self.storage_live(local)?;
592 if self.stack.len() > self.stack_limit {
593 err!(StackFrameLimitReached)
// Pop the innermost frame: end the machine's region, run the frame's
// `StackPopCleanup` (freeze statics / jump to the return block / nothing),
// then free every local that is backed by an interpreter allocation.
// NOTE(review): excerpt — the `mark_static_initialized` argument list, a
// `?` branch, the `else` of the `if let`, and closing braces are elided.
599 pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
600 ::log_settings::settings().indentation -= 1;
601 M::end_region(self, None)?;
602 let frame = self.stack.pop().expect(
603 "tried to pop a stack frame, but there were none",
605 match frame.return_to_block {
606 StackPopCleanup::MarkStatic(mutable) => {
607 if let Place::Ptr(MemPlace { ptr, .. }) = frame.return_place {
608 // FIXME: to_ptr()? might be too extreme here, static zsts might reach this under certain conditions
609 self.memory.mark_static_initialized(
610 ptr.to_ptr()?.alloc_id,
614 bug!("StackPopCleanup::MarkStatic on: {:?}", frame.return_place);
617 StackPopCleanup::Goto(target) => self.goto_block(target),
618 StackPopCleanup::None => {}
620 // deallocate all locals that are backed by an allocation
621 for local in frame.locals {
622 self.deallocate_local(local)?;
// Free the allocation behind a local, if any: only `Live(Indirect(..))`
// locals own memory; dead or immediate locals need no cleanup.
628 crate fn deallocate_local(&mut self, local: LocalValue) -> EvalResult<'tcx> {
629 // FIXME: should we tell the user that there was a local which was never written to?
630 if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
631 trace!("deallocating local");
632 let ptr = ptr.to_ptr()?;
// `dump_alloc` is for tracing only; the actual free is `deallocate_local`.
633 self.memory.dump_alloc(ptr.alloc_id);
634 self.memory.deallocate_local(ptr)?;
// Evaluate a global (static/const) via the tcx query. Statics are evaluated
// with `reveal_all` (their value is the same regardless of the param_env);
// errors are wrapped as `ReferencedConstant`.
// NOTE(review): excerpt — the `else` branch choosing the param_env for
// non-statics is elided from this listing.
639 pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
640 let param_env = if self.tcx.is_static(gid.instance.def_id()).is_some() {
641 ty::ParamEnv::reveal_all()
645 self.tcx.const_eval(param_env.and(gid)).map_err(|err| EvalErrorKind::ReferencedConstant(err).into())
// Accessors for the innermost frame; these panic if the stack is empty,
// so they must only be called while a frame is pushed.
// NOTE(review): excerpt — `mir()`'s body, `substs()`'s else-branch (for an
// empty stack), and closing braces are elided from this listing.
649 pub fn frame(&self) -> &Frame<'mir, 'tcx> {
650 self.stack.last().expect("no call frames exist")
654 pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx> {
655 self.stack.last_mut().expect("no call frames exist")
658 pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
662 pub fn substs(&self) -> &'tcx Substs<'tcx> {
663 if let Some(frame) = self.stack.last() {
664 frame.instance.substs
// Trace-level debug dump of a place: prints what the place refers to and
// then dumps the allocations reachable from it. No-op unless trace logging
// is enabled.
// NOTE(review): excerpt — the `match place` header, the `Ok/Err` arm
// headers, a `trace!(msg)` call, and closing braces are elided.
670 pub fn dump_place(&self, place: Place) {
// Early-out so the formatting work below is only done when tracing.
672 if !log_enabled!(::log::Level::Trace) {
676 Place::Local { frame, local } => {
677 let mut allocs = Vec::new();
678 let mut msg = format!("{:?}", local);
679 if frame != self.cur_frame() {
680 write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
682 write!(msg, ":").unwrap();
// Describe the local depending on how it is stored (dead, indirect
// via a pointer, or immediate scalar/scalar-pair), collecting any
// allocation ids it references for the dump below.
684 match self.stack[frame].locals[local].access() {
686 if let EvalErrorKind::DeadLocal = err.kind {
687 write!(msg, " is dead").unwrap();
689 panic!("Failed to access local: {:?}", err);
692 Ok(Operand::Indirect(mplace)) => {
693 let (ptr, align) = mplace.to_scalar_ptr_align();
695 Scalar::Ptr(ptr) => {
696 write!(msg, " by align({}) ref:", align.abi()).unwrap();
697 allocs.push(ptr.alloc_id);
699 ptr => write!(msg, " by integral ref: {:?}", ptr).unwrap(),
702 Ok(Operand::Immediate(Value::Scalar(val))) => {
703 write!(msg, " {:?}", val).unwrap();
704 if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
705 allocs.push(ptr.alloc_id);
708 Ok(Operand::Immediate(Value::ScalarPair(val1, val2))) => {
709 write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
710 if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
711 allocs.push(ptr.alloc_id);
713 if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val2 {
714 allocs.push(ptr.alloc_id);
720 self.memory.dump_allocs(allocs);
722 Place::Ptr(mplace) => {
724 Scalar::Ptr(ptr) => {
725 trace!("by align({}) ref:", mplace.align.abi());
726 self.memory.dump_alloc(ptr.alloc_id);
728 ptr => trace!(" integral by ref: {:?}", ptr),
// Build a user-facing stack trace from the interpreter stack, walking the
// frames innermost-last (`.rev()` after `.skip(1)`), deduplicating spans
// that repeat the explicit span or the previous frame's span, and resolving
// each frame's current source location and lint root from its MIR.
// NOTE(review): excerpt — the duplicate-span `continue`, both arms of the
// closure-vs-item `location` computation, and closing braces are elided.
734 pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> (Vec<FrameInfo>, Span) {
735 let mut last_span = None;
736 let mut frames = Vec::new();
737 // skip 1 because the last frame is just the environment of the constant
738 for &Frame { instance, span, mir, block, stmt, .. } in self.stack().iter().skip(1).rev() {
739 // make sure we don't emit frames that are duplicates of the previous
740 if explicit_span == Some(span) {
741 last_span = Some(span);
744 if let Some(last) = last_span {
749 last_span = Some(span);
751 let location = if self.tcx.def_key(instance.def_id()).disambiguated_data.data == DefPathData::ClosureExpr {
// Take the source info from the current statement, or from the block
// terminator if the statement index is past the last statement.
756 let block = &mir.basic_blocks()[block];
757 let source_info = if stmt < block.statements.len() {
758 block.statements[stmt].source_info
760 block.terminator().source_info
762 let lint_root = match mir.source_scope_local_data {
763 mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
764 mir::ClearCrossCrate::Clear => None,
766 frames.push(FrameInfo { span, location, lint_root });
768 trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
769 (frames, self.tcx.span)
// Thin wrappers over the interpret-module helpers, supplying the bit width
// from the type's layout. `sign_extend` asserts the type is actually signed.
773 pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 {
774 assert!(ty.abi.is_signed());
775 sign_extend(value, ty.size)
779 pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
780 truncate(value, ty.size)