5 use syntax::source_map::{self, Span, DUMMY_SP};
6 use rustc::hir::def_id::DefId;
7 use rustc::hir::def::DefKind;
9 use rustc::ty::layout::{
10 self, Size, Align, HasDataLayout, LayoutOf, TyLayout
12 use rustc::ty::subst::{Subst, SubstsRef};
13 use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
14 use rustc::ty::query::TyCtxtAt;
15 use rustc_data_structures::indexed_vec::IndexVec;
16 use rustc::mir::interpret::{
18 GlobalId, Scalar, Pointer, FrameInfo, AllocId,
19 EvalResult, InterpError,
20 truncate, sign_extend,
22 use rustc_data_structures::fx::FxHashMap;
25 Immediate, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef,
/// Central interpreter state: the type-checking context, the parameter
/// environment, the simulated memory, and the virtual call stack.
/// NOTE(review): this chunk is elided — the `machine` field declaration and
/// the struct's closing lines are missing; comments only added here.
29 pub struct InterpretCx<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> {
30 /// Stores the `Machine` instance.
33 /// The results of the type checker, from rustc.
34 pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
36 /// Bounds in scope for polymorphic evaluations.
37 pub(crate) param_env: ty::ParamEnv<'tcx>,
39 /// The virtual memory system.
40 pub(crate) memory: Memory<'a, 'mir, 'tcx, M>,
42 /// The virtual call stack.
43 pub(crate) stack: Vec<Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>>,
45 /// A cache for deduplicating vtables
46 pub(super) vtables: FxHashMap<
47 (Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>),
48 Pointer<M::PointerTag>
/// A single frame on the interpreter's virtual call stack.
/// NOTE(review): the `stmt` and machine-`extra` field declarations are elided
/// in this chunk — both are used elsewhere in the file (`generate_stacktrace`
/// destructures `stmt`; `pop_stack_frame` reads `frame.extra`).
54 pub struct Frame<'mir, 'tcx: 'mir, Tag=(), Extra=()> {
55 ////////////////////////////////////////////////////////////////////////////////
56 // Function and callsite information
57 ////////////////////////////////////////////////////////////////////////////////
58 /// The MIR for the function called on this frame.
59 pub mir: &'mir mir::Body<'tcx>,
61 /// The def_id and substs of the current function.
62 pub instance: ty::Instance<'tcx>,
64 /// The span of the call site.
65 pub span: source_map::Span,
67 ////////////////////////////////////////////////////////////////////////////////
68 // Return place and locals
69 ////////////////////////////////////////////////////////////////////////////////
70 /// Work to perform when returning from this function.
71 pub return_to_block: StackPopCleanup,
73 /// The location where the result of the current stack frame should be written to,
74 /// and its layout in the caller.
75 pub return_place: Option<PlaceTy<'tcx, Tag>>,
77 /// The list of locals for this stack frame, stored in order as
78 /// `[return_ptr, arguments..., variables..., temporaries...]`.
79 /// The locals are stored as `Option<Value>`s.
80 /// `None` represents a local that is currently dead, while a live local
81 /// can either directly contain `Scalar` or refer to some part of an `Allocation`.
82 pub locals: IndexVec<mir::Local, LocalState<'tcx, Tag>>,
84 ////////////////////////////////////////////////////////////////////////////////
85 // Current position within the function
86 ////////////////////////////////////////////////////////////////////////////////
87 /// The block that is currently executed (or will be executed after the above call stacks
89 pub block: mir::BasicBlock,
91 /// The index of the currently evaluated statement.
94 /// Extra data for the machine.
/// What to do with the current frame's locals and control flow when it is popped.
98 #[derive(Clone, Debug, Eq, PartialEq, Hash)]
99 pub enum StackPopCleanup {
100 /// Jump to the next block in the caller, or cause UB if None (that's a function
101 /// that may never return). Also store layout of return place so
102 /// we can validate it at that layout.
103 Goto(Option<mir::BasicBlock>),
104 /// Just do nothing: used by `main` and for the `box_alloc` hook in miri.
105 /// `cleanup` says whether locals are deallocated. Static computation
106 /// wants them leaked to intern what they need (and just throw away
107 /// the entire `ecx` when it is done).
108 None { cleanup: bool },
111 /// State of a local variable including a memoized layout
112 #[derive(Clone, PartialEq, Eq)]
113 pub struct LocalState<'tcx, Tag=(), Id=AllocId> {
114 pub value: LocalValue<Tag, Id>,
115 /// Don't modify if `Some`, this is only used to prevent computing the layout twice
// `Cell` gives interior mutability so the cache can be filled through a
// shared reference (see `layout_of_local`, which calls `.get()`/`.set()`).
116 pub layout: Cell<Option<TyLayout<'tcx>>>,
119 /// Current value of a local variable
120 #[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
121 pub enum LocalValue<Tag=(), Id=AllocId> {
122 /// This local is not currently alive, and cannot be used at all.
// NOTE(review): the `Dead,` and `Uninitialized,` variant declarations are
// elided in this chunk; both variants are matched below (e.g. in
// `LocalState::access` and `dump_place`).
124 /// This local is alive but not yet initialized. It can be written to
125 /// but not read from or its address taken. Locals get initialized on
126 /// first write because for unsized locals, we do not know their size
129 /// A normal, live local.
130 /// Mostly for convenience, we re-use the `Operand` type here.
131 /// This is an optimization over just always having a pointer here;
132 /// we can thus avoid doing an allocation when the local just stores
133 /// immediate values *and* never has its address taken.
134 Live(Operand<Tag, Id>),
137 impl<'tcx, Tag: Copy + 'static> LocalState<'tcx, Tag> {
/// Read the local's current value: error on dead locals, ICE on
/// uninitialized ones (the type checker rules such reads out).
138 pub fn access(&self) -> EvalResult<'tcx, Operand<Tag>> {
140 LocalValue::Dead => err!(DeadLocal),
141 LocalValue::Uninitialized =>
142 bug!("The type checker should prevent reading from a never-written local"),
143 LocalValue::Live(val) => Ok(val),
147 /// Overwrite the local. If the local can be overwritten in place, return a reference
148 /// to do so; otherwise return the `MemPlace` to consult instead.
151 ) -> EvalResult<'tcx, Result<&mut LocalValue<Tag>, MemPlace<Tag>>> {
153 LocalValue::Dead => err!(DeadLocal),
// Memory-backed (indirect) locals must be written through their `MemPlace`.
154 LocalValue::Live(Operand::Indirect(mplace)) => Ok(Err(mplace)),
// Immediate and uninitialized locals can be overwritten in place.
155 ref mut local @ LocalValue::Live(Operand::Immediate(_)) |
156 ref mut local @ LocalValue::Uninitialized => {
// Expose the target's data layout (pointer size, endianness, ...) straight
// from the tcx, so layout-aware code can use the interpreter as its context.
163 impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
164 for InterpretCx<'a, 'mir, 'tcx, M>
167 fn data_layout(&self) -> &layout::TargetDataLayout {
168 &self.tcx.data_layout
172 impl<'a, 'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpretCx<'a, 'mir, 'tcx, M>
173 where M: Machine<'a, 'mir, 'tcx>
// NOTE(review): the method body is elided in this chunk; presumably it
// dereferences `self.tcx` — confirm against the full source.
176 fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
181 impl<'a, 'mir, 'tcx, M> layout::HasParamEnv<'tcx> for InterpretCx<'a, 'mir, 'tcx, M>
182 where M: Machine<'a, 'mir, 'tcx>
// NOTE(review): the method body is elided in this chunk; presumably it
// returns `self.param_env` — confirm against the full source.
184 fn param_env(&self) -> ty::ParamEnv<'tcx> {
189 impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
190 for InterpretCx<'a, 'mir, 'tcx, M>
193 type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;
/// Compute the layout of `ty` under this interpreter's `param_env`,
/// converting layout errors into interpreter errors.
196 fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
197 self.tcx.layout_of(self.param_env.and(ty))
198 .map_err(|layout| InterpError::Layout(layout).into())
202 impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
/// Constructor fragment: builds a fresh interpreter with an empty `Memory`
/// and an empty vtable cache. NOTE(review): the `fn new` signature line and
/// the remaining field initializers are elided in this chunk.
204 tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
205 param_env: ty::ParamEnv<'tcx>,
212 memory: Memory::new(tcx),
214 vtables: FxHashMap::default(),
// Simple accessors over interpreter state.
219 pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
224 pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
/// Apply the machine's tag for static base pointers; delegates to `Memory`.
229 pub fn tag_static_base_pointer(&self, ptr: Pointer) -> Pointer<M::PointerTag> {
230 self.memory.tag_static_base_pointer(ptr)
234 pub fn stack(&self) -> &[Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>] {
/// Index of the innermost frame; asserts that the stack is non-empty.
239 pub fn cur_frame(&self) -> usize {
240 assert!(self.stack.len() > 0);
/// The innermost frame; panics if no frame is on the stack.
245 pub fn frame(&self) -> &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
246 self.stack.last().expect("no call frames exist")
250 pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
251 self.stack.last_mut().expect("no call frames exist")
/// The MIR of the function executing in the innermost frame.
255 pub(super) fn mir(&self) -> &'mir mir::Body<'tcx> {
/// Substitute the current frame's substs into `substs` and normalize away
/// regions. Outside any frame, a value still needing substitution is
/// reported as `TooGeneric`.
259 pub(super) fn subst_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
262 ) -> EvalResult<'tcx, T> {
263 match self.stack.last() {
264 Some(frame) => Ok(self.tcx.subst_and_normalize_erasing_regions(
265 frame.instance.substs,
269 None => if substs.needs_subst() {
270 err!(TooGeneric).into()
/// Resolve a `(def_id, substs)` pair to a callable `Instance`,
/// reporting `TooGeneric` when resolution fails.
277 pub(super) fn resolve(
280 substs: SubstsRef<'tcx>
281 ) -> EvalResult<'tcx, ty::Instance<'tcx>> {
282 trace!("resolve: {:?}, {:#?}", def_id, substs);
283 trace!("param_env: {:#?}", self.param_env);
// The substs may still refer to the caller's generics; normalize them first.
284 let substs = self.subst_and_normalize_erasing_regions(substs)?;
285 trace!("substs: {:#?}", substs);
286 ty::Instance::resolve(
291 ).ok_or_else(|| InterpError::TooGeneric.into())
/// Whether `ty` is `Sized` under the current parameter environment.
294 pub fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
295 ty.is_sized(self.tcx, self.param_env)
/// Whether `ty` is `Freeze` (no interior mutability); `DUMMY_SP` is fine
/// because the span is only needed for diagnostics.
298 pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
299 ty.is_freeze(*self.tcx, self.param_env, DUMMY_SP)
/// Load the MIR for an instance, refusing to run if typeck reported errors
/// for the local def, and erroring when no MIR is available.
/// NOTE(review): the `fn load_mir(` signature line itself is elided here.
304 instance: ty::InstanceDef<'tcx>,
305 ) -> EvalResult<'tcx, &'tcx mir::Body<'tcx>> {
306 // do not continue if typeck errors occurred (can only occur in local crate)
307 let did = instance.def_id();
309 && self.tcx.has_typeck_tables(did)
310 && self.tcx.typeck_tables_of(did).tainted_by_errors
312 return err!(TypeckError);
314 trace!("load mir {:?}", instance);
316 ty::InstanceDef::Item(def_id) => if self.tcx.is_mir_available(did) {
317 Ok(self.tcx.optimized_mir(did))
319 err!(NoMirFor(self.tcx.def_path_str(def_id)))
// Non-`Item` defs get their MIR via `instance_mir`.
321 _ => Ok(self.tcx.instance_mir(instance)),
/// Monomorphize `t` with the current frame's substs; outside any frame, a
/// value still needing substitution is reported as `TooGeneric`.
325 pub(super) fn monomorphize<T: TypeFoldable<'tcx> + Subst<'tcx>>(
328 ) -> EvalResult<'tcx, T> {
329 match self.stack.last() {
330 Some(frame) => Ok(self.monomorphize_with_substs(t, frame.instance.substs)),
331 None => if t.needs_subst() {
332 err!(TooGeneric).into()
/// Substitute `substs` into `t`, then normalize under `reveal_all`.
339 fn monomorphize_with_substs<T: TypeFoldable<'tcx> + Subst<'tcx>>(
342 substs: SubstsRef<'tcx>
344 // miri doesn't care about lifetimes, and will choke on some crazy ones
345 // let's simply get rid of them
346 let substituted = t.subst(*self.tcx, substs);
347 self.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), substituted)
/// Look up — and memoize in the frame-local's `Cell` — the layout of a
/// local, honoring a caller-provided `layout` hint when given.
350 pub fn layout_of_local(
352 frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
354 layout: Option<TyLayout<'tcx>>,
355 ) -> EvalResult<'tcx, TyLayout<'tcx>> {
356 match frame.locals[local].layout.get() {
// Cache miss: compute from the declared type, monomorphized with the
// frame's substs (unless the caller already knows the layout).
358 let layout = crate::interpret::operand::from_known_layout(layout, || {
359 let local_ty = frame.mir.local_decls[local].ty;
360 let local_ty = self.monomorphize_with_substs(local_ty, frame.instance.substs);
361 self.layout_of(local_ty)
363 // Layouts of locals are requested a lot, so we cache them.
364 frame.locals[local].layout.set(Some(layout));
367 Some(layout) => Ok(layout),
371 /// Returns the actual dynamic size and alignment of the place at the given type.
372 /// Only the "meta" (metadata) part of the place matters.
373 /// This can fail to provide an answer for extern types.
374 pub(super) fn size_and_align_of(
376 metadata: Option<Scalar<M::PointerTag>>,
377 layout: TyLayout<'tcx>,
378 ) -> EvalResult<'tcx, Option<(Size, Align)>> {
// Sized types: both values come straight from the static layout.
379 if !layout.is_unsized() {
380 return Ok(Some((layout.size, layout.align.abi)));
382 match layout.ty.sty {
383 ty::Adt(..) | ty::Tuple(..) => {
384 // First get the size of all statically known fields.
385 // Don't use type_of::sizing_type_of because that expects t to be sized,
386 // and it also rounds up to alignment, which we want to avoid,
387 // as the unsized field's alignment could be smaller.
388 assert!(!layout.ty.is_simd());
389 trace!("DST layout: {:?}", layout);
391 let sized_size = layout.fields.offset(layout.fields.count() - 1);
392 let sized_align = layout.align.abi;
394 "DST {} statically sized prefix size: {:?} align: {:?}",
400 // Recurse to get the size of the dynamically sized field (must be
401 // the last field). Can't have foreign types here, how would we
402 // adjust alignment and size for them?
403 let field = layout.field(self, layout.fields.count() - 1)?;
404 let (unsized_size, unsized_align) = match self.size_and_align_of(metadata, field)? {
405 Some(size_and_align) => size_and_align,
407 // A field with extern type. If this field is at offset 0, we behave
408 // like the underlying extern type.
409 // FIXME: Once we have made decisions for how to handle size and alignment
410 // of `extern type`, this should be adapted. It is just a temporary hack
411 // to get some code to work that probably ought to work.
412 if sized_size == Size::ZERO {
415 bug!("Fields cannot be extern types, unless they are at offset 0")
420 // FIXME (#26403, #27023): We should be adding padding
421 // to `sized_size` (to accommodate the `unsized_align`
422 // required of the unsized field that follows) before
423 // summing it with `sized_size`. (Note that since #26403
424 // is unfixed, we do not yet add the necessary padding
425 // here. But this is where the add would go.)
427 // Return the sum of sizes and max of aligns.
428 let size = sized_size + unsized_size;
430 // Choose max of two known alignments (combined value must
431 // be aligned according to more restrictive of the two).
432 let align = sized_align.max(unsized_align);
434 // Issue #27023: must add any necessary padding to `size`
435 // (to make it a multiple of `align`) before returning it.
437 // Namely, the returned size should be, in C notation:
439 // `size + ((size & (align-1)) ? align : 0)`
441 // emulated via the semi-standard fast bit trick:
443 // `(size + (align-1)) & -align`
445 Ok(Some((size.align_to(align), align)))
// Trait objects: size and align are read from the vtable the metadata points to.
448 let vtable = metadata.expect("dyn trait fat ptr must have vtable").to_ptr()?;
449 // the second entry in the vtable is the dynamic size of the object.
450 Ok(Some(self.read_size_and_align_from_vtable(vtable)?))
453 ty::Slice(_) | ty::Str => {
// NOTE(review): this expect message looks copy-pasted from the trait-object
// arm above — slice/str metadata is a *length*, not a vtable. The message is
// a runtime string, so it is left untouched in this comment-only pass.
454 let len = metadata.expect("slice fat ptr must have vtable").to_usize(self)?;
455 let elem = layout.field(self, 0)?;
456 Ok(Some((elem.size * len, elem.align.abi)))
463 _ => bug!("size_and_align_of::<{:?}> not supported", layout.ty),
/// Convenience wrapper: dynamic size/align of an in-memory place,
/// using its own metadata and layout.
467 pub fn size_and_align_of_mplace(
469 mplace: MPlaceTy<'tcx, M::PointerTag>
470 ) -> EvalResult<'tcx, Option<(Size, Align)>> {
471 self.size_and_align_of(mplace.meta, mplace.layout)
/// Push a new frame for `instance` onto the virtual call stack, initialize
/// its locals (marking `Storage*`-managed ones dead), and enforce the
/// const-eval stack-frame (recursion) limit.
474 pub fn push_stack_frame(
476 instance: ty::Instance<'tcx>,
477 span: source_map::Span,
478 mir: &'mir mir::Body<'tcx>,
479 return_place: Option<PlaceTy<'tcx, M::PointerTag>>,
480 return_to_block: StackPopCleanup,
481 ) -> EvalResult<'tcx> {
482 if self.stack.len() > 0 {
483 info!("PAUSING({}) {}", self.cur_frame(), self.frame().instance);
485 ::log_settings::settings().indentation += 1;
487 // first push a stack frame so we have access to the local substs
488 let extra = M::stack_push(self)?;
489 self.stack.push(Frame {
491 block: mir::START_BLOCK,
494 // empty local array, we fill it in below, after we are inside the stack frame and
495 // all methods actually know about the frame
496 locals: IndexVec::new(),
503 // don't allocate at all for trivial constants
504 if mir.local_decls.len() > 1 {
505 // Locals are initially uninitialized.
506 let dummy = LocalState {
507 value: LocalValue::Uninitialized,
508 layout: Cell::new(None),
510 let mut locals = IndexVec::from_elem(dummy, &mir.local_decls);
511 // Return place is handled specially by the `eval_place` functions, and the
512 // entry in `locals` should never be used. Make it dead, to be sure.
513 locals[mir::RETURN_PLACE].value = LocalValue::Dead;
514 // Now mark those locals as dead that we do not want to initialize
515 match self.tcx.def_kind(instance.def_id()) {
516 // statics and constants don't have `Storage*` statements, no need to look for them
517 Some(DefKind::Static)
518 | Some(DefKind::Const)
519 | Some(DefKind::AssocConst) => {},
// Everything else: scan the MIR for storage statements.
521 trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
522 for block in mir.basic_blocks() {
523 for stmt in block.statements.iter() {
524 use rustc::mir::StatementKind::{StorageDead, StorageLive};
// NOTE(review): the `StorageLive` match arm is elided in this chunk.
527 StorageDead(local) => {
528 locals[local].value = LocalValue::Dead;
537 self.frame_mut().locals = locals;
540 info!("ENTERING({}) {}", self.cur_frame(), self.frame().instance);
542 if self.stack.len() > self.tcx.sess.const_eval_stack_frame_limit {
543 err!(StackFrameLimitReached)
/// Pop the innermost frame: run the machine's `stack_pop` hook, optionally
/// deallocate and validate the frame's locals/return value, then jump to
/// the caller's continuation block.
549 pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
550 info!("LEAVING({}) {}", self.cur_frame(), self.frame().instance);
551 ::log_settings::settings().indentation -= 1;
552 let frame = self.stack.pop().expect(
553 "tried to pop a stack frame, but there were none",
555 M::stack_pop(self, frame.extra)?;
556 // Abort early if we do not want to clean up: We also avoid validation in that case,
557 // because this is CTFE and the final value will be thoroughly validated anyway.
558 match frame.return_to_block {
559 StackPopCleanup::Goto(_) => {},
560 StackPopCleanup::None { cleanup } => {
562 assert!(self.stack.is_empty(), "only the topmost frame should ever be leaked");
563 // Leak the locals, skip validation.
568 // Deallocate all locals that are backed by an allocation.
569 for local in frame.locals {
570 self.deallocate_local(local.value)?;
572 // Validate the return value. Do this after deallocating so that we catch dangling
574 if let Some(return_place) = frame.return_place {
575 if M::enforce_validity(self) {
576 // Data got changed, better make sure it matches the type!
577 // It is still possible that the return place held invalid data while
578 // the function is running, but that's okay because nobody could have
579 // accessed that same data from the "outside" to observe any broken
580 // invariant -- that is, unless a function somehow has a ptr to
581 // its return place... but the way MIR is currently generated, the
582 // return place is always a local and then this cannot happen.
583 self.validate_operand(
584 self.place_to_op(return_place)?,
591 // Uh, that shouldn't happen... the function did not intend to return
592 return err!(Unreachable);
594 // Jump to new block -- *after* validation so that the spans make more sense.
595 match frame.return_to_block {
596 StackPopCleanup::Goto(block) => {
597 self.goto_block(block)?;
599 StackPopCleanup::None { .. } => {}
602 if self.stack.len() > 0 {
603 info!("CONTINUING({}) {}", self.cur_frame(), self.frame().instance);
609 /// Mark a storage as live, killing the previous content and returning it.
610 /// Remember to deallocate that!
/// NOTE(review): the `fn storage_live(` signature line is elided here.
614 ) -> EvalResult<'tcx, LocalValue<M::PointerTag>> {
615 assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
616 trace!("{:?} is now live", local);
618 let local_val = LocalValue::Uninitialized;
619 // StorageLive *always* kills the value that's currently stored.
620 // However, we do not error if the variable already is live;
621 // see <https://github.com/rust-lang/rust/issues/42371>.
622 Ok(mem::replace(&mut self.frame_mut().locals[local].value, local_val))
/// Mark a local as dead, returning whatever value was stored there.
625 /// Returns the old value of the local.
626 /// Remember to deallocate that!
627 pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue<M::PointerTag> {
628 assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
629 trace!("{:?} is now dead", local);
// Swap in `Dead`; the caller is responsible for freeing any backing allocation.
631 mem::replace(&mut self.frame_mut().locals[local].value, LocalValue::Dead)
/// Free the allocation backing a local, if any.
634 pub(super) fn deallocate_local(
636 local: LocalValue<M::PointerTag>,
637 ) -> EvalResult<'tcx> {
638 // FIXME: should we tell the user that there was a local which was never written to?
// Only indirect (memory-backed) locals own an allocation to free;
// immediates, uninitialized, and dead locals need no cleanup.
639 if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
640 trace!("deallocating local");
641 let ptr = ptr.to_ptr()?;
642 self.memory.dump_alloc(ptr.alloc_id);
643 self.memory.deallocate_local(ptr)?;
/// Evaluate a constant to a raw (unvalidated) memory place; statics are
/// evaluated under `ParamEnv::reveal_all`.
648 pub fn const_eval_raw(
651 ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
652 let param_env = if self.tcx.is_static(gid.instance.def_id()) {
653 ty::ParamEnv::reveal_all()
657 // We use `const_eval_raw` here, and get an unvalidated result. That is okay:
658 // Our result will later be validated anyway, and there seems no good reason
659 // to have to fail early here. This is also more consistent with
660 // `Memory::get_static_alloc` which has to use `const_eval_raw` to avoid cycles.
661 let val = self.tcx.const_eval_raw(param_env.and(gid)).map_err(|err| {
// Map the two query-error variants onto interpreter errors.
663 ErrorHandled::Reported => InterpError::ReferencedConstant,
664 ErrorHandled::TooGeneric => InterpError::TooGeneric,
667 self.raw_const_to_mplace(val)
/// Debug helper: trace the current state of `place` and dump the
/// allocations it refers to. No-op unless trace logging is enabled.
670 pub fn dump_place(&self, place: Place<M::PointerTag>) {
672 if !log_enabled!(::log::Level::Trace) {
676 Place::Local { frame, local } => {
// Collect alloc ids referenced by the local so they can be dumped below.
677 let mut allocs = Vec::new();
678 let mut msg = format!("{:?}", local);
679 if frame != self.cur_frame() {
680 write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
682 write!(msg, ":").unwrap();
684 match self.stack[frame].locals[local].value {
685 LocalValue::Dead => write!(msg, " is dead").unwrap(),
686 LocalValue::Uninitialized => write!(msg, " is uninitialized").unwrap(),
687 LocalValue::Live(Operand::Indirect(mplace)) => {
689 Scalar::Ptr(ptr) => {
690 write!(msg, " by align({}){} ref:",
691 mplace.align.bytes(),
693 Some(meta) => format!(" meta({:?})", meta),
694 None => String::new()
697 allocs.push(ptr.alloc_id);
699 ptr => write!(msg, " by integral ref: {:?}", ptr).unwrap(),
702 LocalValue::Live(Operand::Immediate(Immediate::Scalar(val))) => {
703 write!(msg, " {:?}", val).unwrap();
704 if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
705 allocs.push(ptr.alloc_id);
708 LocalValue::Live(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
709 write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
// Either half of the pair may be a pointer worth dumping.
710 if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
711 allocs.push(ptr.alloc_id);
713 if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val2 {
714 allocs.push(ptr.alloc_id);
720 self.memory.dump_allocs(allocs);
722 Place::Ptr(mplace) => {
724 Scalar::Ptr(ptr) => {
725 trace!("by align({}) ref:", mplace.align.bytes());
726 self.memory.dump_alloc(ptr.alloc_id);
728 ptr => trace!(" integral by ref: {:?}", ptr),
/// Build a stack trace (innermost frame first), skipping frames whose span
/// duplicates the previous one or the explicitly provided span.
734 pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> Vec<FrameInfo<'tcx>> {
735 let mut last_span = None;
736 let mut frames = Vec::new();
737 for &Frame { instance, span, mir, block, stmt, .. } in self.stack().iter().rev() {
738 // make sure we don't emit frames that are duplicates of the previous
739 if explicit_span == Some(span) {
740 last_span = Some(span);
743 if let Some(last) = last_span {
748 last_span = Some(span);
// `stmt` indexes into the block's statements; an index past the end
// means execution is at the block's terminator.
750 let block = &mir.basic_blocks()[block];
751 let source_info = if stmt < block.statements.len() {
752 block.statements[stmt].source_info
754 block.terminator().source_info
// Lint root is only available for local (non-cross-crate) MIR.
756 let lint_root = match mir.source_scope_local_data {
757 mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
758 mir::ClearCrossCrate::Clear => None,
760 frames.push(FrameInfo { call_site: span, instance, lint_root });
762 trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
/// Sign-extend `value` from the width given by `ty`'s size to 128 bits;
/// asserts that `ty` has a signed ABI.
767 pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 {
768 assert!(ty.abi.is_signed());
769 sign_extend(value, ty.size)
/// Truncate `value` to the width given by `ty`'s size.
773 pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
774 truncate(value, ty.size)