use std::cell::Cell;
use std::fmt::Write;
use std::mem;

use syntax::source_map::{self, Span, DUMMY_SP};
use rustc::hir::def_id::DefId;
use rustc::hir::def::Def;
use rustc::mir;
use rustc::ty::layout::{
    self, Size, Align, HasDataLayout, LayoutOf, TyLayout
};
use rustc::ty::subst::{Subst, SubstsRef};
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::ty::query::TyCtxtAt;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc::mir::interpret::{
    ErrorHandled,
    GlobalId, Scalar, FrameInfo, AllocId,
    EvalResult, EvalErrorKind,
    truncate, sign_extend,
};
use rustc_data_structures::fx::FxHashMap;

use super::{
    Immediate, Operand, MemPlace, MPlaceTy, Place, PlaceTy, ScalarMaybeUndef,
    Memory, Machine
};

pub struct InterpretCx<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> {
    /// Stores the `Machine` instance.
    pub machine: M,

    /// The results of the type checker, from rustc.
    pub tcx: TyCtxtAt<'a, 'tcx, 'tcx>,

    /// Bounds in scope for polymorphic evaluations.
    pub(crate) param_env: ty::ParamEnv<'tcx>,

    /// The virtual memory system.
    pub(crate) memory: Memory<'a, 'mir, 'tcx, M>,

    /// The virtual call stack.
    pub(crate) stack: Vec<Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>>,

    /// A cache for deduplicating vtables.
    pub(super) vtables: FxHashMap<(Ty<'tcx>, Option<ty::PolyExistentialTraitRef<'tcx>>), AllocId>,
}

pub struct Frame<'mir, 'tcx: 'mir, Tag=(), Extra=()> {
    ////////////////////////////////////////////////////////////////////////////////
    // Function and callsite information
    ////////////////////////////////////////////////////////////////////////////////
    /// The MIR for the function called on this frame.
    pub mir: &'mir mir::Mir<'tcx>,

    /// The def_id and substs of the current function.
    pub instance: ty::Instance<'tcx>,

    /// The span of the call site.
    pub span: source_map::Span,

    ////////////////////////////////////////////////////////////////////////////////
    // Return place and locals
    ////////////////////////////////////////////////////////////////////////////////
    /// Work to perform when returning from this function.
    pub return_to_block: StackPopCleanup,

    /// The location where the result of the current stack frame should be written to,
    /// and its layout in the caller.
    pub return_place: Option<PlaceTy<'tcx, Tag>>,

    /// The list of locals for this stack frame, stored in order as
    /// `[return_ptr, arguments..., variables..., temporaries...]`.
    /// The locals are stored as `LocalState`s: a dead local holds no value, while a
    /// live local can either directly contain a `Scalar` or refer to some part of an
    /// `Allocation`.
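    /// For example (illustrative only, not real MIR output): in a frame for
    /// `fn add(x: i32, y: i32) -> i32`, `_0` is the return place, `_1` and `_2` hold
    /// the arguments `x` and `y`, and any further locals and temporaries follow.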
    pub locals: IndexVec<mir::Local, LocalState<'tcx, Tag>>,

    ////////////////////////////////////////////////////////////////////////////////
    // Current position within the function
    ////////////////////////////////////////////////////////////////////////////////
    /// The block that is currently executed (or will be executed after the above call stacks
    /// return).
    pub block: mir::BasicBlock,

    /// The index of the currently evaluated statement.
    pub stmt: usize,

    /// Extra data for the machine.
    pub extra: Extra,
}

#[derive(Clone, Debug, Eq, PartialEq, Hash)]
pub enum StackPopCleanup {
    /// Jump to the next block in the caller, or cause UB if None (that's a function
    /// that may never return). Also store layout of return place so
    /// we can validate it at that layout.
    Goto(Option<mir::BasicBlock>),
    /// Just do nothing: used by `main` and for the `box_alloc` hook in miri.
    /// `cleanup` says whether locals are deallocated. Static computation
    /// wants them leaked to intern what they need (and just throw away
    /// the entire `ecx` when it is done).
    None { cleanup: bool },
}
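
// Illustrative sketch (not verbatim call sites from this file): an ordinary function
// call pushes its frame with `StackPopCleanup::Goto(Some(return_block))`, while
// top-level static/const evaluation uses `StackPopCleanup::None { cleanup: false }`
// so that the locals are leaked and can be interned afterwards.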

/// State of a local variable, including a memoized layout.
#[derive(Clone, PartialEq, Eq)]
pub struct LocalState<'tcx, Tag=(), Id=AllocId> {
    pub state: LocalValue<Tag, Id>,
    /// Don't modify if `Some`; this is only used to prevent computing the layout twice.
    pub layout: Cell<Option<TyLayout<'tcx>>>,
}

/// State of a local variable.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum LocalValue<Tag=(), Id=AllocId> {
    Dead,
    // Mostly for convenience, we re-use the `Operand` type here.
    // This is an optimization over just always having a pointer here;
    // we can thus avoid doing an allocation when the local just stores
    // immediate values *and* never has its address taken.
    Live(Operand<Tag, Id>),
}
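
// Sketch of the two live shapes (illustrative): `LocalValue::Live(Operand::Immediate(_))`
// keeps the value entirely inside the interpreter, with no memory allocated for it, while
// `LocalValue::Live(Operand::Indirect(_))` means the local is backed by an `Allocation`,
// e.g. because its address was taken.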

impl<'tcx, Tag> LocalState<'tcx, Tag> {
    pub fn access(&self) -> EvalResult<'tcx, &Operand<Tag>> {
        match self.state {
            LocalValue::Dead => err!(DeadLocal),
            LocalValue::Live(ref val) => Ok(val),
        }
    }

    pub fn access_mut(&mut self) -> EvalResult<'tcx, &mut Operand<Tag>> {
        match self.state {
            LocalValue::Dead => err!(DeadLocal),
            LocalValue::Live(ref mut val) => Ok(val),
        }
    }
}

impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> HasDataLayout
    for InterpretCx<'a, 'mir, 'tcx, M>
{
    fn data_layout(&self) -> &layout::TargetDataLayout {
        &self.tcx.data_layout
    }
}

impl<'a, 'mir, 'tcx, M> layout::HasTyCtxt<'tcx> for InterpretCx<'a, 'mir, 'tcx, M>
    where M: Machine<'a, 'mir, 'tcx>
{
    fn tcx<'d>(&'d self) -> TyCtxt<'d, 'tcx, 'tcx> {
        *self.tcx
    }
}

impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> LayoutOf
    for InterpretCx<'a, 'mir, 'tcx, M>
{
    type Ty = Ty<'tcx>;
    type TyLayout = EvalResult<'tcx, TyLayout<'tcx>>;

    fn layout_of(&self, ty: Ty<'tcx>) -> Self::TyLayout {
        self.tcx.layout_of(self.param_env.and(ty))
            .map_err(|layout| EvalErrorKind::Layout(layout).into())
    }
}
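
// E.g. (illustrative): calls like `self.layout_of(ty)?` throughout the interpreter below
// obtain the `TyLayout` (size, alignment, field offsets) of a monomorphized type, with
// layout errors converted into `EvalErrorKind::Layout`.
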
impl<'a, 'mir, 'tcx: 'mir, M: Machine<'a, 'mir, 'tcx>> InterpretCx<'a, 'mir, 'tcx, M> {
    pub fn new(
        tcx: TyCtxtAt<'a, 'tcx, 'tcx>,
        param_env: ty::ParamEnv<'tcx>,
        machine: M,
    ) -> Self {
        InterpretCx {
            machine,
            tcx,
            param_env,
            memory: Memory::new(tcx),
            stack: Vec::new(),
            vtables: FxHashMap::default(),
        }
    }

    pub fn memory(&self) -> &Memory<'a, 'mir, 'tcx, M> {
        &self.memory
    }

    pub fn memory_mut(&mut self) -> &mut Memory<'a, 'mir, 'tcx, M> {
        &mut self.memory
    }

    pub fn stack(&self) -> &[Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>] {
        &self.stack
    }

    pub fn cur_frame(&self) -> usize {
        assert!(self.stack.len() > 0);
        self.stack.len() - 1
    }

    pub fn frame(&self) -> &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
        self.stack.last().expect("no call frames exist")
    }

    pub fn frame_mut(&mut self) -> &mut Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra> {
        self.stack.last_mut().expect("no call frames exist")
    }

    pub(super) fn mir(&self) -> &'mir mir::Mir<'tcx> {
        self.frame().mir
    }

    pub(super) fn subst_and_normalize_erasing_regions<T: TypeFoldable<'tcx>>(
        &self,
        substs: T,
    ) -> EvalResult<'tcx, T> {
        match self.stack.last() {
            Some(frame) => Ok(self.tcx.subst_and_normalize_erasing_regions(
                frame.instance.substs,
                self.param_env,
                &substs,
            )),
            None => if substs.needs_subst() {
                err!(TooGeneric).into()
            } else {
                Ok(substs)
            },
        }
    }

    pub(super) fn resolve(
        &self,
        def_id: DefId,
        substs: SubstsRef<'tcx>
    ) -> EvalResult<'tcx, ty::Instance<'tcx>> {
        trace!("resolve: {:?}, {:#?}", def_id, substs);
        trace!("param_env: {:#?}", self.param_env);
        let substs = self.subst_and_normalize_erasing_regions(substs)?;
        trace!("substs: {:#?}", substs);
        ty::Instance::resolve(
            *self.tcx,
            self.param_env,
            def_id,
            substs,
        ).ok_or_else(|| EvalErrorKind::TooGeneric.into())
    }

    pub fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
        ty.is_sized(self.tcx, self.param_env)
    }

    pub fn type_is_freeze(&self, ty: Ty<'tcx>) -> bool {
        ty.is_freeze(*self.tcx, self.param_env, DUMMY_SP)
    }

    pub fn load_mir(
        &self,
        instance: ty::InstanceDef<'tcx>,
    ) -> EvalResult<'tcx, &'tcx mir::Mir<'tcx>> {
        // do not continue if typeck errors occurred (can only occur in local crate)
        let did = instance.def_id();
        if did.is_local()
            && self.tcx.has_typeck_tables(did)
            && self.tcx.typeck_tables_of(did).tainted_by_errors
        {
            return err!(TypeckError);
        }
        trace!("load mir {:?}", instance);
        match instance {
            ty::InstanceDef::Item(def_id) => if self.tcx.is_mir_available(did) {
                Ok(self.tcx.optimized_mir(did))
            } else {
                err!(NoMirFor(self.tcx.def_path_str(def_id)))
            },
            _ => Ok(self.tcx.instance_mir(instance)),
        }
    }

    pub(super) fn monomorphize<T: TypeFoldable<'tcx> + Subst<'tcx>>(
        &self,
        t: T,
    ) -> EvalResult<'tcx, T> {
        match self.stack.last() {
            Some(frame) => Ok(self.monomorphize_with_substs(t, frame.instance.substs)),
            None => if t.needs_subst() {
                err!(TooGeneric).into()
            } else {
                Ok(t)
            },
        }
    }

    fn monomorphize_with_substs<T: TypeFoldable<'tcx> + Subst<'tcx>>(
        &self,
        t: T,
        substs: SubstsRef<'tcx>
    ) -> T {
        // miri doesn't care about lifetimes, and will choke on some crazy ones;
        // let's simply get rid of them
        let substituted = t.subst(*self.tcx, substs);
        self.tcx.normalize_erasing_regions(ty::ParamEnv::reveal_all(), substituted)
    }

    pub fn layout_of_local(
        &self,
        frame: &Frame<'mir, 'tcx, M::PointerTag, M::FrameExtra>,
        local: mir::Local,
        layout: Option<TyLayout<'tcx>>,
    ) -> EvalResult<'tcx, TyLayout<'tcx>> {
        match frame.locals[local].layout.get() {
            None => {
                let layout = crate::interpret::operand::from_known_layout(layout, || {
                    let local_ty = frame.mir.local_decls[local].ty;
                    let local_ty = self.monomorphize_with_substs(local_ty, frame.instance.substs);
                    self.layout_of(local_ty)
                })?;
                frame.locals[local].layout.set(Some(layout));
                Ok(layout)
            }
            Some(layout) => Ok(layout),
        }
    }
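
    // Note: the `Cell` in `LocalState` memoizes the result above, so repeated queries
    // for the same local compute the layout only once.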

    pub fn str_to_immediate(&mut self, s: &str) -> EvalResult<'tcx, Immediate<M::PointerTag>> {
        let ptr = self.memory.allocate_static_bytes(s.as_bytes()).with_default_tag();
        Ok(Immediate::new_slice(Scalar::Ptr(ptr), s.len() as u64, self))
    }
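
    // E.g. (illustrative): `str_to_immediate("hi")` copies the bytes `b"hi"` into a static
    // allocation and returns a (pointer, length = 2) slice immediate.
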
    /// Returns the actual dynamic size and alignment of the place at the given type.
    /// Only the "meta" (metadata) part of the place matters.
    /// This can fail to provide an answer for extern types.
    pub(super) fn size_and_align_of(
        &self,
        metadata: Option<Scalar<M::PointerTag>>,
        layout: TyLayout<'tcx>,
    ) -> EvalResult<'tcx, Option<(Size, Align)>> {
        if !layout.is_unsized() {
            return Ok(Some((layout.size, layout.align.abi)));
        }
        match layout.ty.sty {
            ty::Adt(..) | ty::Tuple(..) => {
                // First get the size of all statically known fields.
                // Don't use type_of::sizing_type_of because that expects t to be sized,
                // and it also rounds up to alignment, which we want to avoid,
                // as the unsized field's alignment could be smaller.
                assert!(!layout.ty.is_simd());
                trace!("DST layout: {:?}", layout);

                let sized_size = layout.fields.offset(layout.fields.count() - 1);
                let sized_align = layout.align.abi;
                trace!(
                    "DST {} statically sized prefix size: {:?} align: {:?}",
                    layout.ty,
                    sized_size,
                    sized_align
                );

                // Recurse to get the size of the dynamically sized field (must be
                // the last field). Can't have foreign types here, how would we
                // adjust alignment and size for them?
                let field = layout.field(self, layout.fields.count() - 1)?;
                let (unsized_size, unsized_align) = match self.size_and_align_of(metadata, field)? {
                    Some(size_and_align) => size_and_align,
                    None => {
                        // A field with extern type. If this field is at offset 0, we behave
                        // like the underlying extern type.
                        // FIXME: Once we have made decisions for how to handle size and alignment
                        // of `extern type`, this should be adapted. It is just a temporary hack
                        // to get some code to work that probably ought to work.
                        if sized_size == Size::ZERO {
                            return Ok(None)
                        } else {
                            bug!("Fields cannot be extern types, unless they are at offset 0")
                        }
                    }
                };

                // FIXME (#26403, #27023): We should be adding padding
                // to `sized_size` (to accommodate the `unsized_align`
                // required of the unsized field that follows) before
                // summing it with `sized_size`. (Note that since #26403
                // is unfixed, we do not yet add the necessary padding
                // here. But this is where the add would go.)

                // Return the sum of sizes and max of aligns.
                let size = sized_size + unsized_size;

                // Choose max of two known alignments (combined value must
                // be aligned according to more restrictive of the two).
                let align = sized_align.max(unsized_align);

                // Issue #27023: must add any necessary padding to `size`
                // (to make it a multiple of `align`) before returning it.
                //
                // Namely, the returned size should be, in C notation:
                //
                //   `size + ((size & (align-1)) ? align : 0)`
                //
                // emulated via the semi-standard fast bit trick:
                //
                //   `(size + (align-1)) & -align`
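                //
                // Worked example (illustrative numbers only): with `sized_size = 5`,
                // `unsized_size = 6`, and `align = 4`, `size` is 11 and
                // `size.align_to(align)` rounds it up to (11 + 3) & !3 = 12.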
                Ok(Some((size.align_to(align), align)))
            }
            ty::Dynamic(..) => {
                let vtable = metadata.expect("dyn trait fat ptr must have vtable").to_ptr()?;
                // the second entry in the vtable is the dynamic size of the object.
                Ok(Some(self.read_size_and_align_from_vtable(vtable)?))
            }

            ty::Slice(_) | ty::Str => {
                let len = metadata.expect("slice fat ptr must have length").to_usize(self)?;
                let elem = layout.field(self, 0)?;
                Ok(Some((elem.size * len, elem.align.abi)))
            }

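            // Worked example for the `Slice`/`Str` arm above (illustrative numbers): a
            // `[u16]` whose length metadata is 3 occupies 2 * 3 = 6 bytes with the
            // element alignment of 2.
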
            ty::Foreign(_) => Ok(None),

            _ => bug!("size_and_align_of::<{:?}> not supported", layout.ty),
        }
    }

    pub fn size_and_align_of_mplace(
        &self,
        mplace: MPlaceTy<'tcx, M::PointerTag>
    ) -> EvalResult<'tcx, Option<(Size, Align)>> {
        self.size_and_align_of(mplace.meta, mplace.layout)
    }

    pub fn push_stack_frame(
        &mut self,
        instance: ty::Instance<'tcx>,
        span: source_map::Span,
        mir: &'mir mir::Mir<'tcx>,
        return_place: Option<PlaceTy<'tcx, M::PointerTag>>,
        return_to_block: StackPopCleanup,
    ) -> EvalResult<'tcx> {
        if self.stack.len() > 0 {
            info!("PAUSING({}) {}", self.cur_frame(), self.frame().instance);
        }
        ::log_settings::settings().indentation += 1;

        // first push a stack frame so we have access to the local substs
        let extra = M::stack_push(self)?;
        self.stack.push(Frame {
            mir,
            block: mir::START_BLOCK,
            return_to_block,
            return_place,
            // empty local array, we fill it in below, after we are inside the stack frame and
            // all methods actually know about the frame
            locals: IndexVec::new(),
            span,
            instance,
            stmt: 0,
            extra,
        });

        // don't allocate at all for trivial constants
        if mir.local_decls.len() > 1 {
            // We put some marker immediate into the locals that we later want to initialize.
            // This can be anything except for LocalValue::Dead -- because *that* is the
            // value we use for things that we know are initially dead.
            let dummy = LocalState {
                state: LocalValue::Live(Operand::Immediate(Immediate::Scalar(
                    ScalarMaybeUndef::Undef,
                ))),
                layout: Cell::new(None),
            };
            let mut locals = IndexVec::from_elem(dummy, &mir.local_decls);
            // Return place is handled specially by the `eval_place` functions, and the
            // entry in `locals` should never be used. Make it dead, to be sure.
            locals[mir::RETURN_PLACE].state = LocalValue::Dead;
            // Now mark those locals as dead that we do not want to initialize
            match self.tcx.describe_def(instance.def_id()) {
                // statics and constants don't have `Storage*` statements, no need to look for them
                Some(Def::Static(..)) | Some(Def::Const(..)) | Some(Def::AssociatedConst(..)) => {},
                _ => {
                    trace!("push_stack_frame: {:?}: num_bbs: {}", span, mir.basic_blocks().len());
                    for block in mir.basic_blocks() {
                        for stmt in block.statements.iter() {
                            use rustc::mir::StatementKind::{StorageDead, StorageLive};
                            match stmt.kind {
                                StorageLive(local) |
                                StorageDead(local) => {
                                    locals[local].state = LocalValue::Dead;
                                }
                                _ => {}
                            }
                        }
                    }
                },
            }
            // Finally, properly initialize all those that still have the dummy value
            for (idx, local) in locals.iter_enumerated_mut() {
                match local.state {
                    LocalValue::Live(_) => {
                        // This needs to be properly initialized.
                        let ty = self.monomorphize(mir.local_decls[idx].ty)?;
                        let layout = self.layout_of(ty)?;
                        local.state = LocalValue::Live(self.uninit_operand(layout)?);
                        local.layout = Cell::new(Some(layout));
                    }
                    LocalValue::Dead => {
                        // Nothing to do
                    }
                }
            }
            self.frame_mut().locals = locals;
        }

        info!("ENTERING({}) {}", self.cur_frame(), self.frame().instance);

        if self.stack.len() > self.tcx.sess.const_eval_stack_frame_limit {
            err!(StackFrameLimitReached)
        } else {
            Ok(())
        }
    }
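
    // Sketch of the intended pairing (descriptive, not a new API): every successful
    // `push_stack_frame` is eventually matched by `pop_stack_frame` below, which
    // deallocates the frame's locals (unless the frame asked for
    // `StackPopCleanup::None { cleanup: false }`) and then jumps back to the caller.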

    pub(super) fn pop_stack_frame(&mut self) -> EvalResult<'tcx> {
        info!("LEAVING({}) {}", self.cur_frame(), self.frame().instance);
        ::log_settings::settings().indentation -= 1;
        let frame = self.stack.pop().expect(
            "tried to pop a stack frame, but there were none",
        );
        M::stack_pop(self, frame.extra)?;
        // Abort early if we do not want to clean up: We also avoid validation in that case,
        // because this is CTFE and the final value will be thoroughly validated anyway.
        match frame.return_to_block {
            StackPopCleanup::Goto(_) => {},
            StackPopCleanup::None { cleanup } => {
                if !cleanup {
                    assert!(self.stack.is_empty(), "only the topmost frame should ever be leaked");
                    // Leak the locals, skip validation.
                    return Ok(());
                }
            }
        }
        // Deallocate all locals that are backed by an allocation.
        for local in frame.locals {
            self.deallocate_local(local.state)?;
        }
        // Validate the return value. Do this after deallocating so that we catch dangling
        // references.
        if let Some(return_place) = frame.return_place {
            if M::enforce_validity(self) {
                // Data got changed, better make sure it matches the type!
                // It is still possible that the return place held invalid data while
                // the function is running, but that's okay because nobody could have
                // accessed that same data from the "outside" to observe any broken
                // invariant -- that is, unless a function somehow has a ptr to
                // its return place... but the way MIR is currently generated, the
                // return place is always a local and then this cannot happen.
                self.validate_operand(
                    self.place_to_op(return_place)?,
                    vec![],
                    None,
                    /*const_mode*/ false,
                )?;
            }
        } else {
            // Uh, that shouldn't happen... the function did not intend to return
            return err!(Unreachable);
        }

        // Jump to new block -- *after* validation so that the spans make more sense.
        match frame.return_to_block {
            StackPopCleanup::Goto(block) => {
                self.goto_block(block)?;
            }
            StackPopCleanup::None { .. } => {}
        }

        if self.stack.len() > 0 {
            info!("CONTINUING({}) {}", self.cur_frame(), self.frame().instance);
        }

        Ok(())
    }

    /// Mark a storage as live, killing the previous content and returning it.
    /// Remember to deallocate that!
    pub fn storage_live(
        &mut self,
        local: mir::Local
    ) -> EvalResult<'tcx, LocalValue<M::PointerTag>> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place live");
        trace!("{:?} is now live", local);

        let layout = self.layout_of_local(self.frame(), local, None)?;
        let init = LocalValue::Live(self.uninit_operand(layout)?);
        // StorageLive *always* kills the value that's currently stored.
        Ok(mem::replace(&mut self.frame_mut().locals[local].state, init))
    }

    /// Returns the old value of the local.
    /// Remember to deallocate that!
    pub fn storage_dead(&mut self, local: mir::Local) -> LocalValue<M::PointerTag> {
        assert!(local != mir::RETURN_PLACE, "Cannot make return place dead");
        trace!("{:?} is now dead", local);

        mem::replace(&mut self.frame_mut().locals[local].state, LocalValue::Dead)
    }

    pub(super) fn deallocate_local(
        &mut self,
        local: LocalValue<M::PointerTag>,
    ) -> EvalResult<'tcx> {
        // FIXME: should we tell the user that there was a local which was never written to?
        if let LocalValue::Live(Operand::Indirect(MemPlace { ptr, .. })) = local {
            trace!("deallocating local");
            let ptr = ptr.to_ptr()?;
            self.memory.dump_alloc(ptr.alloc_id);
            self.memory.deallocate_local(ptr)?;
        };
        Ok(())
    }

    pub fn const_eval_raw(
        &self,
        gid: GlobalId<'tcx>,
    ) -> EvalResult<'tcx, MPlaceTy<'tcx, M::PointerTag>> {
        let param_env = if self.tcx.is_static(gid.instance.def_id()).is_some() {
            ty::ParamEnv::reveal_all()
        } else {
            self.param_env
        };
        // We use `const_eval_raw` here, and get an unvalidated result. That is okay:
        // Our result will later be validated anyway, and there seems no good reason
        // to have to fail early here. This is also more consistent with
        // `Memory::get_static_alloc` which has to use `const_eval_raw` to avoid cycles.
        let val = self.tcx.const_eval_raw(param_env.and(gid)).map_err(|err| {
            match err {
                ErrorHandled::Reported => EvalErrorKind::ReferencedConstant,
                ErrorHandled::TooGeneric => EvalErrorKind::TooGeneric,
            }
        })?;
        self.raw_const_to_mplace(val)
    }

    pub fn dump_place(&self, place: Place<M::PointerTag>) {
        if !log_enabled!(::log::Level::Trace) {
            return;
        }
        match place {
            Place::Local { frame, local } => {
                let mut allocs = Vec::new();
                let mut msg = format!("{:?}", local);
                if frame != self.cur_frame() {
                    write!(msg, " ({} frames up)", self.cur_frame() - frame).unwrap();
                }
                write!(msg, ":").unwrap();

                match self.stack[frame].locals[local].access() {
                    Err(err) => {
                        if let EvalErrorKind::DeadLocal = err.kind {
                            write!(msg, " is dead").unwrap();
                        } else {
                            panic!("Failed to access local: {:?}", err);
                        }
                    }
                    Ok(Operand::Indirect(mplace)) => {
                        let (ptr, align) = mplace.to_scalar_ptr_align();
                        match ptr {
                            Scalar::Ptr(ptr) => {
                                write!(msg, " by align({}) ref:", align.bytes()).unwrap();
                                allocs.push(ptr.alloc_id);
                            }
                            ptr => write!(msg, " by integral ref: {:?}", ptr).unwrap(),
                        }
                    }
                    Ok(Operand::Immediate(Immediate::Scalar(val))) => {
                        write!(msg, " {:?}", val).unwrap();
                        if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val {
                            allocs.push(ptr.alloc_id);
                        }
                    }
                    Ok(Operand::Immediate(Immediate::ScalarPair(val1, val2))) => {
                        write!(msg, " ({:?}, {:?})", val1, val2).unwrap();
                        if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val1 {
                            allocs.push(ptr.alloc_id);
                        }
                        if let ScalarMaybeUndef::Scalar(Scalar::Ptr(ptr)) = val2 {
                            allocs.push(ptr.alloc_id);
                        }
                    }
                }

                trace!("{}", msg);
                self.memory.dump_allocs(allocs);
            }
            Place::Ptr(mplace) => {
                match mplace.ptr {
                    Scalar::Ptr(ptr) => {
                        trace!("by align({}) ref:", mplace.align.bytes());
                        self.memory.dump_alloc(ptr.alloc_id);
                    }
                    ptr => trace!(" integral by ref: {:?}", ptr),
                }
            }
        }
    }

    pub fn generate_stacktrace(&self, explicit_span: Option<Span>) -> Vec<FrameInfo<'tcx>> {
        let mut last_span = None;
        let mut frames = Vec::new();
        for &Frame { instance, span, mir, block, stmt, .. } in self.stack().iter().rev() {
            // make sure we don't emit frames that are duplicates of the previous one
            if explicit_span == Some(span) {
                last_span = Some(span);
                continue;
            }
            if let Some(last) = last_span {
                if last == span {
                    continue;
                }
            }
            last_span = Some(span);

            let block = &mir.basic_blocks()[block];
            let source_info = if stmt < block.statements.len() {
                block.statements[stmt].source_info
            } else {
                block.terminator().source_info
            };
            let lint_root = match mir.source_scope_local_data {
                mir::ClearCrossCrate::Set(ref ivs) => Some(ivs[source_info.scope].lint_root),
                mir::ClearCrossCrate::Clear => None,
            };
            frames.push(FrameInfo { call_site: span, instance, lint_root });
        }
        trace!("generate stacktrace: {:#?}, {:?}", frames, explicit_span);
        frames
    }

    pub fn sign_extend(&self, value: u128, ty: TyLayout<'_>) -> u128 {
        assert!(ty.abi.is_signed());
        sign_extend(value, ty.size)
    }
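
    // Worked examples (illustrative): sign-extending 0xFF from an 8-bit signed layout
    // yields u128::max_value(), i.e. the 128-bit two's complement representation of -1,
    // while `truncate` below masks 0x1_00 down to 0x00 for an 8-bit layout.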

    pub fn truncate(&self, value: u128, ty: TyLayout<'_>) -> u128 {
        truncate(value, ty.size)
    }
}