//! Implements "Stacked Borrows". See <https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md>
//! for further information.
use std::cell::RefCell;
use std::fmt;
use std::num::NonZeroU64;
use std::rc::Rc;

use log::trace;

use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_middle::mir::RetagKind;
use rustc_middle::ty;
use rustc_target::abi::{Align, LayoutOf, Size};
use rustc_hir::Mutability;

use crate::*;
pub type PtrId = NonZeroU64;
pub type CallId = NonZeroU64;
pub type AllocExtra = Stacks;
/// Tracking pointer provenance
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub enum Tag {
    Tagged(PtrId),
    Untagged,
}

impl fmt::Debug for Tag {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Tag::Tagged(id) => write!(f, "<{}>", id),
            Tag::Untagged => write!(f, "<untagged>"),
        }
    }
}
/// Indicates which permission is granted (by this item to some pointers)
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum Permission {
    /// Grants unique mutable access.
    Unique,
    /// Grants shared mutable access.
    SharedReadWrite,
    /// Grants shared read-only access.
    SharedReadOnly,
    /// Grants no access, but separates two groups of SharedReadWrite so they are not
    /// all considered mutually compatible.
    Disabled,
}
/// An item in the per-location borrow stack.
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub struct Item {
    /// The permission this item grants.
    perm: Permission,
    /// The pointers the permission is granted to.
    tag: Tag,
    /// An optional protector, ensuring the item cannot get popped until `CallId` is over.
    protector: Option<CallId>,
}

impl fmt::Debug for Item {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "[{:?} for {:?}", self.perm, self.tag)?;
        if let Some(call) = self.protector {
            write!(f, " (call {})", call)?;
        }
        write!(f, "]")?;
        Ok(())
    }
}
/// Extra per-location state.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Stack {
    /// Used *mostly* as a stack; never empty.
    /// Invariants:
    /// * Above a `SharedReadOnly` there can only be more `SharedReadOnly`.
    /// * Except for `Untagged`, no tag occurs in the stack more than once.
    borrows: Vec<Item>,
}
/// Extra per-allocation state.
#[derive(Clone, Debug)]
pub struct Stacks {
    // Even reading memory can have effects on the stack, so we need a `RefCell` here.
    stacks: RefCell<RangeMap<Stack>>,
    // Pointer to global state
    global: MemoryExtra,
}
/// Extra global state, available to the memory access hooks.
pub struct GlobalState {
    /// Next unused pointer ID (tag).
    next_ptr_id: PtrId,
    /// Table storing the "base" tag for each allocation.
    /// The base tag is the one used for the initial pointer.
    /// We need this in a separate table to handle cyclic statics.
    base_ptr_ids: FxHashMap<AllocId, Tag>,
    /// Next unused call ID (for protectors).
    next_call_id: CallId,
    /// Those call IDs corresponding to functions that are still running.
    active_calls: FxHashSet<CallId>,
    /// The pointer id to trace.
    tracked_pointer_tag: Option<PtrId>,
    /// The call id to trace.
    tracked_call_id: Option<CallId>,
    /// Whether to track raw pointers.
    track_raw: bool,
}
/// Memory extra state gives us interior mutable access to the global state.
pub type MemoryExtra = Rc<RefCell<GlobalState>>;
/// Indicates which kind of access is being performed.
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub enum AccessKind {
    Read,
    Write,
}

impl fmt::Display for AccessKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AccessKind::Read => write!(f, "read access"),
            AccessKind::Write => write!(f, "write access"),
        }
    }
}
/// Indicates which kind of reference is being created.
/// Used by high-level `reborrow` to compute which permissions to grant to the
/// new pointer.
#[derive(Copy, Clone, Hash, PartialEq, Eq)]
pub enum RefKind {
    /// `&mut` and `Box`.
    Unique { two_phase: bool },
    /// `&` with or without interior mutability.
    Shared,
    /// `*mut`/`*const` (raw pointers).
    Raw { mutable: bool },
}

impl fmt::Display for RefKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            RefKind::Unique { two_phase: false } => write!(f, "unique"),
            RefKind::Unique { two_phase: true } => write!(f, "unique (two-phase)"),
            RefKind::Shared => write!(f, "shared"),
            RefKind::Raw { mutable: true } => write!(f, "raw (mutable)"),
            RefKind::Raw { mutable: false } => write!(f, "raw (constant)"),
        }
    }
}
/// Utilities for initialization and ID generation
impl GlobalState {
    pub fn new(tracked_pointer_tag: Option<PtrId>, tracked_call_id: Option<CallId>, track_raw: bool) -> Self {
        GlobalState {
            next_ptr_id: NonZeroU64::new(1).unwrap(),
            base_ptr_ids: FxHashMap::default(),
            next_call_id: NonZeroU64::new(1).unwrap(),
            active_calls: FxHashSet::default(),
            tracked_pointer_tag,
            tracked_call_id,
            track_raw,
        }
    }

    fn new_ptr(&mut self) -> PtrId {
        let id = self.next_ptr_id;
        if Some(id) == self.tracked_pointer_tag {
            register_diagnostic(NonHaltingDiagnostic::CreatedPointerTag(id));
        }
        self.next_ptr_id = NonZeroU64::new(id.get() + 1).unwrap();
        id
    }

    pub fn new_call(&mut self) -> CallId {
        let id = self.next_call_id;
        trace!("new_call: Assigning ID {}", id);
        if Some(id) == self.tracked_call_id {
            register_diagnostic(NonHaltingDiagnostic::CreatedCallId(id));
        }
        assert!(self.active_calls.insert(id));
        self.next_call_id = NonZeroU64::new(id.get() + 1).unwrap();
        id
    }

    pub fn end_call(&mut self, id: CallId) {
        assert!(self.active_calls.remove(&id));
    }

    fn is_active(&self, id: CallId) -> bool {
        self.active_calls.contains(&id)
    }

    pub fn global_base_ptr(&mut self, id: AllocId) -> Tag {
        self.base_ptr_ids.get(&id).copied().unwrap_or_else(|| {
            let tag = Tag::Tagged(self.new_ptr());
            trace!("New allocation {:?} has base tag {:?}", id, tag);
            assert!(self.base_ptr_ids.insert(id, tag).is_none());
            tag
        })
    }
}
fn err_sb_ub(msg: String) -> InterpError<'static> {
    err_machine_stop!(TerminationInfo::ExperimentalUb {
        msg,
        url: format!("https://github.com/rust-lang/unsafe-code-guidelines/blob/master/wip/stacked-borrows.md"),
    })
}
// # Stacked Borrows Core Begin

/// We need to make at least the following things true:
///
/// U1: After creating a `Uniq`, it is at the top.
/// U2: If the top is `Uniq`, accesses must be through that `Uniq` or remove it.
/// U3: If an access happens with a `Uniq`, it requires the `Uniq` to be in the stack.
///
/// F1: After creating a `&`, the parts outside `UnsafeCell` have our `SharedReadOnly` on top.
/// F2: If a write access happens, it pops the `SharedReadOnly`. This has two pieces:
/// F2a: If a write happens granted by an item below our `SharedReadOnly`, the `SharedReadOnly`
///      gets popped.
/// F2b: No `SharedReadWrite` or `Unique` will ever be added on top of our `SharedReadOnly`.
/// F3: If an access happens with an `&` outside `UnsafeCell`,
///     it requires the `SharedReadOnly` to still be in the stack.
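//
// As an informal illustration of U2/F2 (a sketch, not part of this module; the tags
// in angle brackets are made up for the example, and base items of the underlying
// allocation are omitted):
//
//     let mut local = 0u8;
//     let x = &mut local;        // stack: [ Unique(<x>) ]                       (U1)
//     let raw = x as *mut u8;    // stack: [ Unique(<x>), SharedReadWrite(<raw>) ]
//     let y = &mut *x;           // reborrowing through `x` acts as a write access:
//                                // pops SharedReadWrite(<raw>), pushes Unique(<y>)
//     unsafe { *raw = 5 };       // ERROR: no item granting write access to <raw>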
/// Core relation on `Permission` to define which accesses are allowed
impl Permission {
    /// This defines, for a given permission, whether it permits the given kind of access.
    fn grants(self, access: AccessKind) -> bool {
        // Disabled grants nothing. Otherwise, all items grant read access, and except for
        // SharedReadOnly they grant write access.
        self != Permission::Disabled
            && (access == AccessKind::Read || self != Permission::SharedReadOnly)
    }
}
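// In tabular form (derived directly from `grants` above):
//
//                      read    write
//     Unique           yes     yes
//     SharedReadWrite  yes     yes
//     SharedReadOnly   yes     no
//     Disabled         no      no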
/// Core per-location operations: access, dealloc, reborrow.
impl<'tcx> Stack {
    /// Find the item granting the given kind of access to the given tag, and return where
    /// it is on the stack.
    fn find_granting(&self, access: AccessKind, tag: Tag) -> Option<usize> {
        self.borrows
            .iter()
            .enumerate() // we also need to know *where* in the stack
            .rev() // search top-to-bottom
            // Return the index of the first item that grants access.
            // We require a permission with the right tag, ensuring U3 and F3.
            .find_map(|(idx, item)| {
                if tag == item.tag && item.perm.grants(access) { Some(idx) } else { None }
            })
    }
    /// Find the first write-incompatible item above the given one --
    /// i.e., find the height to which the stack will be truncated when writing to `granting`.
    fn find_first_write_incompatible(&self, granting: usize) -> usize {
        let perm = self.borrows[granting].perm;
        match perm {
            Permission::SharedReadOnly => bug!("Cannot use SharedReadOnly for writing"),
            Permission::Disabled => bug!("Cannot use Disabled for anything"),
            // On a write, everything above us is incompatible.
            Permission::Unique => granting + 1,
            Permission::SharedReadWrite => {
                // The SharedReadWrite *just* above us are compatible, so we skip those.
                let mut idx = granting + 1;
                while let Some(item) = self.borrows.get(idx) {
                    if item.perm == Permission::SharedReadWrite {
                        // Go on.
                        idx += 1;
                    } else {
                        // Found first incompatible!
                        break;
                    }
                }
                idx
            }
        }
    }
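    // A worked example (informal; indices are stack positions, bottom = 0): for
    // `borrows = [ Unique, SharedReadWrite, SharedReadWrite, SharedReadOnly ]`,
    // `find_first_write_incompatible(0)` returns 1 (everything above a `Unique` is
    // incompatible), while `find_first_write_incompatible(1)` returns 3 (the adjacent
    // `SharedReadWrite` at index 2 is compatible, the `SharedReadOnly` is not).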
    /// Check if the given item is protected.
    fn check_protector(item: &Item, tag: Option<Tag>, global: &GlobalState) -> InterpResult<'tcx> {
        if let Tag::Tagged(id) = item.tag {
            if Some(id) == global.tracked_pointer_tag {
                register_diagnostic(NonHaltingDiagnostic::PoppedPointerTag(item.clone()));
            }
        }
        if let Some(call) = item.protector {
            if global.is_active(call) {
                if let Some(tag) = tag {
                    Err(err_sb_ub(format!(
                        "not granting access to tag {:?} because incompatible item is protected: {:?}",
                        tag, item
                    )))?
                } else {
                    Err(err_sb_ub(format!(
                        "deallocating while item is protected: {:?}",
                        item
                    )))?
                }
            }
        }
        Ok(())
    }
    /// Test if a memory `access` using pointer tagged `tag` is granted; if not,
    /// report undefined behavior. On success, incompatible items above the granting
    /// one are popped (on writes) or disabled (for `Unique`, on reads).
    fn access(&mut self, access: AccessKind, ptr: Pointer<Tag>, global: &GlobalState) -> InterpResult<'tcx> {
        // Two main steps: Find granting item, remove incompatible items above.

        // Step 1: Find granting item.
        let granting_idx = self.find_granting(access, ptr.tag).ok_or_else(|| {
            err_sb_ub(format!(
                "no item granting {} to tag {:?} at {} found in borrow stack",
                access, ptr.tag, ptr.erase_tag(),
            ))
        })?;

        // Step 2: Remove incompatible items above them. Make sure we do not remove protected
        // items. Behavior differs for reads and writes.
        if access == AccessKind::Write {
            // Remove everything above the write-compatible items, like a proper stack. This makes
            // sure read-only and unique pointers become invalid on write accesses (ensures F2a,
            // and ensures U2 for write accesses).
            let first_incompatible_idx = self.find_first_write_incompatible(granting_idx);
            for item in self.borrows.drain(first_incompatible_idx..).rev() {
                trace!("access: popping item {:?}", item);
                Stack::check_protector(&item, Some(ptr.tag), global)?;
            }
        } else {
            // On a read, *disable* all `Unique` above the granting item. This ensures U2 for read
            // accesses. The reason this is not following the stack discipline (by removing the
            // first Unique and everything on top of it) is that in
            // `let raw = &mut *x as *mut _; let _val = *x;`, the second statement would pop the
            // `Unique` from the reborrow of the first statement, and subsequently also pop the
            // `SharedReadWrite` for `raw`.
            // This pattern occurs a lot in the standard library: create a raw pointer, then also
            // create a shared reference and use that.
            // We *disable* instead of removing `Unique` to avoid "connecting" two neighbouring
            // blocks of SRWs.
            for idx in ((granting_idx + 1)..self.borrows.len()).rev() {
                let item = &mut self.borrows[idx];
                if item.perm == Permission::Unique {
                    trace!("access: disabling item {:?}", item);
                    Stack::check_protector(item, Some(ptr.tag), global)?;
                    item.perm = Permission::Disabled;
                }
            }
        }

        // Done.
        Ok(())
    }
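    // Informal example of the two behaviors (tags made up): with the stack
    // [ Unique(<1>), SharedReadWrite(<2>), SharedReadOnly(<3>) ],
    // - a *read* with tag <3> or <1> changes nothing (there is no `Unique` above
    //   either granting item);
    // - a *write* with tag <1> pops both SharedReadWrite(<2>) and SharedReadOnly(<3>);
    // - a *write* with tag <2> keeps Unique(<1>) below it and pops only
    //   SharedReadOnly(<3>).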
    /// Deallocate a location: Like a write access, but also there must be no
    /// active protectors at all because we will remove all items.
    fn dealloc(&mut self, ptr: Pointer<Tag>, global: &GlobalState) -> InterpResult<'tcx> {
        // Step 1: Find granting item.
        self.find_granting(AccessKind::Write, ptr.tag).ok_or_else(|| {
            err_sb_ub(format!(
                "no item granting write access for deallocation to tag {:?} at {} found in borrow stack",
                ptr.tag, ptr.erase_tag(),
            ))
        })?;

        // Step 2: Remove all items. Also checks for protectors.
        for item in self.borrows.drain(..).rev() {
            Stack::check_protector(&item, None, global)?;
        }

        Ok(())
    }
    /// Derive a new pointer from one with the given tag.
    /// The permission of `new` determines whether this operation is weak or strong: weak
    /// granting does not act as an access, and adds the new item directly on top of the
    /// one it is derived from instead of all the way at the top of the stack.
    fn grant(&mut self, derived_from: Pointer<Tag>, new: Item, global: &GlobalState) -> InterpResult<'tcx> {
        // Figure out which access `perm` corresponds to.
        let access =
            if new.perm.grants(AccessKind::Write) { AccessKind::Write } else { AccessKind::Read };
        // Now we figure out which item grants our parent (`derived_from`) this kind of access.
        // We use that to determine where to put the new item.
        let granting_idx = self.find_granting(access, derived_from.tag)
            .ok_or_else(|| err_sb_ub(format!(
                "trying to reborrow for {:?} at {}, but parent tag {:?} does not have an appropriate item in the borrow stack",
                new.perm, derived_from.erase_tag(), derived_from.tag,
            )))?;

        // Compute where to put the new item.
        // Either way, we ensure that we insert the new item in a way such that between
        // `derived_from` and the new one, there are only items *compatible with* `derived_from`.
        let new_idx = if new.perm == Permission::SharedReadWrite {
            assert!(
                access == AccessKind::Write,
                "this case only makes sense for stack-like accesses"
            );
            // SharedReadWrite can coexist with "existing loans", meaning they don't act like a write
            // access. Instead of popping the stack, we insert the item at the place the stack would
            // be popped to (i.e., we insert it above all the write-compatible items).
            // This ensures F2b by adding the new item below any potentially existing `SharedReadOnly`.
            self.find_first_write_incompatible(granting_idx)
        } else {
            // A "safe" reborrow for a pointer that actually expects some aliasing guarantees.
            // Here, creating a reference actually counts as an access.
            // This ensures F2b for `Unique`, by removing offending `SharedReadOnly`.
            self.access(access, derived_from, global)?;

            // We insert "as far up as possible": We know only compatible items are remaining
            // on top of `derived_from`, and we want the new item at the top so that we
            // get the strongest possible guarantees.
            // This ensures U1 and F1.
            self.borrows.len()
        };

        // Put the new item there. As an optimization, deduplicate if it is equal to one of its
        // new neighbors.
        if self.borrows[new_idx - 1] == new || self.borrows.get(new_idx) == Some(&new) {
            // Optimization applies, done.
            trace!("reborrow: avoiding adding redundant item {:?}", new);
        } else {
            trace!("reborrow: adding item {:?}", new);
            self.borrows.insert(new_idx, new);
        }

        Ok(())
    }
}
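// Informal example of `grant` (tags made up): starting from [ Unique(<1>) ], a raw
// reborrow from <1> grants SharedReadWrite(<2>) *weakly*: it is inserted just above
// <1> without popping anything, giving [ Unique(<1>), SharedReadWrite(<2>) ]. A
// subsequent `&mut` reborrow from <1> grants Unique(<3>) *strongly*: it first performs
// a write access through <1> (popping SharedReadWrite(<2>)), then pushes on top,
// giving [ Unique(<1>), Unique(<3>) ].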
// # Stacked Borrows Core End

/// Map per-stack operations to higher-level per-location-range operations.
impl<'tcx> Stacks {
    /// Creates a new stack with an initial tag.
    fn new(size: Size, perm: Permission, tag: Tag, extra: MemoryExtra) -> Self {
        let item = Item { perm, tag, protector: None };
        let stack = Stack { borrows: vec![item] };

        Stacks { stacks: RefCell::new(RangeMap::new(size, stack)), global: extra }
    }
    /// Call `f` on every stack in the range.
    fn for_each(
        &self,
        ptr: Pointer<Tag>,
        size: Size,
        f: impl Fn(Pointer<Tag>, &mut Stack, &GlobalState) -> InterpResult<'tcx>,
    ) -> InterpResult<'tcx> {
        let global = self.global.borrow();
        let mut stacks = self.stacks.borrow_mut();
        for (offset, stack) in stacks.iter_mut(ptr.offset, size) {
            let mut cur_ptr = ptr;
            cur_ptr.offset = offset;
            f(cur_ptr, stack, &*global)?;
        }
        Ok(())
    }
}
/// Glue code to connect with Miri Machine Hooks
impl Stacks {
    pub fn new_allocation(
        id: AllocId,
        size: Size,
        extra: MemoryExtra,
        kind: MemoryKind<MiriMemoryKind>,
    ) -> (Self, Tag) {
        let (tag, perm) = match kind {
            // New unique borrow. This tag is not accessible by the program,
            // so it will only ever be used when using the local directly (i.e.,
            // not through a pointer). That is, whenever we directly write to a local, this will pop
            // everything else off the stack, invalidating all previous pointers,
            // and in particular, *all* raw pointers.
            MemoryKind::Stack => (Tag::Tagged(extra.borrow_mut().new_ptr()), Permission::Unique),
            // `Global` memory can be referenced by global pointers from `tcx`.
            // Thus we call `global_base_ptr` such that the global pointers get the same tag
            // as what we use here.
            // `ExternStatic` is used for extern statics, and thus must also be listed here.
            // `Env` we list because we can get away with precise tracking there.
            // The base pointer is not unique, so the base permission is `SharedReadWrite`.
            MemoryKind::Machine(MiriMemoryKind::Global | MiriMemoryKind::ExternStatic | MiriMemoryKind::Tls | MiriMemoryKind::Env) =>
                (extra.borrow_mut().global_base_ptr(id), Permission::SharedReadWrite),
            // Everything else we handle like raw pointers for now.
            _ => {
                let mut extra = extra.borrow_mut();
                let tag = if extra.track_raw { Tag::Tagged(extra.new_ptr()) } else { Tag::Untagged };
                (tag, Permission::SharedReadWrite)
            }
        };
        (Stacks::new(size, perm, tag, extra), tag)
    }
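    // E.g. (informal): a 4-byte stack local gets a fresh tag <t> and, at each of its
    // four offsets, the singleton stack [ Unique(<t>) ]; writing to the local through
    // <t> will therefore pop every item later pushed above it.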
    pub fn memory_read<'tcx>(&self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
        trace!("read access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
        self.for_each(ptr, size, |ptr, stack, global| stack.access(AccessKind::Read, ptr, global))
    }

    pub fn memory_written<'tcx>(&mut self, ptr: Pointer<Tag>, size: Size) -> InterpResult<'tcx> {
        trace!("write access with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
        self.for_each(ptr, size, |ptr, stack, global| stack.access(AccessKind::Write, ptr, global))
    }

    pub fn memory_deallocated<'tcx>(
        &mut self,
        ptr: Pointer<Tag>,
        size: Size,
    ) -> InterpResult<'tcx> {
        trace!("deallocation with tag {:?}: {:?}, size {}", ptr.tag, ptr.erase_tag(), size.bytes());
        self.for_each(ptr, size, |ptr, stack, global| stack.dealloc(ptr, global))
    }
}
/// Retagging/reborrowing. There is some policy in here, such as which permissions
/// to grant for which references, and when to add protectors.
impl<'mir, 'tcx: 'mir> EvalContextPrivExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
trait EvalContextPrivExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
    fn reborrow(
        &mut self,
        place: MPlaceTy<'tcx, Tag>,
        size: Size,
        kind: RefKind,
        new_tag: Tag,
        protect: bool,
    ) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let protector = if protect { Some(this.frame().extra.call_id) } else { None };
        let ptr = place.ptr.assert_ptr();
        trace!(
            "reborrow: {} reference {:?} derived from {:?} (pointee {}): {:?}, size {}",
            kind,
            new_tag,
            ptr.tag,
            place.layout.ty,
            ptr.erase_tag(),
            size.bytes()
        );

        // Get the allocation. It might not be mutable, so we cannot use `get_mut`.
        let extra = &this.memory.get_raw(ptr.alloc_id)?.extra;
        let stacked_borrows =
            extra.stacked_borrows.as_ref().expect("we should have Stacked Borrows data");
        // Update the stacks.
        // Make sure that raw pointers and mutable shared references are reborrowed "weak":
        // There could be existing unique pointers reborrowed from them that should remain valid!
        let perm = match kind {
            RefKind::Unique { two_phase: false } => Permission::Unique,
            RefKind::Unique { two_phase: true } => Permission::SharedReadWrite,
            RefKind::Raw { mutable: true } => Permission::SharedReadWrite,
            RefKind::Shared | RefKind::Raw { mutable: false } => {
                // Shared references and *const are a whole different kind of game: the
                // permission is not uniform across the entire range!
                // We need a frozen-sensitive reborrow.
                return this.visit_freeze_sensitive(place, size, |cur_ptr, size, frozen| {
                    // We are only ever `SharedReadOnly` inside the frozen bits.
                    let perm = if frozen {
                        Permission::SharedReadOnly
                    } else {
                        Permission::SharedReadWrite
                    };
                    let item = Item { perm, tag: new_tag, protector };
                    stacked_borrows.for_each(cur_ptr, size, |cur_ptr, stack, global| {
                        stack.grant(cur_ptr, item, global)
                    })
                });
            }
        };
        let item = Item { perm, tag: new_tag, protector };
        stacked_borrows.for_each(ptr, size, |ptr, stack, global| stack.grant(ptr, item, global))
    }
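    // E.g. (informal): reborrowing `&(i32, Cell<i32>)` grants `SharedReadOnly` on the
    // bytes of the `i32` (the frozen part) but `SharedReadWrite` on the bytes of the
    // `Cell<i32>`, so writes through the `Cell` do not invalidate the shared reference.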
    /// Retags an individual pointer, returning the retagged version.
    /// `kind` indicates which kind of reference is being created.
    fn retag_reference(
        &mut self,
        val: ImmTy<'tcx, Tag>,
        kind: RefKind,
        protect: bool,
    ) -> InterpResult<'tcx, ImmTy<'tcx, Tag>> {
        let this = self.eval_context_mut();
        // We want a place for where the ptr *points to*, so we get one.
        let place = this.ref_to_mplace(val)?;
        let size = this
            .size_and_align_of_mplace(place)?
            .map(|(size, _)| size)
            .unwrap_or_else(|| place.layout.size);
        // `reborrow` relies on getting a `Pointer` and everything being in-bounds,
        // so let's ensure that. However, we do not care about alignment.
        // We can see dangling ptrs in here e.g. after a Box's `Unique` was
        // updated using "self.0 = ..." (can happen in Box::from_raw) so we cannot ICE; see miri#1050.
        let place = this.mplace_access_checked(place, Some(Align::from_bytes(1).unwrap()))?;
        // Nothing to do for ZSTs.
        if size == Size::ZERO {
            return Ok(val);
        }

        // Compute new borrow.
        let new_tag = {
            let mut mem_extra = this.memory.extra.stacked_borrows.as_ref().unwrap().borrow_mut();
            match kind {
                // Give up tracking for raw pointers.
                RefKind::Raw { .. } if !mem_extra.track_raw => Tag::Untagged,
                // All other pointers are properly tracked.
                _ => Tag::Tagged(mem_extra.new_ptr()),
            }
        };

        // Reborrow.
        this.reborrow(place, size, kind, new_tag, protect)?;
        let new_place = place.replace_tag(new_tag);

        // Return new pointer.
        Ok(ImmTy::from_immediate(new_place.to_ref(), val.layout))
    }
}
impl<'mir, 'tcx: 'mir> EvalContextExt<'mir, 'tcx> for crate::MiriEvalContext<'mir, 'tcx> {}
pub trait EvalContextExt<'mir, 'tcx: 'mir>: crate::MiriEvalContextExt<'mir, 'tcx> {
    fn retag(&mut self, kind: RetagKind, place: PlaceTy<'tcx, Tag>) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        // Determine mutability and whether to add a protector.
        // Cannot use `builtin_deref` because that reports *immutable* for `Box`,
        // making it useless.
        fn qualify(ty: ty::Ty<'_>, kind: RetagKind) -> Option<(RefKind, bool)> {
            match ty.kind {
                // References are simple.
                ty::Ref(_, _, Mutability::Mut) => Some((
                    RefKind::Unique { two_phase: kind == RetagKind::TwoPhase },
                    kind == RetagKind::FnEntry,
                )),
                ty::Ref(_, _, Mutability::Not) =>
                    Some((RefKind::Shared, kind == RetagKind::FnEntry)),
                // Raw pointers need to be enabled.
                ty::RawPtr(tym) if kind == RetagKind::Raw =>
                    Some((RefKind::Raw { mutable: tym.mutbl == Mutability::Mut }, false)),
                // Boxes do not get a protector: protectors reflect that references outlive the call
                // they were passed in to; that's just not the case for boxes.
                ty::Adt(..) if ty.is_box() => Some((RefKind::Unique { two_phase: false }, false)),
                _ => None,
            }
        }

        // We only reborrow "bare" references/boxes.
        // Not traversing into fields helps with <https://github.com/rust-lang/unsafe-code-guidelines/issues/125>,
        // but might also cost us optimization and analyses. We will have to experiment more with this.
        if let Some((mutbl, protector)) = qualify(place.layout.ty, kind) {
            let val = this.read_immediate(this.place_to_op(place)?)?;
            let val = this.retag_reference(val, mutbl, protector)?;
            this.write_immediate(*val, place)?;
        }

        Ok(())
    }
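    // E.g. (informal): with retagging enabled (as under Miri), a MIR statement like
    // `_2 = &mut _1` is followed by `Retag(_2)`, so `retag` gives `_2` a fresh `Unique`
    // tag; at function entry, a `FnEntry` retag of an `&mut` argument additionally adds
    // a protector, so the argument's item cannot be popped for the duration of the call.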
    /// After a stack frame got pushed, retag the return place so that we are sure
    /// it does not alias with anything.
    ///
    /// This is a HACK because there is nothing in MIR that would make the retag
    /// explicit. Also see <https://github.com/rust-lang/rust/issues/71117>.
    fn retag_return_place(&mut self) -> InterpResult<'tcx> {
        let this = self.eval_context_mut();
        let return_place = if let Some(return_place) = this.frame_mut().return_place {
            return_place
        } else {
            // No return place, nothing to do.
            return Ok(());
        };
        if return_place.layout.is_zst() {
            // There may not be any memory here, nothing to do.
            return Ok(());
        }
        // We need this to be in-memory to use tagged pointers.
        let return_place = this.force_allocation(return_place)?;

        // We have to turn the place into a pointer to use the existing code.
        // (The pointer type does not matter, so we use a raw pointer.)
        let ptr_layout = this.layout_of(this.tcx.mk_mut_ptr(return_place.layout.ty))?;
        let val = ImmTy::from_immediate(return_place.to_ref(), ptr_layout);
        // Reborrow it.
        let val = this.retag_reference(val, RefKind::Unique { two_phase: false }, /*protector*/ true)?;
        // And use reborrowed pointer for return place.
        let return_place = this.ref_to_mplace(val)?;
        this.frame_mut().return_place = Some(return_place.into());

        Ok(())
    }
}