1 //! This is the implementation of the pass which transforms generators into state machines.
//! MIR generation for generators creates a function which has a self argument which
//! is passed by value. This argument is effectively a generator type which only contains upvars
//! and is only used for this argument inside the MIR for the generator.
6 //! It is passed by value to enable upvars to be moved out of it. Drop elaboration runs on that
7 //! MIR before this pass and creates drop flags for MIR locals.
8 //! It will also drop the generator argument (which only consists of upvars) if any of the upvars
9 //! are moved out of. This pass elaborates the drops of upvars / generator argument in the case
10 //! that none of the upvars were moved out of. This is because we cannot have any drops of this
11 //! generator in the MIR, since it is used to create the drop glue for the generator. We'd get
12 //! infinite recursion otherwise.
14 //! This pass creates the implementation for either the `Generator::resume` or `Future::poll`
15 //! function and the drop shim for the generator based on the MIR input.
16 //! It converts the generator argument from Self to &mut Self adding derefs in the MIR as needed.
17 //! It computes the final layout of the generator struct which looks like this:
18 //! First upvars are stored
19 //! It is followed by the generator state field.
20 //! Then finally the MIR locals which are live across a suspension point are stored.
21 //! ```ignore (illustrative)
22 //! struct Generator {
28 //! This pass computes the meaning of the state field and the MIR locals which are live
29 //! across a suspension point. There are however three hardcoded generator states:
//! 0 - Generator has not been resumed yet
31 //! 1 - Generator has returned / is completed
32 //! 2 - Generator has been poisoned
34 //! It also rewrites `return x` and `yield y` as setting a new generator state and returning
35 //! `GeneratorState::Complete(x)` and `GeneratorState::Yielded(y)`,
36 //! or `Poll::Ready(x)` and `Poll::Pending` respectively.
37 //! MIR locals which are live across a suspension point are moved to the generator struct
38 //! with references to them being updated with references to the generator struct.
40 //! The pass creates two functions which have a switch on the generator state giving
41 //! the action to take.
43 //! One of them is the implementation of `Generator::resume` / `Future::poll`.
44 //! For generators with state 0 (unresumed) it starts the execution of the generator.
45 //! For generators with state 1 (returned) and state 2 (poisoned) it panics.
46 //! Otherwise it continues the execution from the last suspension point.
48 //! The other function is the drop glue for the generator.
49 //! For generators with state 0 (unresumed) it drops the upvars of the generator.
50 //! For generators with state 1 (returned) and state 2 (poisoned) it does nothing.
51 //! Otherwise it drops all the values in scope at the last suspension point.
53 use crate::deref_separator::deref_finder;
55 use crate::util::expand_aggregate;
57 use rustc_data_structures::fx::{FxHashMap, FxHashSet};
58 use rustc_errors::pluralize;
60 use rustc_hir::lang_items::LangItem;
61 use rustc_hir::GeneratorKind;
62 use rustc_index::bit_set::{BitMatrix, BitSet, GrowableBitSet};
63 use rustc_index::vec::{Idx, IndexVec};
64 use rustc_middle::mir::dump_mir;
65 use rustc_middle::mir::visit::{MutVisitor, PlaceContext, Visitor};
66 use rustc_middle::mir::*;
67 use rustc_middle::ty::{self, AdtDef, Ty, TyCtxt};
68 use rustc_middle::ty::{GeneratorSubsts, SubstsRef};
69 use rustc_mir_dataflow::impls::{
70 MaybeBorrowedLocals, MaybeLiveLocals, MaybeRequiresStorage, MaybeStorageLive,
72 use rustc_mir_dataflow::storage::always_storage_live_locals;
73 use rustc_mir_dataflow::{self, Analysis};
74 use rustc_span::def_id::DefId;
75 use rustc_span::symbol::sym;
77 use rustc_target::abi::VariantIdx;
78 use rustc_target::spec::PanicStrategy;
/// MIR pass that transforms a generator body into a state machine,
/// producing the `Generator::resume` / `Future::poll` implementation
/// and computing the generator's final layout.
pub struct StateTransform;
83 struct RenameLocalVisitor<'tcx> {
89 impl<'tcx> MutVisitor<'tcx> for RenameLocalVisitor<'tcx> {
90 fn tcx(&self) -> TyCtxt<'tcx> {
94 fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
95 if *local == self.from {
100 fn visit_terminator(&mut self, terminator: &mut Terminator<'tcx>, location: Location) {
101 match terminator.kind {
102 TerminatorKind::Return => {
103 // Do not replace the implicit `_0` access here, as that's not possible. The
104 // transform already handles `return` correctly.
106 _ => self.super_terminator(terminator, location),
111 struct DerefArgVisitor<'tcx> {
115 impl<'tcx> MutVisitor<'tcx> for DerefArgVisitor<'tcx> {
116 fn tcx(&self) -> TyCtxt<'tcx> {
120 fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
121 assert_ne!(*local, SELF_ARG);
124 fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
125 if place.local == SELF_ARG {
130 projection: self.tcx().intern_place_elems(&[ProjectionElem::Deref]),
135 self.visit_local(&mut place.local, context, location);
137 for elem in place.projection.iter() {
138 if let PlaceElem::Index(local) = elem {
139 assert_ne!(local, SELF_ARG);
146 struct PinArgVisitor<'tcx> {
147 ref_gen_ty: Ty<'tcx>,
151 impl<'tcx> MutVisitor<'tcx> for PinArgVisitor<'tcx> {
152 fn tcx(&self) -> TyCtxt<'tcx> {
156 fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
157 assert_ne!(*local, SELF_ARG);
160 fn visit_place(&mut self, place: &mut Place<'tcx>, context: PlaceContext, location: Location) {
161 if place.local == SELF_ARG {
166 projection: self.tcx().intern_place_elems(&[ProjectionElem::Field(
174 self.visit_local(&mut place.local, context, location);
176 for elem in place.projection.iter() {
177 if let PlaceElem::Index(local) = elem {
178 assert_ne!(local, SELF_ARG);
185 fn replace_base<'tcx>(place: &mut Place<'tcx>, new_base: Place<'tcx>, tcx: TyCtxt<'tcx>) {
186 place.local = new_base.local;
188 let mut new_projection = new_base.projection.to_vec();
189 new_projection.append(&mut place.projection.to_vec());
191 place.projection = tcx.intern_place_elems(&new_projection);
/// The MIR local (`_1`) holding the generator object itself; every generator
/// body receives it as its first argument.
const SELF_ARG: Local = Local::from_u32(1);

/// Generator has not been resumed yet.
const UNRESUMED: usize = GeneratorSubsts::UNRESUMED;
/// Generator has returned / is completed.
const RETURNED: usize = GeneratorSubsts::RETURNED;
/// Generator has panicked and is poisoned.
const POISONED: usize = GeneratorSubsts::POISONED;

/// Number of variants to reserve in generator state. Corresponds to
/// `UNRESUMED` (beginning of a generator) and `RETURNED`/`POISONED`
/// (end of a generator) states.
const RESERVED_VARIANTS: usize = 3;
208 /// A `yield` point in the generator.
209 struct SuspensionPoint<'tcx> {
210 /// State discriminant used when suspending or resuming at this point.
212 /// The block to jump to after resumption.
214 /// Where to move the resume argument after resumption.
215 resume_arg: Place<'tcx>,
216 /// Which block to jump to if the generator is dropped in this state.
217 drop: Option<BasicBlock>,
218 /// Set of locals that have live storage while at this suspension point.
219 storage_liveness: GrowableBitSet<Local>,
222 struct TransformVisitor<'tcx> {
225 state_adt_ref: AdtDef<'tcx>,
226 state_substs: SubstsRef<'tcx>,
228 // The type of the discriminant in the generator struct
231 // Mapping from Local to (type of local, generator struct index)
232 // FIXME(eddyb) This should use `IndexVec<Local, Option<_>>`.
233 remap: FxHashMap<Local, (Ty<'tcx>, VariantIdx, usize)>,
235 // A map from a suspension point in a block to the locals which have live storage at that point
236 storage_liveness: IndexVec<BasicBlock, Option<BitSet<Local>>>,
238 // A list of suspension points, generated during the transform
239 suspension_points: Vec<SuspensionPoint<'tcx>>,
241 // The set of locals that have no `StorageLive`/`StorageDead` annotations.
242 always_live_locals: BitSet<Local>,
244 // The original RETURN_PLACE local
245 new_ret_local: Local,
248 impl<'tcx> TransformVisitor<'tcx> {
249 // Make a `GeneratorState` or `Poll` variant assignment.
251 // `core::ops::GeneratorState` only has single element tuple variants,
252 // so we can just write to the downcasted first field and then set the
253 // discriminant to the appropriate variant.
257 source_info: SourceInfo,
259 statements: &mut Vec<Statement<'tcx>>,
261 let idx = VariantIdx::new(match (is_return, self.is_async_kind) {
262 (true, false) => 1, // GeneratorState::Complete
263 (false, false) => 0, // GeneratorState::Yielded
264 (true, true) => 0, // Poll::Ready
265 (false, true) => 1, // Poll::Pending
268 let kind = AggregateKind::Adt(self.state_adt_ref.did(), idx, self.state_substs, None, None);
271 if self.is_async_kind && idx == VariantIdx::new(1) {
272 assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 0);
274 // FIXME(swatinem): assert that `val` is indeed unit?
275 statements.extend(expand_aggregate(
276 Place::return_place(),
285 // else: `Poll::Ready(x)`, `GeneratorState::Yielded(x)` or `GeneratorState::Complete(x)`
286 assert_eq!(self.state_adt_ref.variant(idx).fields.len(), 1);
290 .bound_type_of(self.state_adt_ref.variant(idx).fields[0].did)
291 .subst(self.tcx, self.state_substs);
293 statements.extend(expand_aggregate(
294 Place::return_place(),
295 std::iter::once((val, ty)),
302 // Create a Place referencing a generator struct field
303 fn make_field(&self, variant_index: VariantIdx, idx: usize, ty: Ty<'tcx>) -> Place<'tcx> {
304 let self_place = Place::from(SELF_ARG);
305 let base = self.tcx.mk_place_downcast_unnamed(self_place, variant_index);
306 let mut projection = base.projection.to_vec();
307 projection.push(ProjectionElem::Field(Field::new(idx), ty));
309 Place { local: base.local, projection: self.tcx.intern_place_elems(&projection) }
312 // Create a statement which changes the discriminant
313 fn set_discr(&self, state_disc: VariantIdx, source_info: SourceInfo) -> Statement<'tcx> {
314 let self_place = Place::from(SELF_ARG);
317 kind: StatementKind::SetDiscriminant {
318 place: Box::new(self_place),
319 variant_index: state_disc,
324 // Create a statement which reads the discriminant into a temporary
325 fn get_discr(&self, body: &mut Body<'tcx>) -> (Statement<'tcx>, Place<'tcx>) {
326 let temp_decl = LocalDecl::new(self.discr_ty, body.span).internal();
327 let local_decls_len = body.local_decls.push(temp_decl);
328 let temp = Place::from(local_decls_len);
330 let self_place = Place::from(SELF_ARG);
331 let assign = Statement {
332 source_info: SourceInfo::outermost(body.span),
333 kind: StatementKind::Assign(Box::new((temp, Rvalue::Discriminant(self_place)))),
339 impl<'tcx> MutVisitor<'tcx> for TransformVisitor<'tcx> {
340 fn tcx(&self) -> TyCtxt<'tcx> {
344 fn visit_local(&mut self, local: &mut Local, _: PlaceContext, _: Location) {
345 assert_eq!(self.remap.get(local), None);
350 place: &mut Place<'tcx>,
351 _context: PlaceContext,
354 // Replace an Local in the remap with a generator struct access
355 if let Some(&(ty, variant_index, idx)) = self.remap.get(&place.local) {
356 replace_base(place, self.make_field(variant_index, idx, ty), self.tcx);
360 fn visit_basic_block_data(&mut self, block: BasicBlock, data: &mut BasicBlockData<'tcx>) {
361 // Remove StorageLive and StorageDead statements for remapped locals
362 data.retain_statements(|s| match s.kind {
363 StatementKind::StorageLive(l) | StatementKind::StorageDead(l) => {
364 !self.remap.contains_key(&l)
369 let ret_val = match data.terminator().kind {
370 TerminatorKind::Return => {
371 Some((true, None, Operand::Move(Place::from(self.new_ret_local)), None))
373 TerminatorKind::Yield { ref value, resume, resume_arg, drop } => {
374 Some((false, Some((resume, resume_arg)), value.clone(), drop))
379 if let Some((is_return, resume, v, drop)) = ret_val {
380 let source_info = data.terminator().source_info;
381 // We must assign the value first in case it gets declared dead below
382 self.make_state(v, source_info, is_return, &mut data.statements);
383 let state = if let Some((resume, mut resume_arg)) = resume {
385 let state = RESERVED_VARIANTS + self.suspension_points.len();
387 // The resume arg target location might itself be remapped if its base local is
388 // live across a yield.
390 if let Some(&(ty, variant, idx)) = self.remap.get(&resume_arg.local) {
391 replace_base(&mut resume_arg, self.make_field(variant, idx, ty), self.tcx);
397 self.suspension_points.push(SuspensionPoint {
402 storage_liveness: self.storage_liveness[block].clone().unwrap().into(),
405 VariantIdx::new(state)
408 VariantIdx::new(RETURNED) // state for returned
410 data.statements.push(self.set_discr(state, source_info));
411 data.terminator_mut().kind = TerminatorKind::Return;
414 self.super_basic_block_data(block, data);
418 fn make_generator_state_argument_indirect<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
419 let gen_ty = body.local_decls.raw[1].ty;
422 tcx.mk_ref(tcx.lifetimes.re_erased, ty::TypeAndMut { ty: gen_ty, mutbl: Mutability::Mut });
424 // Replace the by value generator argument
425 body.local_decls.raw[1].ty = ref_gen_ty;
427 // Add a deref to accesses of the generator state
428 DerefArgVisitor { tcx }.visit_body(body);
431 fn make_generator_state_argument_pinned<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
432 let ref_gen_ty = body.local_decls.raw[1].ty;
434 let pin_did = tcx.require_lang_item(LangItem::Pin, Some(body.span));
435 let pin_adt_ref = tcx.adt_def(pin_did);
436 let substs = tcx.intern_substs(&[ref_gen_ty.into()]);
437 let pin_ref_gen_ty = tcx.mk_adt(pin_adt_ref, substs);
439 // Replace the by ref generator argument
440 body.local_decls.raw[1].ty = pin_ref_gen_ty;
442 // Add the Pin field access to accesses of the generator state
443 PinArgVisitor { ref_gen_ty, tcx }.visit_body(body);
446 /// Allocates a new local and replaces all references of `local` with it. Returns the new local.
448 /// `local` will be changed to a new local decl with type `ty`.
450 /// Note that the new local will be uninitialized. It is the caller's responsibility to assign some
451 /// valid value to it before its first use.
452 fn replace_local<'tcx>(
455 body: &mut Body<'tcx>,
458 let new_decl = LocalDecl::new(ty, body.span);
459 let new_local = body.local_decls.push(new_decl);
460 body.local_decls.swap(local, new_local);
462 RenameLocalVisitor { from: local, to: new_local, tcx }.visit_body(body);
467 /// Transforms the `body` of the generator applying the following transforms:
469 /// - Eliminates all the `get_context` calls that async lowering created.
470 /// - Replace all `Local` `ResumeTy` types with `&mut Context<'_>` (`context_mut_ref`).
472 /// The `Local`s that have their types replaced are:
473 /// - The `resume` argument itself.
474 /// - The argument to `get_context`.
475 /// - The yielded value of a `yield`.
477 /// The `ResumeTy` hides a `&mut Context<'_>` behind an unsafe raw pointer, and the
478 /// `get_context` function is being used to convert that back to a `&mut Context<'_>`.
480 /// Ideally the async lowering would not use the `ResumeTy`/`get_context` indirection,
481 /// but rather directly use `&mut Context<'_>`, however that would currently
482 /// lead to higher-kinded lifetime errors.
483 /// See <https://github.com/rust-lang/rust/issues/105501>.
485 /// The async lowering step and the type / lifetime inference / checking are
486 /// still using the `ResumeTy` indirection for the time being, and that indirection
487 /// is removed here. After this transform, the generator body only knows about `&mut Context<'_>`.
488 fn transform_async_context<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
489 let context_mut_ref = tcx.mk_task_context();
491 // replace the type of the `resume` argument
492 replace_resume_ty_local(tcx, body, Local::new(2), context_mut_ref);
494 let get_context_def_id = tcx.require_lang_item(LangItem::GetContext, None);
496 for bb in BasicBlock::new(0)..body.basic_blocks.next_index() {
497 let bb_data = &body[bb];
498 if bb_data.is_cleanup {
502 match &bb_data.terminator().kind {
503 TerminatorKind::Call { func, .. } => {
504 let func_ty = func.ty(body, tcx);
505 if let ty::FnDef(def_id, _) = *func_ty.kind() {
506 if def_id == get_context_def_id {
507 let local = eliminate_get_context_call(&mut body[bb]);
508 replace_resume_ty_local(tcx, body, local, context_mut_ref);
514 TerminatorKind::Yield { resume_arg, .. } => {
515 replace_resume_ty_local(tcx, body, resume_arg.local, context_mut_ref);
522 fn eliminate_get_context_call<'tcx>(bb_data: &mut BasicBlockData<'tcx>) -> Local {
523 let terminator = bb_data.terminator.take().unwrap();
524 if let TerminatorKind::Call { mut args, destination, target, .. } = terminator.kind {
525 let arg = args.pop().unwrap();
526 let local = arg.place().unwrap().local;
528 let arg = Rvalue::Use(arg);
529 let assign = Statement {
530 source_info: terminator.source_info,
531 kind: StatementKind::Assign(Box::new((destination, arg))),
533 bb_data.statements.push(assign);
534 bb_data.terminator = Some(Terminator {
535 source_info: terminator.source_info,
536 kind: TerminatorKind::Goto { target: target.unwrap() },
544 #[cfg_attr(not(debug_assertions), allow(unused))]
545 fn replace_resume_ty_local<'tcx>(
547 body: &mut Body<'tcx>,
549 context_mut_ref: Ty<'tcx>,
551 let local_ty = std::mem::replace(&mut body.local_decls[local].ty, context_mut_ref);
552 // We have to replace the `ResumeTy` that is used for type and borrow checking
553 // with `&mut Context<'_>` in MIR.
554 #[cfg(debug_assertions)]
556 if let ty::Adt(resume_ty_adt, _) = local_ty.kind() {
557 let expected_adt = tcx.adt_def(tcx.require_lang_item(LangItem::ResumeTy, None));
558 assert_eq!(*resume_ty_adt, expected_adt);
560 panic!("expected `ResumeTy`, found `{:?}`", local_ty);
565 struct LivenessInfo {
566 /// Which locals are live across any suspension point.
567 saved_locals: GeneratorSavedLocals,
569 /// The set of saved locals live at each suspension point.
570 live_locals_at_suspension_points: Vec<BitSet<GeneratorSavedLocal>>,
572 /// Parallel vec to the above with SourceInfo for each yield terminator.
573 source_info_at_suspension_points: Vec<SourceInfo>,
575 /// For every saved local, the set of other saved locals that are
576 /// storage-live at the same time as this local. We cannot overlap locals in
577 /// the layout which have conflicting storage.
578 storage_conflicts: BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal>,
580 /// For every suspending block, the locals which are storage-live across
581 /// that suspension point.
582 storage_liveness: IndexVec<BasicBlock, Option<BitSet<Local>>>,
585 fn locals_live_across_suspend_points<'tcx>(
588 always_live_locals: &BitSet<Local>,
591 let body_ref: &Body<'_> = &body;
593 // Calculate when MIR locals have live storage. This gives us an upper bound of their
595 let mut storage_live = MaybeStorageLive::new(std::borrow::Cow::Borrowed(always_live_locals))
596 .into_engine(tcx, body_ref)
597 .iterate_to_fixpoint()
598 .into_results_cursor(body_ref);
600 // Calculate the MIR locals which have been previously
601 // borrowed (even if they are still active).
602 let borrowed_locals_results =
603 MaybeBorrowedLocals.into_engine(tcx, body_ref).pass_name("generator").iterate_to_fixpoint();
605 let mut borrowed_locals_cursor =
606 rustc_mir_dataflow::ResultsCursor::new(body_ref, &borrowed_locals_results);
608 // Calculate the MIR locals that we actually need to keep storage around
610 let requires_storage_results = MaybeRequiresStorage::new(body, &borrowed_locals_results)
611 .into_engine(tcx, body_ref)
612 .iterate_to_fixpoint();
613 let mut requires_storage_cursor =
614 rustc_mir_dataflow::ResultsCursor::new(body_ref, &requires_storage_results);
616 // Calculate the liveness of MIR locals ignoring borrows.
617 let mut liveness = MaybeLiveLocals
618 .into_engine(tcx, body_ref)
619 .pass_name("generator")
620 .iterate_to_fixpoint()
621 .into_results_cursor(body_ref);
623 let mut storage_liveness_map = IndexVec::from_elem(None, &body.basic_blocks);
624 let mut live_locals_at_suspension_points = Vec::new();
625 let mut source_info_at_suspension_points = Vec::new();
626 let mut live_locals_at_any_suspension_point = BitSet::new_empty(body.local_decls.len());
628 for (block, data) in body.basic_blocks.iter_enumerated() {
629 if let TerminatorKind::Yield { .. } = data.terminator().kind {
630 let loc = Location { block, statement_index: data.statements.len() };
632 liveness.seek_to_block_end(block);
633 let mut live_locals: BitSet<_> = BitSet::new_empty(body.local_decls.len());
634 live_locals.union(liveness.get());
637 // The `liveness` variable contains the liveness of MIR locals ignoring borrows.
638 // This is correct for movable generators since borrows cannot live across
639 // suspension points. However for immovable generators we need to account for
640 // borrows, so we conservatively assume that all borrowed locals are live until
641 // we find a StorageDead statement referencing the locals.
642 // To do this we just union our `liveness` result with `borrowed_locals`, which
643 // contains all the locals which has been borrowed before this suspension point.
644 // If a borrow is converted to a raw reference, we must also assume that it lives
645 // forever. Note that the final liveness is still bounded by the storage liveness
646 // of the local, which happens using the `intersect` operation below.
647 borrowed_locals_cursor.seek_before_primary_effect(loc);
648 live_locals.union(borrowed_locals_cursor.get());
651 // Store the storage liveness for later use so we can restore the state
652 // after a suspension point
653 storage_live.seek_before_primary_effect(loc);
654 storage_liveness_map[block] = Some(storage_live.get().clone());
656 // Locals live are live at this point only if they are used across
657 // suspension points (the `liveness` variable)
658 // and their storage is required (the `storage_required` variable)
659 requires_storage_cursor.seek_before_primary_effect(loc);
660 live_locals.intersect(requires_storage_cursor.get());
662 // The generator argument is ignored.
663 live_locals.remove(SELF_ARG);
665 debug!("loc = {:?}, live_locals = {:?}", loc, live_locals);
667 // Add the locals live at this suspension point to the set of locals which live across
668 // any suspension points
669 live_locals_at_any_suspension_point.union(&live_locals);
671 live_locals_at_suspension_points.push(live_locals);
672 source_info_at_suspension_points.push(data.terminator().source_info);
676 debug!("live_locals_anywhere = {:?}", live_locals_at_any_suspension_point);
677 let saved_locals = GeneratorSavedLocals(live_locals_at_any_suspension_point);
679 // Renumber our liveness_map bitsets to include only the locals we are
681 let live_locals_at_suspension_points = live_locals_at_suspension_points
683 .map(|live_here| saved_locals.renumber_bitset(&live_here))
686 let storage_conflicts = compute_storage_conflicts(
689 always_live_locals.clone(),
690 requires_storage_results,
695 live_locals_at_suspension_points,
696 source_info_at_suspension_points,
698 storage_liveness: storage_liveness_map,
/// The set of `Local`s that must be saved across yield points.
///
/// `GeneratorSavedLocal` is indexed in terms of the elements in this set;
/// i.e. `GeneratorSavedLocal::new(1)` corresponds to the second local
/// included in this set.
struct GeneratorSavedLocals(BitSet<Local>);
709 impl GeneratorSavedLocals {
710 /// Returns an iterator over each `GeneratorSavedLocal` along with the `Local` it corresponds
712 fn iter_enumerated(&self) -> impl '_ + Iterator<Item = (GeneratorSavedLocal, Local)> {
713 self.iter().enumerate().map(|(i, l)| (GeneratorSavedLocal::from(i), l))
716 /// Transforms a `BitSet<Local>` that contains only locals saved across yield points to the
717 /// equivalent `BitSet<GeneratorSavedLocal>`.
718 fn renumber_bitset(&self, input: &BitSet<Local>) -> BitSet<GeneratorSavedLocal> {
719 assert!(self.superset(&input), "{:?} not a superset of {:?}", self.0, input);
720 let mut out = BitSet::new_empty(self.count());
721 for (saved_local, local) in self.iter_enumerated() {
722 if input.contains(local) {
723 out.insert(saved_local);
729 fn get(&self, local: Local) -> Option<GeneratorSavedLocal> {
730 if !self.contains(local) {
734 let idx = self.iter().take_while(|&l| l < local).count();
735 Some(GeneratorSavedLocal::new(idx))
739 impl ops::Deref for GeneratorSavedLocals {
740 type Target = BitSet<Local>;
742 fn deref(&self) -> &Self::Target {
747 /// For every saved local, looks for which locals are StorageLive at the same
748 /// time. Generates a bitset for every local of all the other locals that may be
749 /// StorageLive simultaneously with that local. This is used in the layout
750 /// computation; see `GeneratorLayout` for more.
751 fn compute_storage_conflicts<'mir, 'tcx>(
752 body: &'mir Body<'tcx>,
753 saved_locals: &GeneratorSavedLocals,
754 always_live_locals: BitSet<Local>,
755 requires_storage: rustc_mir_dataflow::Results<'tcx, MaybeRequiresStorage<'mir, 'tcx>>,
756 ) -> BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal> {
757 assert_eq!(body.local_decls.len(), saved_locals.domain_size());
759 debug!("compute_storage_conflicts({:?})", body.span);
760 debug!("always_live = {:?}", always_live_locals);
762 // Locals that are always live or ones that need to be stored across
763 // suspension points are not eligible for overlap.
764 let mut ineligible_locals = always_live_locals;
765 ineligible_locals.intersect(&**saved_locals);
767 // Compute the storage conflicts for all eligible locals.
768 let mut visitor = StorageConflictVisitor {
770 saved_locals: &saved_locals,
771 local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()),
774 requires_storage.visit_reachable_with(body, &mut visitor);
776 let local_conflicts = visitor.local_conflicts;
778 // Compress the matrix using only stored locals (Local -> GeneratorSavedLocal).
780 // NOTE: Today we store a full conflict bitset for every local. Technically
781 // this is twice as many bits as we need, since the relation is symmetric.
782 // However, in practice these bitsets are not usually large. The layout code
783 // also needs to keep track of how many conflicts each local has, so it's
784 // simpler to keep it this way for now.
785 let mut storage_conflicts = BitMatrix::new(saved_locals.count(), saved_locals.count());
786 for (saved_local_a, local_a) in saved_locals.iter_enumerated() {
787 if ineligible_locals.contains(local_a) {
788 // Conflicts with everything.
789 storage_conflicts.insert_all_into_row(saved_local_a);
791 // Keep overlap information only for stored locals.
792 for (saved_local_b, local_b) in saved_locals.iter_enumerated() {
793 if local_conflicts.contains(local_a, local_b) {
794 storage_conflicts.insert(saved_local_a, saved_local_b);
802 struct StorageConflictVisitor<'mir, 'tcx, 's> {
803 body: &'mir Body<'tcx>,
804 saved_locals: &'s GeneratorSavedLocals,
805 // FIXME(tmandry): Consider using sparse bitsets here once we have good
806 // benchmarks for generators.
807 local_conflicts: BitMatrix<Local, Local>,
810 impl<'mir, 'tcx> rustc_mir_dataflow::ResultsVisitor<'mir, 'tcx>
811 for StorageConflictVisitor<'mir, 'tcx, '_>
813 type FlowState = BitSet<Local>;
815 fn visit_statement_before_primary_effect(
817 state: &Self::FlowState,
818 _statement: &'mir Statement<'tcx>,
821 self.apply_state(state, loc);
824 fn visit_terminator_before_primary_effect(
826 state: &Self::FlowState,
827 _terminator: &'mir Terminator<'tcx>,
830 self.apply_state(state, loc);
834 impl StorageConflictVisitor<'_, '_, '_> {
835 fn apply_state(&mut self, flow_state: &BitSet<Local>, loc: Location) {
836 // Ignore unreachable blocks.
837 if self.body.basic_blocks[loc.block].terminator().kind == TerminatorKind::Unreachable {
841 let mut eligible_storage_live = flow_state.clone();
842 eligible_storage_live.intersect(&**self.saved_locals);
844 for local in eligible_storage_live.iter() {
845 self.local_conflicts.union_row_with(&eligible_storage_live, local);
848 if eligible_storage_live.count() > 1 {
849 trace!("at {:?}, eligible_storage_live={:?}", loc, eligible_storage_live);
854 /// Validates the typeck view of the generator against the actual set of types saved between
856 fn sanitize_witness<'tcx>(
860 upvars: Vec<Ty<'tcx>>,
861 layout: &GeneratorLayout<'tcx>,
863 let did = body.source.def_id();
864 let param_env = tcx.param_env(did);
866 let allowed_upvars = tcx.normalize_erasing_regions(param_env, upvars);
867 let allowed = match witness.kind() {
868 &ty::GeneratorWitness(interior_tys) => {
869 tcx.normalize_erasing_late_bound_regions(param_env, interior_tys)
872 tcx.sess.delay_span_bug(
874 &format!("unexpected generator witness type {:?}", witness.kind()),
880 let mut mismatches = Vec::new();
881 for fty in &layout.field_tys {
882 if fty.ignore_for_traits {
885 let decl_ty = tcx.normalize_erasing_regions(param_env, fty.ty);
887 // Sanity check that typeck knows about the type of locals which are
888 // live across a suspension point
889 if !allowed.contains(&decl_ty) && !allowed_upvars.contains(&decl_ty) {
890 mismatches.push(decl_ty);
894 if !mismatches.is_empty() {
897 "Broken MIR: generator contains type {:?} in MIR, \
898 but typeck only knows about {} and {:?}",
906 fn compute_layout<'tcx>(
908 liveness: LivenessInfo,
911 FxHashMap<Local, (Ty<'tcx>, VariantIdx, usize)>,
912 GeneratorLayout<'tcx>,
913 IndexVec<BasicBlock, Option<BitSet<Local>>>,
917 live_locals_at_suspension_points,
918 source_info_at_suspension_points,
923 // Gather live local types and their indices.
924 let mut locals = IndexVec::<GeneratorSavedLocal, _>::new();
925 let mut tys = IndexVec::<GeneratorSavedLocal, _>::new();
926 for (saved_local, local) in saved_locals.iter_enumerated() {
927 debug!("generator saved local {:?} => {:?}", saved_local, local);
930 let decl = &body.local_decls[local];
933 let ignore_for_traits = if tcx.sess.opts.unstable_opts.drop_tracking_mir {
934 match decl.local_info {
935 // Do not include raw pointers created from accessing `static` items, as those could
936 // well be re-created by another access to the same static.
937 Some(box LocalInfo::StaticRef { is_thread_local, .. }) => !is_thread_local,
938 // Fake borrows are only read by fake reads, so do not have any reality in
939 // post-analysis MIR.
940 Some(box LocalInfo::FakeBorrow) => true,
944 // FIXME(#105084) HIR-based drop tracking does not account for all the temporaries that
945 // MIR building may introduce. This leads to wrongly ignored types, but this is
946 // necessary for internal consistency and to avoid ICEs.
950 GeneratorSavedTy { ty: decl.ty, source_info: decl.source_info, ignore_for_traits };
956 // Leave empty variants for the UNRESUMED, RETURNED, and POISONED states.
957 // In debuginfo, these will correspond to the beginning (UNRESUMED) or end
958 // (RETURNED, POISONED) of the function.
959 let body_span = body.source_scopes[OUTERMOST_SOURCE_SCOPE].span;
960 let mut variant_source_info: IndexVec<VariantIdx, SourceInfo> = [
961 SourceInfo::outermost(body_span.shrink_to_lo()),
962 SourceInfo::outermost(body_span.shrink_to_hi()),
963 SourceInfo::outermost(body_span.shrink_to_hi()),
969 // Build the generator variant field list.
970 // Create a map from local indices to generator struct indices.
971 let mut variant_fields: IndexVec<VariantIdx, IndexVec<Field, GeneratorSavedLocal>> =
972 iter::repeat(IndexVec::new()).take(RESERVED_VARIANTS).collect();
973 let mut remap = FxHashMap::default();
974 for (suspension_point_idx, live_locals) in live_locals_at_suspension_points.iter().enumerate() {
975 let variant_index = VariantIdx::from(RESERVED_VARIANTS + suspension_point_idx);
976 let mut fields = IndexVec::new();
977 for (idx, saved_local) in live_locals.iter().enumerate() {
978 fields.push(saved_local);
979 // Note that if a field is included in multiple variants, we will
980 // just use the first one here. That's fine; fields do not move
981 // around inside generators, so it doesn't matter which variant
982 // index we access them by.
983 remap.entry(locals[saved_local]).or_insert((tys[saved_local].ty, variant_index, idx));
985 variant_fields.push(fields);
986 variant_source_info.push(source_info_at_suspension_points[suspension_point_idx]);
988 debug!("generator variant_fields = {:?}", variant_fields);
989 debug!("generator storage_conflicts = {:#?}", storage_conflicts);
992 GeneratorLayout { field_tys: tys, variant_fields, variant_source_info, storage_conflicts };
995 (remap, layout, storage_liveness)
998 /// Replaces the entry point of `body` with a block that switches on the generator discriminant and
999 /// dispatches to blocks according to `cases`.
/// The new dispatch block is inserted at index 0, so every pre-existing block
/// (and every terminator successor) is shifted up by one — see the renumbering
/// loop at the bottom.
1001 /// After this function, the former entry point of the function will be bb1.
1002 fn insert_switch<'tcx>(
1003 body: &mut Body<'tcx>,
// `cases`: (discriminant value, target block) pairs for the SwitchInt.
1004 cases: Vec<(usize, BasicBlock)>,
1005 transform: &TransformVisitor<'tcx>,
// `default`: terminator used for any discriminant not listed in `cases`.
1006 default: TerminatorKind<'tcx>,
// A fresh block holding `default` becomes the switch's fallback target.
1008 let default_block = insert_term_block(body, default);
// `get_discr` yields a statement that reads the state discriminant plus the
// place it is read into; the statement becomes the dispatch block's body.
1009 let (assign, discr) = transform.get_discr(body);
1010 let switch_targets =
1011 SwitchTargets::new(cases.iter().map(|(i, bb)| ((*i) as u128, *bb)), default_block);
1012 let switch = TerminatorKind::SwitchInt { discr: Operand::Move(discr), targets: switch_targets };
1014 let source_info = SourceInfo::outermost(body.span);
// Insert the dispatch block at position 0 of the raw block vector.
1015 body.basic_blocks_mut().raw.insert(
1018 statements: vec![assign],
1019 terminator: Some(Terminator { source_info, kind: switch }),
1024 let blocks = body.basic_blocks_mut().iter_mut();
// Renumber all existing successor edges to account for the block inserted
// at index 0; the old entry point is now bb1.
1026 for target in blocks.flat_map(|b| b.terminator_mut().successors_mut()) {
1027 *target = BasicBlock::new(target.index() + 1);
/// Elaborates every `Drop` of the generator's self argument (`SELF_ARG`) into
/// an explicit drop ladder for the upvars, using the shim drop elaborator.
/// This runs after the state transform has inserted `drop(generator_struct)`
/// (see `insert_clean_drop`) and rewrites those terminators via a `MirPatch`.
1031 fn elaborate_generator_drops<'tcx>(tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
1032 use crate::shim::DropShimElaborator;
1033 use rustc_middle::mir::patch::MirPatch;
1034 use rustc_mir_dataflow::elaborate_drops::{elaborate_drop, Unwind};
1036 // Note that `elaborate_drops` only drops the upvars of a generator, and
1037 // this is ok because `open_drop` can only be reached within that own
1038 // generator's resume function.
1040 let def_id = body.source.def_id();
1041 let param_env = tcx.param_env(def_id);
1043 let mut elaborator = DropShimElaborator { body, patch: MirPatch::new(body), tcx, param_env };
// Scan all blocks for `Drop` terminators whose place is exactly the
// self argument; only those get elaborated here.
1045 for (block, block_data) in body.basic_blocks.iter_enumerated() {
1046 let (target, unwind, source_info) = match block_data.terminator() {
1047 Terminator { source_info, kind: TerminatorKind::Drop { place, target, unwind } } => {
1048 if let Some(local) = place.as_local() {
1049 if local == SELF_ARG {
1050 (target, unwind, source_info)
// Cleanup blocks must not unwind again; otherwise fall back to the
// patch's shared resume block when no unwind target is present.
1060 let unwind = if block_data.is_cleanup {
1063 Unwind::To(unwind.unwrap_or_else(|| elaborator.patch.resume_block()))
1068 Place::from(SELF_ARG),
// Apply all accumulated rewrites to the body in one go.
1075 elaborator.patch.apply(body);
/// Builds the generator's drop shim from a clone of the transformed MIR:
/// a function that switches on the state discriminant and, for the
/// UNRESUMED state, jumps to `drop_clean` to destroy the upvars.
/// Returns the shim body (the caller stores it as `generator_drop`).
1078 fn create_generator_drop_shim<'tcx>(
1080 transform: &TransformVisitor<'tcx>,
1082 body: &mut Body<'tcx>,
// `drop_clean`: block created by `insert_clean_drop` that drops the
// unresumed generator's upvars.
1083 drop_clean: BasicBlock,
// Work on a clone so the resume function's MIR is left untouched.
1085 let mut body = body.clone();
1086 body.arg_count = 1; // make sure the resume argument is not included here
1088 let source_info = SourceInfo::outermost(body.span);
// One case per suspension point that has a drop target, plus UNRESUMED.
1090 let mut cases = create_cases(&mut body, transform, Operation::Drop);
1092 cases.insert(0, (UNRESUMED, drop_clean));
1094 // The returned state and the poisoned state fall through to the default
1095 // case which is just to return
1097 insert_switch(&mut body, cases, &transform, TerminatorKind::Return);
// Inside the drop shim, `GeneratorDrop` terminators simply return.
1099 for block in body.basic_blocks_mut() {
1100 let kind = &mut block.terminator_mut().kind;
1101 if let TerminatorKind::GeneratorDrop = *kind {
1102 *kind = TerminatorKind::Return;
1106 // Replace the return variable
// The drop shim returns unit, not GeneratorState/Poll.
1107 body.local_decls[RETURN_PLACE] = LocalDecl::with_source_info(tcx.mk_unit(), source_info);
1109 make_generator_state_argument_indirect(tcx, &mut body);
1111 // Change the generator argument from &mut to *mut
1112 body.local_decls[SELF_ARG] = LocalDecl::with_source_info(
1113 tcx.mk_ptr(ty::TypeAndMut { ty: gen_ty, mutbl: hir::Mutability::Mut }),
1117 // Make sure we remove dead blocks to remove
1118 // unrelated code from the resume part of the function
1119 simplify::remove_dead_blocks(tcx, &mut body);
1121 dump_mir(tcx, false, "generator_drop", &0, &body, |_, _| Ok(()));
/// Appends a new empty basic block whose terminator is `kind` and returns
/// its index (via `push`, whose return value is the new block's id).
1126 fn insert_term_block<'tcx>(body: &mut Body<'tcx>, kind: TerminatorKind<'tcx>) -> BasicBlock {
1127 let source_info = SourceInfo::outermost(body.span);
1128 body.basic_blocks_mut().push(BasicBlockData {
1129 statements: Vec::new(),
1130 terminator: Some(Terminator { source_info, kind }),
/// Appends a block that always panics with `message`: an `Assert` on the
/// constant `false`. Used for the "resumed after return/panic" states.
1135 fn insert_panic_block<'tcx>(
1137 body: &mut Body<'tcx>,
1138 message: AssertMessage<'tcx>,
// Index the new block will get once pushed (current block count).
1140 let assert_block = BasicBlock::new(body.basic_blocks.len());
// `cond: false, expected: <..>` makes the assert fail unconditionally.
1141 let term = TerminatorKind::Assert {
1142 cond: Operand::Constant(Box::new(Constant {
1145 literal: ConstantKind::from_bool(tcx, false),
// NOTE(review): `target` points at the block itself; the edge is never
// taken because the assert always fails — TODO confirm against full source.
1149 target: assert_block,
1153 let source_info = SourceInfo::outermost(body.span);
1154 body.basic_blocks_mut().push(BasicBlockData {
1155 statements: Vec::new(),
1156 terminator: Some(Terminator { source_info, kind: term }),
/// Returns whether `body` can ever return normally: false if the return type
/// is (privately) uninhabited, otherwise true iff some block ends in `Return`.
1163 fn can_return<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>, param_env: ty::ParamEnv<'tcx>) -> bool {
1164 // Returning from a function with an uninhabited return type is undefined behavior.
1165 if body.return_ty().is_privately_uninhabited(tcx, param_env) {
1169 // If there's a return terminator the function may return.
1170 for block in body.basic_blocks.iter() {
1171 if let TerminatorKind::Return = block.terminator().kind {
1176 // Otherwise the function can't return.
/// Conservatively determines whether the generator body can begin unwinding:
/// false under `-C panic=abort`, otherwise true iff any terminator that can
/// start an unwind (`Drop`, `Call`, `Assert`, ...) is present.
/// Must be called before the generator transform (a `Yield` is a bug here).
1180 fn can_unwind<'tcx>(tcx: TyCtxt<'tcx>, body: &Body<'tcx>) -> bool {
1181 // Nothing can unwind when landing pads are off.
1182 if tcx.sess.panic_strategy() == PanicStrategy::Abort {
1186 // Unwinds can only start at certain terminators.
1187 for block in body.basic_blocks.iter() {
1188 match block.terminator().kind {
1189 // These never unwind.
1190 TerminatorKind::Goto { .. }
1191 | TerminatorKind::SwitchInt { .. }
1192 | TerminatorKind::Abort
1193 | TerminatorKind::Return
1194 | TerminatorKind::Unreachable
1195 | TerminatorKind::GeneratorDrop
1196 | TerminatorKind::FalseEdge { .. }
1197 | TerminatorKind::FalseUnwind { .. } => {}
1199 // Resume will *continue* unwinding, but if there's no other unwinding terminator it
1200 // will never be reached.
1201 TerminatorKind::Resume => {}
1203 TerminatorKind::Yield { .. } => {
1204 unreachable!("`can_unwind` called before generator transform")
1207 // These may unwind.
1208 TerminatorKind::Drop { .. }
1209 | TerminatorKind::DropAndReplace { .. }
1210 | TerminatorKind::Call { .. }
1211 | TerminatorKind::InlineAsm { .. }
1212 | TerminatorKind::Assert { .. } => return true,
1216 // If we didn't find an unwinding terminator, the function cannot unwind.
/// Turns the transformed MIR into the `Generator::resume` / `Future::poll`
/// body: adds a poisoning path for unwinds, builds the state-dispatch switch
/// (UNRESUMED -> old entry, RETURNED/POISONED -> panic blocks), and rewrites
/// the self argument to be indirect and pinned.
1220 fn create_generator_resume_function<'tcx>(
1222 transform: TransformVisitor<'tcx>,
1223 body: &mut Body<'tcx>,
1226 let can_unwind = can_unwind(tcx, body);
1228 // Poison the generator when it unwinds
// The poison block sets the POISONED discriminant and then resumes the unwind.
1230 let source_info = SourceInfo::outermost(body.span);
1231 let poison_block = body.basic_blocks_mut().push(BasicBlockData {
1232 statements: vec![transform.set_discr(VariantIdx::new(POISONED), source_info)],
1233 terminator: Some(Terminator { source_info, kind: TerminatorKind::Resume }),
1237 for (idx, block) in body.basic_blocks_mut().iter_enumerated_mut() {
1238 let source_info = block.terminator().source_info;
1240 if let TerminatorKind::Resume = block.terminator().kind {
1241 // An existing `Resume` terminator is redirected to jump to our dedicated
1242 // "poisoning block" above.
1243 if idx != poison_block {
1244 *block.terminator_mut() = Terminator {
1246 kind: TerminatorKind::Goto { target: poison_block },
1249 } else if !block.is_cleanup {
1250 // Any terminators that *can* unwind but don't have an unwind target set are also
1251 // pointed at our poisoning block (unless they're part of the cleanup path).
1252 if let Some(unwind @ None) = block.terminator_mut().unwind_mut() {
1253 *unwind = Some(poison_block);
1259 let mut cases = create_cases(body, &transform, Operation::Resume);
1261 use rustc_middle::mir::AssertKind::{ResumedAfterPanic, ResumedAfterReturn};
1263 // Jump to the entry point on the unresumed
1264 cases.insert(0, (UNRESUMED, BasicBlock::new(0)));
1266 // Panic when resumed on the returned or poisoned state
1267 let generator_kind = body.generator_kind().unwrap();
1272 (POISONED, insert_panic_block(tcx, body, ResumedAfterPanic(generator_kind))),
1279 (RETURNED, insert_panic_block(tcx, body, ResumedAfterReturn(generator_kind))),
// Unknown discriminants are unreachable.
1283 insert_switch(body, cases, &transform, TerminatorKind::Unreachable);
1285 make_generator_state_argument_indirect(tcx, body);
1286 make_generator_state_argument_pinned(tcx, body);
1288 // Make sure we remove dead blocks to remove
1289 // unrelated code from the drop part of the function
1290 simplify::remove_dead_blocks(tcx, body);
1292 dump_mir(tcx, false, "generator_resume", &0, body, |_, _| Ok(()));
/// Appends a block that drops the whole generator struct (`SELF_ARG`) and then
/// returns; used for the UNRESUMED state, where only upvars need destroying.
/// Returns the new block's index (later wired up as the UNRESUMED case).
1295 fn insert_clean_drop(body: &mut Body<'_>) -> BasicBlock {
1296 let return_block = insert_term_block(body, TerminatorKind::Return);
// `unwind: None` here: the unwind edge is filled in by drop elaboration.
1299 TerminatorKind::Drop { place: Place::from(SELF_ARG), target: return_block, unwind: None };
1300 let source_info = SourceInfo::outermost(body.span);
1302 // Create a block to destroy an unresumed generators. This can only destroy upvars.
1303 body.basic_blocks_mut().push(BasicBlockData {
1304 statements: Vec::new(),
1305 terminator: Some(Terminator { source_info, kind: term }),
1310 /// An operation that can be performed on a generator.
// Variants visible below: `Resume` and `Drop` — the two shims the transform
// builds. `target_block` picks the per-suspension-point jump target:
// resuming always has a target; dropping only when the point has a drop path.
1311 #[derive(PartialEq, Copy, Clone)]
1318 fn target_block(self, point: &SuspensionPoint<'_>) -> Option<BasicBlock> {
1320 Operation::Resume => Some(point.resume),
1321 Operation::Drop => point.drop,
/// Builds the `(state, block)` dispatch cases for `insert_switch`, one per
/// suspension point that has a target for `operation`. Each case's block
/// re-establishes storage (`StorageLive`) for locals live at that point,
/// moves the resume argument into place (for `Operation::Resume`), and then
/// jumps to the real target.
1326 fn create_cases<'tcx>(
1327 body: &mut Body<'tcx>,
1328 transform: &TransformVisitor<'tcx>,
1329 operation: Operation,
1330 ) -> Vec<(usize, BasicBlock)> {
1331 let source_info = SourceInfo::outermost(body.span);
1336 .filter_map(|point| {
1337 // Find the target for this suspension point, if applicable
1338 operation.target_block(point).map(|target| {
1339 let mut statements = Vec::new();
1341 // Create StorageLive instructions for locals with live storage
1342 for i in 0..(body.local_decls.len()) {
1344 // The resume argument is live on function entry. Don't insert a
1345 // `StorageLive`, or the following `Assign` will read from uninitialized
1350 let l = Local::new(i);
// A local needs `StorageLive` here only if its storage was live at the
// suspension point, it is not stored in the generator struct (`remap`),
// and it is not always-storage-live anyway.
1351 let needs_storage_live = point.storage_liveness.contains(l)
1352 && !transform.remap.contains_key(&l)
1353 && !transform.always_live_locals.contains(l);
1354 if needs_storage_live {
1356 .push(Statement { source_info, kind: StatementKind::StorageLive(l) });
1360 if operation == Operation::Resume {
1361 // Move the resume argument to the destination place of the `Yield` terminator
1362 let resume_arg = Local::new(2); // 0 = return, 1 = self
1363 statements.push(Statement {
1365 kind: StatementKind::Assign(Box::new((
1367 Rvalue::Use(Operand::Move(resume_arg.into())),
1372 // Then jump to the real target
1373 let block = body.basic_blocks_mut().push(BasicBlockData {
1375 terminator: Some(Terminator {
1377 kind: TerminatorKind::Goto { target },
// The case key is the suspension point's state discriminant.
1382 (point.state, block)
/// Query provider: computes the `GeneratorLayout` (witness types) for a
/// generator from its promoted MIR, without running the full state transform.
/// Used by MIR-based drop tracking to know which types live across suspends.
1388 #[instrument(level = "debug", skip(tcx), ret)]
1389 pub(crate) fn mir_generator_witnesses<'tcx>(
1392 ) -> GeneratorLayout<'tcx> {
1393 let def_id = def_id.expect_local();
// Read (not steal) the promoted MIR; this query must not consume it.
1395 let (body, _) = tcx.mir_promoted(ty::WithOptConstParam::unknown(def_id));
1396 let body = body.borrow();
1399 // The first argument is the generator type passed by value
1400 let gen_ty = body.local_decls[ty::CAPTURE_STRUCT_LOCAL].ty;
1402 // Get the interior types and substs which typeck computed
1403 let (upvars, interior, movable) = match *gen_ty.kind() {
1404 ty::Generator(_, substs, movability) => {
1405 let substs = substs.as_generator();
1407 substs.upvar_tys().collect::<Vec<_>>(),
1409 movability == hir::Movability::Movable,
1412 _ => span_bug!(body.span, "unexpected generator type {}", gen_ty),
1415 // When first entering the generator, move the resume argument into its new local.
1416 let always_live_locals = always_storage_live_locals(&body);
1418 let liveness_info = locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);
1420 // Extract locals which are live across suspension point into `layout`
1421 // `remap` gives a mapping from local indices onto generator struct indices
1422 // `storage_liveness` tells us which locals have live storage at suspension points
1423 let (_, generator_layout, _) = compute_layout(tcx, liveness_info, body);
// Lint `must_not_suspend` types / sanity-check the witness against typeck,
// depending on whether MIR drop tracking is enabled.
1425 if tcx.sess.opts.unstable_opts.drop_tracking_mir {
1426 check_suspend_tys(tcx, &generator_layout, &body);
1428 sanitize_witness(tcx, body, interior, upvars, &generator_layout);
/// The main generator-to-state-machine pass. For generator bodies it:
/// computes the state discriminant type, rewrites the return type to
/// `GeneratorState<Y, R>` (or `Poll<R>` for async), saves locals live across
/// suspends into the generator struct, and finally builds the resume function
/// and the drop shim. Non-generator bodies are left untouched.
1434 impl<'tcx> MirPass<'tcx> for StateTransform {
1435 fn run_pass(&self, tcx: TyCtxt<'tcx>, body: &mut Body<'tcx>) {
1436 let Some(yield_ty) = body.yield_ty() else {
1437 // This only applies to generators
// The drop shim must not exist yet — this pass creates it below.
1441 assert!(body.generator_drop().is_none());
1443 // The first argument is the generator type passed by value
1444 let gen_ty = body.local_decls.raw[1].ty;
1446 // Get the discriminant type and substs which typeck computed
1447 let (discr_ty, movable) = match *gen_ty.kind() {
1448 ty::Generator(_, substs, movability) => {
1449 let substs = substs.as_generator();
1450 (substs.discr_ty(tcx), movability == hir::Movability::Movable)
// Non-generator first argument: report an ICE-level bug but keep going.
1454 .delay_span_bug(body.span, &format!("unexpected generator type {}", gen_ty));
1459 let is_async_kind = matches!(body.generator_kind(), Some(GeneratorKind::Async(_)));
1460 let (state_adt_ref, state_substs) = if is_async_kind {
1461 // Compute Poll<return_ty>
1462 let poll_did = tcx.require_lang_item(LangItem::Poll, None);
1463 let poll_adt_ref = tcx.adt_def(poll_did);
1464 let poll_substs = tcx.intern_substs(&[body.return_ty().into()]);
1465 (poll_adt_ref, poll_substs)
1467 // Compute GeneratorState<yield_ty, return_ty>
1468 let state_did = tcx.require_lang_item(LangItem::GeneratorState, None);
1469 let state_adt_ref = tcx.adt_def(state_did);
1470 let state_substs = tcx.intern_substs(&[yield_ty.into(), body.return_ty().into()]);
1471 (state_adt_ref, state_substs)
1473 let ret_ty = tcx.mk_adt(state_adt_ref, state_substs);
1475 // We rename RETURN_PLACE which has type mir.return_ty to new_ret_local
1476 // RETURN_PLACE then is a fresh unused local with type ret_ty.
1477 let new_ret_local = replace_local(RETURN_PLACE, ret_ty, body, tcx);
1479 // Replace all occurrences of `ResumeTy` with `&mut Context<'_>` within async bodies.
1481 transform_async_context(tcx, body);
1484 // We also replace the resume argument and insert an `Assign`.
1485 // This is needed because the resume argument `_2` might be live across a `yield`, in which
1486 // case there is no `Assign` to it that the transform can turn into a store to the generator
1487 // state. After the yield the slot in the generator state would then be uninitialized.
1488 let resume_local = Local::new(2);
1490 if is_async_kind { tcx.mk_task_context() } else { body.local_decls[resume_local].ty };
1491 let new_resume_local = replace_local(resume_local, resume_ty, body, tcx);
1493 // When first entering the generator, move the resume argument into its new local.
1494 let source_info = SourceInfo::outermost(body.span);
1495 let stmts = &mut body.basic_blocks_mut()[BasicBlock::new(0)].statements;
1500 kind: StatementKind::Assign(Box::new((
1501 new_resume_local.into(),
1502 Rvalue::Use(Operand::Move(resume_local.into())),
1507 let always_live_locals = always_storage_live_locals(&body);
1510 locals_live_across_suspend_points(tcx, body, &always_live_locals, movable);
// Optional sanity pass (-Zvalidate-mir): no two saved locals whose storage
// does not conflict may appear on both sides of one assignment.
1512 if tcx.sess.opts.unstable_opts.validate_mir {
1513 let mut vis = EnsureGeneratorFieldAssignmentsNeverAlias {
1514 assigned_local: None,
1515 saved_locals: &liveness_info.saved_locals,
1516 storage_conflicts: &liveness_info.storage_conflicts,
1519 vis.visit_body(body);
1522 // Extract locals which are live across suspension point into `layout`
1523 // `remap` gives a mapping from local indices onto generator struct indices
1524 // `storage_liveness` tells us which locals have live storage at suspension points
1525 let (remap, layout, storage_liveness) = compute_layout(tcx, liveness_info, body);
// Decide this before the transform: `Return` terminators change meaning.
1527 let can_return = can_return(tcx, body, tcx.param_env(body.source.def_id()));
1529 // Run the transformation which converts Places from Local to generator struct
1530 // accesses for locals in `remap`.
1531 // It also rewrites `return x` and `yield y` as writing a new generator state and returning
1532 // either GeneratorState::Complete(x) and GeneratorState::Yielded(y),
1533 // or Poll::Ready(x) and Poll::Pending respectively depending on `is_async_kind`.
1534 let mut transform = TransformVisitor {
1542 suspension_points: Vec::new(),
1546 transform.visit_body(body);
1548 // Update our MIR struct to reflect the changes we've made
1549 body.arg_count = 2; // self, resume arg
1550 body.spread_arg = None;
// The body is no longer "a generator with a yield type": record the layout.
1552 body.generator.as_mut().unwrap().yield_ty = None;
1553 body.generator.as_mut().unwrap().generator_layout = Some(layout);
1555 // Insert `drop(generator_struct)` which is used to drop upvars for generators in
1556 // the unresumed state.
1557 // This is expanded to a drop ladder in `elaborate_generator_drops`.
1558 let drop_clean = insert_clean_drop(body);
1560 dump_mir(tcx, false, "generator_pre-elab", &0, body, |_, _| Ok(()));
1562 // Expand `drop(generator_struct)` to a drop ladder which destroys upvars.
1563 // If any upvars are moved out of, drop elaboration will handle upvar destruction.
1564 // However we need to also elaborate the code generated by `insert_clean_drop`.
1565 elaborate_generator_drops(tcx, body);
1567 dump_mir(tcx, false, "generator_post-transform", &0, body, |_, _| Ok(()));
1569 // Create a copy of our MIR and use it to create the drop shim for the generator
1570 let drop_shim = create_generator_drop_shim(tcx, &transform, gen_ty, body, drop_clean);
1572 body.generator.as_mut().unwrap().generator_drop = Some(drop_shim);
1574 // Create the Generator::resume / Future::poll function
1575 create_generator_resume_function(tcx, transform, body, can_return);
1577 // Run derefer to fix Derefs that are not in the first place
1578 deref_finder(tcx, body);
1582 /// Looks for any assignments between locals (e.g., `_4 = _5`) that will both be converted to fields
1583 /// in the generator state machine but whose storage is not marked as conflicting
1585 /// Validation needs to happen immediately *before* `TransformVisitor` is invoked, not after.
1587 /// This condition would arise when the assignment is the last use of `_5` but the initial
1588 /// definition of `_4` if we weren't extra careful to mark all locals used inside a statement as
1589 /// conflicting. Non-conflicting generator saved locals may be stored at the same location within
1590 /// the generator state machine, which would result in ill-formed MIR: the left-hand and right-hand
1591 /// sides of an assignment may not alias. This caused a miscompilation in [#73137].
1593 /// [#73137]: https://github.com/rust-lang/rust/issues/73137
1594 struct EnsureGeneratorFieldAssignmentsNeverAlias<'a> {
// Mapping from MIR locals to generator-saved locals.
1595 saved_locals: &'a GeneratorSavedLocals,
// Symmetric matrix: which pairs of saved locals may share storage.
1596 storage_conflicts: &'a BitMatrix<GeneratorSavedLocal, GeneratorSavedLocal>,
// LHS of the assignment currently being visited, if any (set by
// `check_assigned_place`, read by `visit_place`).
1597 assigned_local: Option<GeneratorSavedLocal>,
1600 impl EnsureGeneratorFieldAssignmentsNeverAlias<'_> {
// Maps a *direct* (non-indirect) place to its generator-saved local, if the
// base local is saved; indirect places are out of scope for this check.
1601 fn saved_local_for_direct_place(&self, place: Place<'_>) -> Option<GeneratorSavedLocal> {
1602 if place.is_indirect() {
1606 self.saved_locals.get(place.local)
// Runs `f` with `assigned_local` set to `place`'s saved local (when it has
// one), then clears it. The assert guards against nested assignments.
1609 fn check_assigned_place(&mut self, place: Place<'_>, f: impl FnOnce(&mut Self)) {
1610 if let Some(assigned_local) = self.saved_local_for_direct_place(place) {
1611 assert!(self.assigned_local.is_none(), "`check_assigned_place` must not recurse");
1613 self.assigned_local = Some(assigned_local);
1615 self.assigned_local = None;
// Visitor that walks assignments (statements, calls, yields) and asserts that
// whenever two generator-saved locals appear on both sides of an assignment,
// their storage is recorded as conflicting (so they won't share a slot).
1620 impl<'tcx> Visitor<'tcx> for EnsureGeneratorFieldAssignmentsNeverAlias<'_> {
1621 fn visit_place(&mut self, place: &Place<'tcx>, context: PlaceContext, location: Location) {
1622 let Some(lhs) = self.assigned_local else {
1623 // This visitor only invokes `visit_place` for the right-hand side of an assignment
1624 // and only after setting `self.assigned_local`. However, the default impl of
1625 // `Visitor::super_body` may call `visit_place` with a `NonUseContext` for places
1626 // with debuginfo. Ignore them here.
1627 assert!(!context.is_use());
1631 let Some(rhs) = self.saved_local_for_direct_place(*place) else { return };
// The actual check: LHS and RHS saved locals must be marked conflicting.
1633 if !self.storage_conflicts.contains(lhs, rhs) {
1635 "Assignment between generator saved locals whose storage is not \
1636 marked as conflicting: {:?}: {:?} = {:?}",
1644 fn visit_statement(&mut self, statement: &Statement<'tcx>, location: Location) {
1645 match &statement.kind {
1646 StatementKind::Assign(box (lhs, rhs)) => {
1647 self.check_assigned_place(*lhs, |this| this.visit_rvalue(rhs, location));
// Non-assigning statements carry no LHS/RHS aliasing to check.
1650 StatementKind::FakeRead(..)
1651 | StatementKind::SetDiscriminant { .. }
1652 | StatementKind::Deinit(..)
1653 | StatementKind::StorageLive(_)
1654 | StatementKind::StorageDead(_)
1655 | StatementKind::Retag(..)
1656 | StatementKind::AscribeUserType(..)
1657 | StatementKind::Coverage(..)
1658 | StatementKind::Intrinsic(..)
1659 | StatementKind::Nop => {}
1663 fn visit_terminator(&mut self, terminator: &Terminator<'tcx>, location: Location) {
1664 // Checking for aliasing in terminators is probably overkill, but until we have actual
1665 // semantics, we should be conservative here.
1666 match &terminator.kind {
// A call's destination is an assignment target; treat func + args as RHS.
1667 TerminatorKind::Call {
1676 self.check_assigned_place(*destination, |this| {
1677 this.visit_operand(func, location);
1679 this.visit_operand(arg, location);
// `Yield` writes the resume value into `resume_arg` — also an assignment.
1684 TerminatorKind::Yield { value, resume: _, resume_arg, drop: _ } => {
1685 self.check_assigned_place(*resume_arg, |this| this.visit_operand(value, location));
1688 // FIXME: Does `asm!` have any aliasing requirements?
1689 TerminatorKind::InlineAsm { .. } => {}
// Remaining terminators assign nothing relevant to saved locals.
1691 TerminatorKind::Call { .. }
1692 | TerminatorKind::Goto { .. }
1693 | TerminatorKind::SwitchInt { .. }
1694 | TerminatorKind::Resume
1695 | TerminatorKind::Abort
1696 | TerminatorKind::Return
1697 | TerminatorKind::Unreachable
1698 | TerminatorKind::Drop { .. }
1699 | TerminatorKind::DropAndReplace { .. }
1700 | TerminatorKind::Assert { .. }
1701 | TerminatorKind::GeneratorDrop
1702 | TerminatorKind::FalseEdge { .. }
1703 | TerminatorKind::FalseUnwind { .. } => {}
/// Walks every saved field of every generator variant and runs the
/// `must_not_suspend` lint on each distinct, non-ignored type held across
/// the corresponding suspension point.
1708 fn check_suspend_tys<'tcx>(tcx: TyCtxt<'tcx>, layout: &GeneratorLayout<'tcx>, body: &Body<'tcx>) {
// Dedup: each type is linted at most once per generator.
1709 let mut linted_tys = FxHashSet::default();
1711 // We want a user-facing param-env.
1712 let param_env = tcx.param_env(body.source.def_id());
// Pair each variant's fields with the source info of its yield point.
1714 for (variant, yield_source_info) in
1715 layout.variant_fields.iter().zip(&layout.variant_source_info)
1718 for &local in variant {
1719 let decl = &layout.field_tys[local];
1722 if !decl.ignore_for_traits && linted_tys.insert(decl.ty) {
// Skip fields with no lintable HIR scope.
1723 let Some(hir_id) = decl.source_info.scope.lint_root(&body.source_scopes) else { continue };
1725 check_must_not_suspend_ty(
1731 source_span: decl.source_info.span,
1732 yield_span: yield_source_info.span,
1734 ..Default::default()
// Context threaded through the recursive `must_not_suspend` check: spans of
// the held value and the yield, plus descriptor strings composed as the
// recursion peels wrappers (box / reference / array / tuple / trait object).
1743 struct SuspendCheckData<'a> {
1747 descr_post: &'a str,
1751 // Returns whether it emitted a diagnostic or not
1752 // Note that this fn and the proceeding one are based on the code
1753 // for creating must_use diagnostics
1755 // Note that this technique was chosen over things like a `Suspend` marker trait
1756 // as it is simpler and has precedent in the compiler
// Recursively inspects `ty`, looking through wrappers, and lints any ADT or
// trait bound marked `#[must_not_suspend]` via `check_must_not_suspend_def`.
1757 fn check_must_not_suspend_ty<'tcx>(
1761 param_env: ty::ParamEnv<'tcx>,
1762 data: SuspendCheckData<'_>,
1768 let plural_suffix = pluralize!(data.plural_len);
1770 debug!("Checking must_not_suspend for {}", ty);
// `Box<T>` is transparent for this lint: recurse into the boxed type.
1773 ty::Adt(..) if ty.is_box() => {
1774 let boxed_ty = ty.boxed_ty();
1775 let descr_pre = &format!("{}boxed ", data.descr_pre);
1776 check_must_not_suspend_ty(
1781 SuspendCheckData { descr_pre, ..data },
// Plain ADT: check the type's own `#[must_not_suspend]` attribute.
1784 ty::Adt(def, _) => check_must_not_suspend_def(tcx, def.did(), hir_id, data),
1785 // FIXME: support adding the attribute to TAITs
// Opaque type (`impl Trait`): check each trait in its bounds.
1786 ty::Alias(ty::Opaque, ty::AliasTy { def_id: def, .. }) => {
1787 let mut has_emitted = false;
1788 for &(predicate, _) in tcx.explicit_item_bounds(def) {
1789 // We only look at the `DefId`, so it is safe to skip the binder here.
1790 if let ty::PredicateKind::Clause(ty::Clause::Trait(ref poly_trait_predicate)) =
1791 predicate.kind().skip_binder()
1793 let def_id = poly_trait_predicate.trait_ref.def_id;
1794 let descr_pre = &format!("{}implementer{} of ", data.descr_pre, plural_suffix);
1795 if check_must_not_suspend_def(
1799 SuspendCheckData { descr_pre, ..data },
// `dyn Trait`: check each trait in the object's predicate list.
1808 ty::Dynamic(binder, _, _) => {
1809 let mut has_emitted = false;
1810 for predicate in binder.iter() {
1811 if let ty::ExistentialPredicate::Trait(ref trait_ref) = predicate.skip_binder() {
1812 let def_id = trait_ref.def_id;
1813 let descr_post = &format!(" trait object{}{}", plural_suffix, data.descr_post);
1814 if check_must_not_suspend_def(
1818 SuspendCheckData { descr_post, ..data },
// Tuples: recurse per element, noting the element index in the message.
1827 ty::Tuple(fields) => {
1828 let mut has_emitted = false;
1829 for (i, ty) in fields.iter().enumerate() {
1830 let descr_post = &format!(" in tuple element {i}");
1831 if check_must_not_suspend_ty(
1836 SuspendCheckData { descr_post, ..data },
// Arrays: recurse into the element type; plural_len reflects the length
// (evaluation failure falls back to 0, making plural_len 1).
1843 ty::Array(ty, len) => {
1844 let descr_pre = &format!("{}array{} of ", data.descr_pre, plural_suffix);
1845 check_must_not_suspend_ty(
1852 plural_len: len.try_eval_usize(tcx, param_env).unwrap_or(0) as usize + 1,
1857 // If drop tracking is enabled, we want to look through references, since the referrent
1858 // may not be considered live across the await point.
1859 ty::Ref(_region, ty, _mutability) => {
1860 let descr_pre = &format!("{}reference{} to ", data.descr_pre, plural_suffix);
1861 check_must_not_suspend_ty(
1866 SuspendCheckData { descr_pre, ..data },
1873 fn check_must_not_suspend_def(
1877 data: SuspendCheckData<'_>,
1879 if let Some(attr) = tcx.get_attr(def_id, sym::must_not_suspend) {
1881 "{}`{}`{} held across a suspend point, but should not be",
1883 tcx.def_path_str(def_id),
1886 tcx.struct_span_lint_hir(
1887 rustc_session::lint::builtin::MUST_NOT_SUSPEND,
1892 // add span pointing to the offending yield/await
1893 lint.span_label(data.yield_span, "the value is held across this suspend point");
1895 // Add optional reason note
1896 if let Some(note) = attr.value_str() {
1897 // FIXME(guswynn): consider formatting this better
1898 lint.span_note(data.source_span, note.as_str());
1901 // Add some quick suggestions on what to do
1902 // FIXME: can `drop` work as a suggestion here as well?
1905 "consider using a block (`{ ... }`) \
1906 to shrink the value's scope, ending before the suspend point",