2 AsyncGeneratorsNotSupported, AsyncNonMoveClosureNotSupported, AwaitOnlyInAsyncFnAndBlocks,
3 BaseExpressionDoubleDot, ClosureCannotBeStatic, FunctionalRecordUpdateDestructuringAssignemnt,
4 GeneratorTooManyParameters, InclusiveRangeWithNoEnd, NotSupportedForLifetimeBinderAsyncClosure,
5 RustcBoxAttributeError, UnderscoreExprLhsAssign,
7 use super::ResolverAstLoweringExt;
8 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
9 use crate::{FnDeclKind, ImplTraitPosition};
11 use rustc_ast::ptr::P as AstP;
13 use rustc_data_structures::stack::ensure_sufficient_stack;
15 use rustc_hir::def::Res;
16 use rustc_hir::definitions::DefPathData;
17 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
18 use rustc_span::symbol::{sym, Ident};
19 use rustc_span::DUMMY_SP;
20 use thin_vec::thin_vec;
22 impl<'hir> LoweringContext<'_, 'hir> {
// Lower a slice of AST expressions into an arena-allocated slice of HIR
// expressions, one `lower_expr_mut` call per element.
// NOTE(review): the method's closing brace is not visible in this excerpt.
23 fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
24 self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
// Arena-allocating wrapper around `lower_expr_mut`: lowers a single
// expression and returns a shared reference to the arena-owned result.
27 pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
28 self.arena.alloc(self.lower_expr_mut(e))
// Main expression-lowering dispatch: translate one AST `Expr` into a HIR
// `Expr` by matching on every `ExprKind` variant. Runs inside
// `ensure_sufficient_stack` because lowering recurses as deep as the
// expression tree. Several variants early-`return` a fully built
// `hir::Expr` (e.g. `#[rustc_box]` calls, `for` loops); the common path
// builds `kind` and attaches id/attrs/span at the bottom.
// NOTE(review): a number of lines are elided in this excerpt (visible in
// the gaps of the leading line-number tokens) — closing braces and some
// arguments are missing; comments below only describe what is visible.
31 pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
32 ensure_sufficient_stack(|| {
33 let kind = match e.kind {
34 ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
35 ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
36 ExprKind::ConstBlock(ref anon_const) => {
37 let anon_const = self.lower_anon_const(anon_const);
38 hir::ExprKind::ConstBlock(anon_const)
40 ExprKind::Repeat(ref expr, ref count) => {
41 let expr = self.lower_expr(expr);
42 let count = self.lower_array_length(count);
43 hir::ExprKind::Repeat(expr, count)
45 ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
// Calls get three special cases: `#[rustc_box]`-attributed calls lower
// directly to `hir::ExprKind::Box`, legacy const-generic intrinsic calls
// are rewritten via `lower_legacy_const_generics`, otherwise a plain call.
46 ExprKind::Call(ref f, ref args) => {
47 if e.attrs.get(0).map_or(false, |a| a.has_name(sym::rustc_box)) {
// `#[rustc_box]` is only valid on a 1-argument call with no other attrs;
// anything else emits `RustcBoxAttributeError`.
48 if let [inner] = &args[..] && e.attrs.len() == 1 {
49 let kind = hir::ExprKind::Box(self.lower_expr(&inner));
50 let hir_id = self.lower_node_id(e.id);
51 return hir::Expr { hir_id, kind, span: self.lower_span(e.span) };
53 self.tcx.sess.emit_err(RustcBoxAttributeError { span: e.span });
56 } else if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f) {
57 self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
59 let f = self.lower_expr(f);
60 hir::ExprKind::Call(f, self.lower_exprs(args))
63 ExprKind::MethodCall(ref seg, ref receiver, ref args, span) => {
64 let hir_seg = self.arena.alloc(self.lower_path_segment(
68 ParenthesizedGenericArgs::Err,
69 &mut ImplTraitContext::Disallowed(ImplTraitPosition::Path),
71 let receiver = self.lower_expr(receiver),
73 self.arena.alloc_from_iter(args.iter().map(|x| self.lower_expr_mut(x)));
74 hir::ExprKind::MethodCall(hir_seg, receiver, args, self.lower_span(span))
76 ExprKind::Binary(binop, ref lhs, ref rhs) => {
77 let binop = self.lower_binop(binop);
78 let lhs = self.lower_expr(lhs);
79 let rhs = self.lower_expr(rhs);
80 hir::ExprKind::Binary(binop, lhs, rhs)
82 ExprKind::Unary(op, ref ohs) => {
83 let op = self.lower_unop(op);
84 let ohs = self.lower_expr(ohs);
85 hir::ExprKind::Unary(op, ohs)
87 ExprKind::Lit(ref l) => {
88 hir::ExprKind::Lit(respan(self.lower_span(l.span), l.kind.clone()))
// Casts and type ascription disallow `impl Trait` in the type position.
90 ExprKind::Cast(ref expr, ref ty) => {
91 let expr = self.lower_expr(expr);
93 .lower_ty(ty, &mut ImplTraitContext::Disallowed(ImplTraitPosition::Type));
94 hir::ExprKind::Cast(expr, ty)
96 ExprKind::Type(ref expr, ref ty) => {
97 let expr = self.lower_expr(expr);
99 .lower_ty(ty, &mut ImplTraitContext::Disallowed(ImplTraitPosition::Type));
100 hir::ExprKind::Type(expr, ty)
102 ExprKind::AddrOf(k, m, ref ohs) => {
103 let ohs = self.lower_expr(ohs);
104 hir::ExprKind::AddrOf(k, m, ohs)
106 ExprKind::Let(ref pat, ref scrutinee, span) => {
107 hir::ExprKind::Let(self.arena.alloc(hir::Let {
108 hir_id: self.next_id(),
109 span: self.lower_span(span),
110 pat: self.lower_pat(pat),
112 init: self.lower_expr(scrutinee),
115 ExprKind::If(ref cond, ref then, ref else_opt) => {
116 self.lower_expr_if(cond, then, else_opt.as_deref())
// `while` is desugared inside a fresh loop scope; the span is marked with
// the `WhileLoop` desugaring kind so diagnostics can recognize it.
118 ExprKind::While(ref cond, ref body, opt_label) => {
119 self.with_loop_scope(e.id, |this| {
121 this.mark_span_with_reason(DesugaringKind::WhileLoop, e.span, None);
122 this.lower_expr_while_in_loop_scope(span, cond, body, opt_label)
125 ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
127 this.lower_block(body, false),
128 this.lower_label(opt_label),
129 hir::LoopSource::Loop,
133 ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
134 ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
135 self.lower_expr(expr),
136 self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
137 hir::MatchSource::Normal,
139 ExprKind::Async(capture_clause, closure_node_id, ref block) => self
145 hir::AsyncGeneratorKind::Block,
146 |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
// `.await`: compute the span of the `.await` suffix itself. When the inner
// expression ends before the whole expression (the normal case), extend
// past trailing whitespace and take everything up to `e.span`'s end.
148 ExprKind::Await(ref expr) => {
149 let dot_await_span = if expr.span.hi() < e.span.hi() {
150 let span_with_whitespace = self
154 .span_extend_while(expr.span, char::is_whitespace)
155 .unwrap_or(expr.span);
156 span_with_whitespace.shrink_to_hi().with_hi(e.span.hi())
158 // this is a recovered `await expr`
161 self.lower_expr_await(dot_await_span, expr)
// Closures split on asyncness: `async` closures take a separate path.
172 if let Async::Yes { closure_id, .. } = asyncness {
173 self.lower_expr_async_closure(
183 self.lower_expr_closure(
194 ExprKind::Block(ref blk, opt_label) => {
195 let opt_label = self.lower_label(opt_label);
196 hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
198 ExprKind::Assign(ref el, ref er, span) => {
199 self.lower_expr_assign(el, er, span, e.span)
201 ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
202 self.lower_binop(op),
206 ExprKind::Field(ref el, ident) => {
207 hir::ExprKind::Field(self.lower_expr(el), self.lower_ident(ident))
209 ExprKind::Index(ref el, ref er) => {
210 hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
// Closed ranges `a..=b` with both endpoints have a dedicated desugaring.
212 ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
213 self.lower_expr_range_closed(e.span, e1, e2)
215 ExprKind::Range(ref e1, ref e2, lims) => {
216 self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
// A bare `_` expression is only meaningful on the LHS of an assignment;
// reaching it here is an error.
218 ExprKind::Underscore => {
219 self.tcx.sess.emit_err(UnderscoreExprLhsAssign { span: e.span });
222 ExprKind::Path(ref qself, ref path) => {
223 let qpath = self.lower_qpath(
228 &mut ImplTraitContext::Disallowed(ImplTraitPosition::Path),
230 hir::ExprKind::Path(qpath)
232 ExprKind::Break(opt_label, ref opt_expr) => {
233 let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
234 hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
236 ExprKind::Continue(opt_label) => {
237 hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
239 ExprKind::Ret(ref e) => {
240 let e = e.as_ref().map(|x| self.lower_expr(x));
241 hir::ExprKind::Ret(e)
243 ExprKind::Yeet(ref sub_expr) => self.lower_expr_yeet(e.span, sub_expr.as_deref()),
244 ExprKind::InlineAsm(ref asm) => {
245 hir::ExprKind::InlineAsm(self.lower_inline_asm(e.span, asm))
// Struct literals: `..base` lowers the base expression; a bare `..`
// (StructRest::Rest) outside a destructuring assignment is an error,
// recovered with an error expression.
247 ExprKind::Struct(ref se) => {
248 let rest = match &se.rest {
249 StructRest::Base(e) => Some(self.lower_expr(e)),
250 StructRest::Rest(sp) => {
251 self.tcx.sess.emit_err(BaseExpressionDoubleDot { span: *sp });
252 Some(&*self.arena.alloc(self.expr_err(*sp)))
254 StructRest::None => None,
256 hir::ExprKind::Struct(
257 self.arena.alloc(self.lower_qpath(
262 &mut ImplTraitContext::Disallowed(ImplTraitPosition::Path),
265 .alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
269 ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
270 ExprKind::Err => hir::ExprKind::Err,
271 ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
272 ExprKind::Paren(ref ex) => {
273 let mut ex = self.lower_expr_mut(ex);
274 // Include parens in span, but only if it is a super-span.
275 if e.span.contains(ex.span) {
276 ex.span = self.lower_span(e.span);
278 // Merge attributes into the inner expression.
279 if !e.attrs.is_empty() {
281 self.attrs.get(&ex.hir_id.local_id).map(|la| *la).unwrap_or(&[]);
284 &*self.arena.alloc_from_iter(
287 .map(|a| self.lower_attr(a))
288 .chain(old_attrs.iter().cloned()),
295 // Desugar `ExprForLoop`
296 // from: `[opt_ident]: for <pat> in <head> <body>`
297 ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
298 return self.lower_expr_for(e, pat, head, body, opt_label);
// Macro calls must have been expanded before lowering.
300 ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
// Common tail: allocate the HIR id, attach lowered attributes, and wrap
// the computed `kind` with the lowered span.
303 let hir_id = self.lower_node_id(e.id);
304 self.lower_attrs(hir_id, &e.attrs);
305 hir::Expr { hir_id, kind, span: self.lower_span(e.span) }
309 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
311 UnOp::Deref => hir::UnOp::Deref,
312 UnOp::Not => hir::UnOp::Not,
313 UnOp::Neg => hir::UnOp::Neg,
317 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
320 BinOpKind::Add => hir::BinOpKind::Add,
321 BinOpKind::Sub => hir::BinOpKind::Sub,
322 BinOpKind::Mul => hir::BinOpKind::Mul,
323 BinOpKind::Div => hir::BinOpKind::Div,
324 BinOpKind::Rem => hir::BinOpKind::Rem,
325 BinOpKind::And => hir::BinOpKind::And,
326 BinOpKind::Or => hir::BinOpKind::Or,
327 BinOpKind::BitXor => hir::BinOpKind::BitXor,
328 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
329 BinOpKind::BitOr => hir::BinOpKind::BitOr,
330 BinOpKind::Shl => hir::BinOpKind::Shl,
331 BinOpKind::Shr => hir::BinOpKind::Shr,
332 BinOpKind::Eq => hir::BinOpKind::Eq,
333 BinOpKind::Lt => hir::BinOpKind::Lt,
334 BinOpKind::Le => hir::BinOpKind::Le,
335 BinOpKind::Ne => hir::BinOpKind::Ne,
336 BinOpKind::Ge => hir::BinOpKind::Ge,
337 BinOpKind::Gt => hir::BinOpKind::Gt,
339 span: self.lower_span(b.span),
// Rewrite a call to a function with "legacy const generics" (value
// arguments that are really const-generic arguments, e.g. some intrinsics):
// the arguments at `legacy_args_idx` are pulled out of the argument list,
// wrapped as anonymous consts, and attached to the callee path as
// angle-bracketed generic args before lowering the call normally.
// NOTE(review): the `f` parameter line and several closing lines are
// elided in this excerpt.
343 fn lower_legacy_const_generics(
346 args: Vec<AstP<Expr>>,
347 legacy_args_idx: &[usize],
348 ) -> hir::ExprKind<'hir> {
349 let ExprKind::Path(None, ref mut path) = f.kind else {
353 // Split the arguments into const generics and normal arguments
354 let mut real_args = vec![];
355 let mut generic_args = vec![];
356 for (idx, arg) in args.into_iter().enumerate() {
357 if legacy_args_idx.contains(&idx) {
358 let parent_def_id = self.current_hir_id_owner;
359 let node_id = self.next_node_id();
361 // Add a definition for the in-band const def.
362 self.create_def(parent_def_id, node_id, DefPathData::AnonConst);
364 let anon_const = AnonConst { id: node_id, value: arg };
365 generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
371 // Add generic args to the last element of the path.
372 let last_segment = path.segments.last_mut().unwrap();
// The resolver only reports legacy const generics for paths without
// existing generic args, hence the assertion.
373 assert!(last_segment.args.is_none());
374 last_segment.args = Some(AstP(GenericArgs::AngleBracketed(AngleBracketedArgs {
379 // Now lower everything as normal.
380 let f = self.lower_expr(&f);
381 hir::ExprKind::Call(f, self.lower_exprs(&real_args))
// Tail of `lower_expr_if`: lower the condition (wrapping non-`let`
// conditions via `manage_let_cond` for drop-temporary semantics), lower the
// `then` block, and build `hir::ExprKind::If` with an optional else arm.
// NOTE(review): the fn header and earlier parameters are not visible in
// this excerpt.
388 else_opt: Option<&Expr>,
389 ) -> hir::ExprKind<'hir> {
390 let lowered_cond = self.lower_expr(cond);
391 let new_cond = self.manage_let_cond(lowered_cond);
392 let then_expr = self.lower_block_expr(then);
393 if let Some(rslt) = else_opt {
394 hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), Some(self.lower_expr(rslt)))
396 hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), None)
400 // If `cond` kind is `let`, returns `let`. Otherwise, wraps and returns `cond`
401 // in a temporary block.
402 fn manage_let_cond(&mut self, cond: &'hir hir::Expr<'hir>) -> &'hir hir::Expr<'hir> {
// Recursively detect whether the condition contains a `let` expression
// (possibly nested under binary operators like `&&`).
403 fn has_let_expr<'hir>(expr: &'hir hir::Expr<'hir>) -> bool {
405 hir::ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
406 hir::ExprKind::Let(..) => true,
410 if has_let_expr(cond) {
// Non-`let` conditions are wrapped in `DropTemps` (marked with the
// `CondTemporary` desugaring) so condition temporaries are dropped
// before the branch body runs.
413 let reason = DesugaringKind::CondTemporary;
414 let span_block = self.mark_span_with_reason(reason, cond.span, None);
415 self.expr_drop_temps(span_block, cond, AttrVec::new())
419 // We desugar: `'label: while $cond $body` into:
423 // if { let _t = $cond; _t } {
432 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
433 // to preserve drop semantics since `while $cond { ... }` does not
434 // let temporaries live outside of `cond`.
// NOTE(review): parts of the desugaring sketch above and the fn's
// parameter list are elided in this excerpt.
435 fn lower_expr_while_in_loop_scope(
440 opt_label: Option<Label>,
441 ) -> hir::ExprKind<'hir> {
// Lower the condition inside a loop-condition scope so `break`/`continue`
// inside it are handled correctly, then apply the drop-temps wrapping.
442 let lowered_cond = self.with_loop_condition_scope(|t| t.lower_expr(cond));
443 let new_cond = self.manage_let_cond(lowered_cond);
444 let then = self.lower_block_expr(body);
// Build the `else { break }` arm that terminates the loop when the
// condition is false.
445 let expr_break = self.expr_break(span, AttrVec::new());
446 let stmt_break = self.stmt_expr(span, expr_break);
447 let else_blk = self.block_all(span, arena_vec![self; stmt_break], None);
448 let else_expr = self.arena.alloc(self.expr_block(else_blk, AttrVec::new()));
449 let if_kind = hir::ExprKind::If(new_cond, self.arena.alloc(then), Some(else_expr));
450 let if_expr = self.expr(span, if_kind, AttrVec::new());
451 let block = self.block_expr(self.arena.alloc(if_expr));
// The loop's span only covers up to the end of the condition.
452 let span = self.lower_span(span.with_hi(cond.span.hi()));
453 let opt_label = self.lower_label(opt_label);
454 hir::ExprKind::Loop(block, opt_label, hir::LoopSource::While, span)
457 /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_output(<expr>) }`,
458 /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_output(()) }`
459 /// and save the block id to use it as a break target for desugaring of the `?` operator.
460 fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
461 self.with_catch_scope(body.id, |this| {
462 let mut block = this.lower_block_noalloc(body, true);
464 // Final expression of the block (if present) or `()` with span at the end of block
465 let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
467 this.mark_span_with_reason(
468 DesugaringKind::TryBlock,
470 this.allow_try_trait.clone(),
// No tail expression: synthesize `()` at the block's closing brace,
// marked with the TryBlock desugaring and try-trait feature gate.
475 let try_span = this.mark_span_with_reason(
476 DesugaringKind::TryBlock,
477 this.tcx.sess.source_map().end_point(body.span),
478 this.allow_try_trait.clone(),
481 (try_span, this.expr_unit(try_span))
484 let ok_wrapped_span =
485 this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);
487 // `::std::ops::Try::from_output($tail_expr)`
488 block.expr = Some(this.wrap_in_try_constructor(
489 hir::LangItem::TryTraitFromOutput,
495 hir::ExprKind::Block(this.arena.alloc(block), None)
// Wrap `expr` in a call to the given `Try`-related lang item, i.e. build
// `<lang_item>(expr)` as a HIR call. Used by the `try` block and `?`
// desugarings.
// NOTE(review): some parameter lines (spans) are elided in this excerpt.
499 fn wrap_in_try_constructor(
501 lang_item: hir::LangItem,
503 expr: &'hir hir::Expr<'hir>,
505 ) -> &'hir hir::Expr<'hir> {
506 let constructor = self.arena.alloc(self.expr_lang_item_path(
// `from_ref` avoids allocating a slice for the single argument.
512 self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
// Lower one match arm: pattern, optional guard, and body. A guard of the
// form `if let <pat> = <scrutinee>` becomes `hir::Guard::IfLet`; any other
// guard expression becomes `hir::Guard::If`.
515 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
516 let pat = self.lower_pat(&arm.pat);
517 let guard = arm.guard.as_ref().map(|cond| {
518 if let ExprKind::Let(ref pat, ref scrutinee, span) = cond.kind {
519 hir::Guard::IfLet(self.arena.alloc(hir::Let {
520 hir_id: self.next_id(),
521 span: self.lower_span(span),
522 pat: self.lower_pat(pat),
524 init: self.lower_expr(scrutinee),
527 hir::Guard::If(self.lower_expr(cond))
530 let hir_id = self.next_id();
531 self.lower_attrs(hir_id, &arm.attrs);
536 body: self.lower_expr(&arm.body),
537 span: self.lower_span(arm.span),
541 /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
546 /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
550 pub(super) fn make_async_expr(
552 capture_clause: CaptureBy,
553 closure_node_id: NodeId,
554 ret_ty: Option<AstP<Ty>>,
556 async_gen_kind: hir::AsyncGeneratorKind,
557 body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
558 ) -> hir::ExprKind<'hir> {
// Lower the declared return type if present; otherwise default-return.
// `impl Trait` is disallowed in an async block's return position.
561 Some(ty) => hir::FnRetTy::Return(self.lower_ty(
563 &mut ImplTraitContext::Disallowed(ImplTraitPosition::AsyncBlock),
565 None => hir::FnRetTy::DefaultReturn(self.lower_span(span)),
568 // Resume argument type. We let the compiler infer this to simplify the lowering. It is
569 // fully constrained by `future::from_generator`.
570 let input_ty = hir::Ty {
571 hir_id: self.next_id(),
572 kind: hir::TyKind::Infer,
573 span: self.lower_span(span),
576 // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
577 let fn_decl = self.arena.alloc(hir::FnDecl {
578 inputs: arena_vec![self; input_ty],
581 implicit_self: hir::ImplicitSelfKind::None,
584 // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
585 let (pat, task_context_hid) = self.pat_ident_binding_mode(
587 Ident::with_dummy_span(sym::_task_context),
588 hir::BindingAnnotation::MUT,
590 let param = hir::Param {
591 hir_id: self.next_id(),
593 ty_span: self.lower_span(span),
594 span: self.lower_span(span),
596 let params = arena_vec![self; param];
598 let body = self.lower_body(move |this| {
599 this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
// Save/restore `task_context` around the body so nested async
// constructs see the correct resume-argument binding.
601 let old_ctx = this.task_context;
602 this.task_context = Some(task_context_hid);
603 let res = body(this);
604 this.task_context = old_ctx;
608 // `static |_task_context| -> <ret_ty> { body }`:
609 let generator_kind = {
610 let c = self.arena.alloc(hir::Closure {
611 binder: hir::ClosureBinder::Default,
613 bound_generic_params: &[],
616 fn_decl_span: self.lower_span(span),
// `Static` movability: the generator self-references across yields.
617 movability: Some(hir::Movability::Static),
620 hir::ExprKind::Closure(c)
622 let generator = hir::Expr {
623 hir_id: self.lower_node_id(closure_node_id),
624 kind: generator_kind,
625 span: self.lower_span(span),
628 // `future::from_generator`:
630 self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
631 let gen_future = self.expr_lang_item_path(
633 hir::LangItem::FromGenerator,
638 // `future::from_generator(generator)`:
639 hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
642 /// Desugar `<expr>.await` into:
643 /// ```ignore (pseudo-rust)
644 /// match ::std::future::IntoFuture::into_future(<expr>) {
645 /// mut __awaitee => loop {
646 /// match unsafe { ::std::future::Future::poll(
647 /// <::std::pin::Pin>::new_unchecked(&mut __awaitee),
648 /// ::std::future::get_context(task_context),
650 /// ::std::task::Poll::Ready(result) => break result,
651 /// ::std::task::Poll::Pending => {}
653 /// task_context = yield ();
657 fn lower_expr_await(&mut self, dot_await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
658 let full_span = expr.span.to(dot_await_span);
// `.await` is only legal inside an async generator body; emit an error
// (but keep lowering for recovery) otherwise.
659 match self.generator_kind {
660 Some(hir::GeneratorKind::Async(_)) => {}
661 Some(hir::GeneratorKind::Gen) | None => {
662 self.tcx.sess.emit_err(AwaitOnlyInAsyncFnAndBlocks {
664 item_span: self.current_item,
668 let span = self.mark_span_with_reason(DesugaringKind::Await, dot_await_span, None);
669 let gen_future_span = self.mark_span_with_reason(
670 DesugaringKind::Await,
672 self.allow_gen_future.clone(),
674 let expr = self.lower_expr_mut(expr);
675 let expr_hir_id = expr.hir_id;
677 // Note that the name of this binding must not be changed to something else because
678 // debuggers and debugger extensions expect it to be called `__awaitee`. They use
679 // this name to identify what is being awaited by a suspended async functions.
680 let awaitee_ident = Ident::with_dummy_span(sym::__awaitee);
681 let (awaitee_pat, awaitee_pat_hid) =
682 self.pat_ident_binding_mode(span, awaitee_ident, hir::BindingAnnotation::MUT);
684 let task_context_ident = Ident::with_dummy_span(sym::_task_context);
687 // ::std::future::Future::poll(
688 // ::std::pin::Pin::new_unchecked(&mut __awaitee),
689 // ::std::future::get_context(task_context),
693 let awaitee = self.expr_ident(span, awaitee_ident, awaitee_pat_hid);
694 let ref_mut_awaitee = self.expr_mut_addr_of(span, awaitee);
// `task_context` is the resume argument bound by `make_async_expr`;
// it is absent when `.await` is used outside an async context.
695 let task_context = if let Some(task_context_hid) = self.task_context {
696 self.expr_ident_mut(span, task_context_ident, task_context_hid)
698 // Use of `await` outside of an async context, we cannot use `task_context` here.
701 let new_unchecked = self.expr_call_lang_item_fn_mut(
703 hir::LangItem::PinNewUnchecked,
704 arena_vec![self; ref_mut_awaitee],
707 let get_context = self.expr_call_lang_item_fn_mut(
709 hir::LangItem::GetContext,
710 arena_vec![self; task_context],
713 let call = self.expr_call_lang_item_fn(
715 hir::LangItem::FuturePoll,
716 arena_vec![self; new_unchecked, get_context],
// The poll call is wrapped in an `unsafe` block (Pin::new_unchecked).
719 self.arena.alloc(self.expr_unsafe(call))
722 // `::std::task::Poll::Ready(result) => break result`
723 let loop_node_id = self.next_node_id();
724 let loop_hir_id = self.lower_node_id(loop_node_id);
726 let x_ident = Ident::with_dummy_span(sym::result);
727 let (x_pat, x_pat_hid) = self.pat_ident(gen_future_span, x_ident);
728 let x_expr = self.expr_ident(gen_future_span, x_ident, x_pat_hid);
729 let ready_field = self.single_pat_field(gen_future_span, x_pat);
730 let ready_pat = self.pat_lang_item_variant(
732 hir::LangItem::PollReady,
736 let break_x = self.with_loop_scope(loop_node_id, move |this| {
738 hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
739 this.arena.alloc(this.expr(gen_future_span, expr_break, AttrVec::new()))
741 self.arm(ready_pat, break_x)
744 // `::std::task::Poll::Pending => {}`
746 let pending_pat = self.pat_lang_item_variant(
748 hir::LangItem::PollPending,
752 let empty_block = self.expr_block_empty(span);
753 self.arm(pending_pat, empty_block)
756 let inner_match_stmt = {
757 let match_expr = self.expr_match(
760 arena_vec![self; ready_arm, pending_arm],
761 hir::MatchSource::AwaitDesugar,
763 self.stmt_expr(span, match_expr)
766 // task_context = yield ();
768 let unit = self.expr_unit(span);
769 let yield_expr = self.expr(
771 hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr_hir_id) }),
774 let yield_expr = self.arena.alloc(yield_expr);
776 if let Some(task_context_hid) = self.task_context {
777 let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
778 let assign = self.expr(
780 hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span)),
783 self.stmt_expr(span, assign)
785 // Use of `await` outside of an async context. Return `yield_expr` so that we can
786 // proceed with type checking.
787 self.stmt(span, hir::StmtKind::Semi(yield_expr))
791 let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);
794 let loop_expr = self.arena.alloc(hir::Expr {
796 kind: hir::ExprKind::Loop(
799 hir::LoopSource::Loop,
800 self.lower_span(span),
802 span: self.lower_span(span),
805 // mut __awaitee => loop { ... }
806 let awaitee_arm = self.arm(awaitee_pat, loop_expr);
808 // `match ::std::future::IntoFuture::into_future(<expr>) { ... }`
809 let into_future_span = self.mark_span_with_reason(
810 DesugaringKind::Await,
812 self.allow_into_future.clone(),
814 let into_future_expr = self.expr_call_lang_item_fn(
816 hir::LangItem::IntoFutureIntoFuture,
817 arena_vec![self; expr],
821 // match <into_future_expr> {
822 // mut __awaitee => loop { .. }
824 hir::ExprKind::Match(
826 arena_vec![self; awaitee_arm],
827 hir::MatchSource::AwaitDesugar,
// Lower a non-async closure: lower the body inside fresh scopes (tracking
// whether the body turned out to be a generator), decide movability, then
// build the `hir::Closure` under the closure's lifetime binder.
// NOTE(review): several parameter lines and struct-field lines are elided
// in this excerpt.
831 fn lower_expr_closure(
833 binder: &ClosureBinder,
834 capture_clause: CaptureBy,
836 movability: Movability,
840 ) -> hir::ExprKind<'hir> {
841 let (binder_clause, generic_params) = self.lower_closure_binder(binder);
843 let (body_id, generator_option) = self.with_new_scopes(move |this| {
// `current_item` is saved/restored so diagnostics point at this closure
// while its body is lowered.
844 let prev = this.current_item;
845 this.current_item = Some(fn_decl_span);
846 let mut generator_kind = None;
847 let body_id = this.lower_fn_body(decl, |this| {
848 let e = this.lower_expr_mut(body);
// Capture whether lowering the body discovered `yield` (generator).
849 generator_kind = this.generator_kind;
852 let generator_option =
853 this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
854 this.current_item = prev;
855 (body_id, generator_option)
858 self.lower_lifetime_binder(closure_id, generic_params, |lctx, bound_generic_params| {
859 // Lower outside new scope to preserve `is_in_loop_condition`.
860 let fn_decl = lctx.lower_fn_decl(decl, None, fn_decl_span, FnDeclKind::Closure, None);
862 let c = lctx.arena.alloc(hir::Closure {
863 binder: binder_clause,
865 bound_generic_params,
868 fn_decl_span: lctx.lower_span(fn_decl_span),
869 movability: generator_option,
872 hir::ExprKind::Closure(c)
// Validate a closure that may be a generator and compute its movability:
// a `yield`-containing closure may take at most one parameter and may be
// `static`; a non-generator closure must not be declared `static`; an
// async generator here indicates a lowering bug.
// NOTE(review): some parameter lines and the Ok-path return lines are
// elided in this excerpt.
876 fn generator_movability_for_fn(
880 generator_kind: Option<hir::GeneratorKind>,
881 movability: Movability,
882 ) -> Option<hir::Movability> {
883 match generator_kind {
884 Some(hir::GeneratorKind::Gen) => {
885 if decl.inputs.len() > 1 {
886 self.tcx.sess.emit_err(GeneratorTooManyParameters { fn_decl_span });
// `async` bodies go through `lower_expr_async_closure`, never here.
890 Some(hir::GeneratorKind::Async(_)) => {
891 panic!("non-`async` closure body turned `async` during lowering");
894 if movability == Movability::Static {
895 self.tcx.sess.emit_err(ClosureCannotBeStatic { fn_decl_span });
// Lower a closure's `for<'a, ...>` binder (if present) into its HIR form,
// returning the lowered binder together with the borrowed generic params
// (empty when no binder was written).
902 fn lower_closure_binder<'c>(
904 binder: &'c ClosureBinder,
905 ) -> (hir::ClosureBinder, &'c [GenericParam]) {
906 let (binder, params) = match binder {
907 ClosureBinder::NotPresent => (hir::ClosureBinder::Default, &[][..]),
908 &ClosureBinder::For { span, ref generic_params } => {
909 let span = self.lower_span(span);
910 (hir::ClosureBinder::For { span }, &**generic_params)
// Lower an `async` closure. A lifetime binder (`for<...>`) on an async
// closure is rejected up front. The closure is lowered as an outer plain
// closure whose body is the async generator built by `make_async_expr`:
// `async |x| -> X { ... }` becomes `|x| from_generator(|| -> X { ... })`.
// NOTE(review): several parameter lines and struct-field lines are elided
// in this excerpt.
917 fn lower_expr_async_closure(
919 binder: &ClosureBinder,
920 capture_clause: CaptureBy,
922 inner_closure_id: NodeId,
926 ) -> hir::ExprKind<'hir> {
927 if let &ClosureBinder::For { span, .. } = binder {
928 self.tcx.sess.emit_err(NotSupportedForLifetimeBinderAsyncClosure { span });
931 let (binder_clause, generic_params) = self.lower_closure_binder(binder);
// The outer closure keeps the original inputs but has a default return
// type — the declared return type moves onto the inner async body.
934 FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
936 let body = self.with_new_scopes(|this| {
937 // FIXME(cramertj): allow `async` non-`move` closures with arguments.
938 if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
939 this.tcx.sess.emit_err(AsyncNonMoveClosureNotSupported { fn_decl_span });
942 // Transform `async |x: u8| -> X { ... }` into
943 // `|x: u8| future_from_generator(|| -> X { ... })`.
944 let body_id = this.lower_fn_body(&outer_decl, |this| {
946 if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
947 let async_body = this.make_async_expr(
952 hir::AsyncGeneratorKind::Closure,
953 |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
955 this.expr(fn_decl_span, async_body, AttrVec::new())
960 self.lower_lifetime_binder(closure_id, generic_params, |lctx, bound_generic_params| {
961 // We need to lower the declaration outside the new scope, because we
962 // have to conserve the state of being inside a loop condition for the
963 // closure argument types.
965 lctx.lower_fn_decl(&outer_decl, None, fn_decl_span, FnDeclKind::Closure, None);
967 let c = lctx.arena.alloc(hir::Closure {
968 binder: binder_clause,
970 bound_generic_params,
973 fn_decl_span: lctx.lower_span(fn_decl_span),
976 hir::ExprKind::Closure(c)
980 /// Destructure the LHS of complex assignments.
981 /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
// NOTE(review): the fn's parameter lines and a few match-arm lines in
// `is_ordinary` are elided in this excerpt.
982 fn lower_expr_assign(
988 ) -> hir::ExprKind<'hir> {
989 // Return early in case of an ordinary assignment.
// `is_ordinary` decides whether the LHS is a plain place expression (true)
// or a destructuring form like a tuple/struct/slice literal (false).
990 fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
993 | ExprKind::Struct(..)
995 | ExprKind::Underscore => false,
996 // Check for tuple struct constructor.
997 ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
998 ExprKind::Paren(e) => {
1000 // We special-case `(..)` for consistency with patterns.
1001 ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
1002 _ => is_ordinary(lower_ctx, e),
1008 if is_ordinary(self, lhs) {
1009 return hir::ExprKind::Assign(
1010 self.lower_expr(lhs),
1011 self.lower_expr(rhs),
1012 self.lower_span(eq_sign_span),
1016 let mut assignments = vec![];
1018 // The LHS becomes a pattern: `(lhs1, lhs2)`.
1019 let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
1020 let rhs = self.lower_expr(rhs);
1022 // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
1023 let destructure_let = self.stmt_let_pat(
1028 hir::LocalSource::AssignDesugar(self.lower_span(eq_sign_span)),
1031 // `a = lhs1; b = lhs2;`.
1034 .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));
1036 // Wrap everything in a block.
1037 hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None)
1040 /// If the given expression is a path to a tuple struct, returns that path.
1041 /// It is not a complete check, but just tries to reject most paths early
1042 /// if they are not tuple structs.
1043 /// Type checking will take care of the full validation later.
1044 fn extract_tuple_struct_path<'a>(
1047 ) -> Option<(&'a Option<QSelf>, &'a Path)> {
1048 if let ExprKind::Path(qself, path) = &expr.kind {
1049 // Does the path resolve to something disallowed in a tuple struct/variant pattern?
1050 if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
1051 if partial_res.unresolved_segments() == 0
1052 && !partial_res.base_res().expected_in_tuple_struct_pat()
1057 return Some((qself, path));
1062 /// If the given expression is a path to a unit struct, returns that path.
1063 /// It is not a complete check, but just tries to reject most paths early
1064 /// if they are not unit structs.
1065 /// Type checking will take care of the full validation later.
1066 fn extract_unit_struct_path<'a>(
1069 ) -> Option<(&'a Option<QSelf>, &'a Path)> {
1070 if let ExprKind::Path(qself, path) = &expr.kind {
1071 // Does the path resolve to something disallowed in a unit struct/variant pattern?
1072 if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
1073 if partial_res.unresolved_segments() == 0
1074 && !partial_res.base_res().expected_in_unit_struct_pat()
1079 return Some((qself, path));
1084 /// Convert the LHS of a destructuring assignment to a pattern.
1085 /// Each sub-assignment is recorded in `assignments`.
// Arena-allocating wrapper around `destructure_assign_mut`.
1086 fn destructure_assign(
1090 assignments: &mut Vec<hir::Stmt<'hir>>,
1091 ) -> &'hir hir::Pat<'hir> {
1092 self.arena.alloc(self.destructure_assign_mut(lhs, eq_sign_span, assignments))
// Core of destructuring-assignment lowering: turn the LHS expression into
// a HIR pattern. Recognized destructuring forms (`_`, arrays, tuple-struct
// calls, unit-struct paths, struct literals, tuples, parens) become the
// corresponding pattern; anything else is treated as a normal lvalue — a
// fresh `lhs` binding is introduced and the real assignment is pushed onto
// `assignments` to be emitted after the desugared `let`.
// NOTE(review): some parameter lines, match headers, and closing lines are
// elided in this excerpt.
1095 fn destructure_assign_mut(
1099 assignments: &mut Vec<hir::Stmt<'hir>>,
1100 ) -> hir::Pat<'hir> {
1102 // Underscore pattern.
1103 ExprKind::Underscore => {
1104 return self.pat_without_dbm(lhs.span, hir::PatKind::Wild);
1107 ExprKind::Array(elements) => {
1109 self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
// A `..` in the sequence splits the slice pattern into before/after
// parts around a wildcard rest pattern.
1110 let slice_pat = if let Some((i, span)) = rest {
1111 let (before, after) = pats.split_at(i);
1112 hir::PatKind::Slice(
1114 Some(self.arena.alloc(self.pat_without_dbm(span, hir::PatKind::Wild))),
1118 hir::PatKind::Slice(pats, None, &[])
1120 return self.pat_without_dbm(lhs.span, slice_pat);
1123 ExprKind::Call(callee, args) => {
1124 if let Some((qself, path)) = self.extract_tuple_struct_path(callee) {
1125 let (pats, rest) = self.destructure_sequence(
1127 "tuple struct or variant",
1131 let qpath = self.lower_qpath(
1135 ParamMode::Optional,
1136 &mut ImplTraitContext::Disallowed(ImplTraitPosition::Path),
1138 // Destructure like a tuple struct.
1139 let tuple_struct_pat = hir::PatKind::TupleStruct(
1142 hir::DotDotPos::new(rest.map(|r| r.0)),
1144 return self.pat_without_dbm(lhs.span, tuple_struct_pat);
1147 // Unit structs and enum variants.
1148 ExprKind::Path(..) => {
1149 if let Some((qself, path)) = self.extract_unit_struct_path(lhs) {
1150 let qpath = self.lower_qpath(
1154 ParamMode::Optional,
1155 &mut ImplTraitContext::Disallowed(ImplTraitPosition::Path),
1157 // Destructure like a unit struct.
1158 let unit_struct_pat = hir::PatKind::Path(qpath);
1159 return self.pat_without_dbm(lhs.span, unit_struct_pat);
1163 ExprKind::Struct(se) => {
1164 let field_pats = self.arena.alloc_from_iter(se.fields.iter().map(|f| {
1165 let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
1167 hir_id: self.next_id(),
1168 ident: self.lower_ident(f.ident),
1170 is_shorthand: f.is_shorthand,
1171 span: self.lower_span(f.span),
1174 let qpath = self.lower_qpath(
1178 ParamMode::Optional,
1179 &mut ImplTraitContext::Disallowed(ImplTraitPosition::Path),
// `..base` (functional record update) is not allowed on the LHS of a
// destructuring assignment; a bare `..` just marks omitted fields.
1181 let fields_omitted = match &se.rest {
1182 StructRest::Base(e) => {
1183 self.tcx.sess.emit_err(FunctionalRecordUpdateDestructuringAssignemnt {
1188 StructRest::Rest(_) => true,
1189 StructRest::None => false,
1191 let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
1192 return self.pat_without_dbm(lhs.span, struct_pat);
1195 ExprKind::Tup(elements) => {
1197 self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
1198 let tuple_pat = hir::PatKind::Tuple(pats, hir::DotDotPos::new(rest.map(|r| r.0)));
1199 return self.pat_without_dbm(lhs.span, tuple_pat);
1201 ExprKind::Paren(e) => {
1202 // We special-case `(..)` for consistency with patterns.
1203 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1204 let tuple_pat = hir::PatKind::Tuple(&[], hir::DotDotPos::new(Some(0)));
1205 return self.pat_without_dbm(lhs.span, tuple_pat);
1207 return self.destructure_assign_mut(e, eq_sign_span, assignments);
1212 // Treat all other cases as normal lvalue.
1213 let ident = Ident::new(sym::lhs, self.lower_span(lhs.span));
1214 let (pat, binding) = self.pat_ident_mut(lhs.span, ident);
1215 let ident = self.expr_ident(lhs.span, ident, binding);
1217 hir::ExprKind::Assign(self.lower_expr(lhs), ident, self.lower_span(eq_sign_span));
1218 let expr = self.expr(lhs.span, assign, AttrVec::new());
1219 assignments.push(self.stmt_expr(lhs.span, expr));
1223 /// Destructure a sequence of expressions occurring on the LHS of an assignment.
1224 /// Such a sequence occurs in a tuple (struct)/slice.
1225 /// Return a sequence of corresponding patterns, and the index and the span of `..` if it exists.
1227 /// Each sub-assignment is recorded in `assignments`.
1228 fn destructure_sequence(
1230 elements: &[AstP<Expr>],
1233 assignments: &mut Vec<hir::Stmt<'hir>>,
1234 ) -> (&'hir [hir::Pat<'hir>], Option<(usize, Span)>) {
// Position and span of the (at most one) `..` element, if any.
1235 let mut rest = None;
// `..` elements are filtered out of the pattern list; everything else
// is destructured recursively into a sub-pattern.
1237 self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
1238 // Check for `..` pattern.
1239 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1240 if let Some((_, prev_span)) = rest {
// A second `..` is an error; report it against the first occurrence.
1241 self.ban_extra_rest_pat(e.span, prev_span, ctx);
1243 rest = Some((i, e.span));
1247 Some(self.destructure_assign_mut(e, eq_sign_span, assignments))
1253 /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
1254 fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
// Lower both endpoints first, then call the `RangeInclusive::new`
// lang item with them: `RangeInclusive::new(e1, e2)`.
1255 let e1 = self.lower_expr_mut(e1);
1256 let e2 = self.lower_expr_mut(e2);
1258 hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, self.lower_span(span), None);
1260 self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), AttrVec::new()));
1261 hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
/// Desugar the remaining range forms (`a..b`, `..b`, `a..`, `..`,
/// `..=b`) into a struct expression for the matching `Range*` lang item.
1264 fn lower_expr_range(
1270 ) -> hir::ExprKind<'hir> {
1271 use rustc_ast::RangeLimits::*;
// Choose the lang item from which endpoints are present and whether
// the range is half-open or closed.
1273 let lang_item = match (e1, e2, lims) {
1274 (None, None, HalfOpen) => hir::LangItem::RangeFull,
1275 (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
1276 (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
1277 (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
1278 (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
// `a..=b` is desugared by `lower_expr_range_closed`, never here.
1279 (Some(..), Some(..), Closed) => unreachable!(),
// `a..=` / `..=` with no end is an error; recover with the closest
// half-open lang item so lowering can continue.
1280 (start, None, Closed) => {
1281 self.tcx.sess.emit_err(InclusiveRangeWithNoEnd { span });
1283 Some(..) => hir::LangItem::RangeFrom,
1284 None => hir::LangItem::RangeFull,
// Build only the `start`/`end` fields that actually exist.
1289 let fields = self.arena.alloc_from_iter(
1290 e1.iter().map(|e| (sym::start, e)).chain(e2.iter().map(|e| (sym::end, e))).map(
1292 let expr = self.lower_expr(&e);
1293 let ident = Ident::new(s, self.lower_span(e.span));
1294 self.expr_field(ident, expr, e.span)
1299 hir::ExprKind::Struct(
1300 self.arena.alloc(hir::QPath::LangItem(lang_item, self.lower_span(span), None)),
/// Lower an optional loop label, re-interning its identifier.
1306 fn lower_label(&self, opt_label: Option<Label>) -> Option<Label> {
1307 let label = opt_label?;
1308 Some(Label { ident: self.lower_ident(label.ident) })
/// Lower the target of a `break`/`continue` to a HIR `Destination`.
/// `destination` is `Some((label_node_id, label))` for a labelled jump;
/// `None` targets the innermost enclosing loop scope.
1311 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
1312 let target_id = match destination {
// Labelled: ask the resolver for the loop the label resolved to.
1314 if let Some(loop_id) = self.resolver.get_label_res(id) {
1315 Ok(self.lower_node_id(loop_id))
1317 Err(hir::LoopIdError::UnresolvedLabel)
// Unlabelled: fall back to the current loop scope, erroring when
// there is no enclosing loop at all.
1322 .map(|id| Ok(self.lower_node_id(id)))
1323 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
1325 let label = self.lower_label(destination.map(|(_, label)| label));
1326 hir::Destination { label, target_id }
/// Like `lower_loop_destination`, except that an *unlabelled* jump
/// occurring inside a loop condition resolves to the
/// `UnlabeledCfInWhileCondition` error destination.
1329 fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
1330 if self.is_in_loop_condition && opt_label.is_none() {
1333 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
1336 self.lower_loop_destination(opt_label.map(|label| (id, label)))
/// Run `f` with `catch_id` installed as the current catch (`try`-block)
/// scope, restoring the previous scope afterwards.
1340 fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1341 let old_scope = self.catch_scope.replace(catch_id);
1342 let result = f(self);
1343 self.catch_scope = old_scope;
/// Run `f` with `loop_id` installed as the innermost loop scope.
/// Also clears `is_in_loop_condition` for the duration — entering a
/// nested loop leaves the enclosing loop's condition — and restores
/// both pieces of state afterwards.
1347 fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1348 // We're no longer in the base loop's condition; we're in another loop.
1349 let was_in_loop_condition = self.is_in_loop_condition;
1350 self.is_in_loop_condition = false;
1352 let old_scope = self.loop_scope.replace(loop_id);
1353 let result = f(self);
1354 self.loop_scope = old_scope;
1356 self.is_in_loop_condition = was_in_loop_condition;
/// Run `f` with `is_in_loop_condition` set, restoring the previous
/// value afterwards. Used so unlabelled `break`/`continue` inside a
/// loop condition can be detected (see `lower_jump_destination`).
1361 fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
1362 let was_in_loop_condition = self.is_in_loop_condition;
1363 self.is_in_loop_condition = true;
1365 let result = f(self);
1367 self.is_in_loop_condition = was_in_loop_condition;
/// Lower a single struct-expression field (`ident: expr`), carrying
/// its attributes over to the new HIR id.
1372 fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> {
1373 let hir_id = self.lower_node_id(f.id);
1374 self.lower_attrs(hir_id, &f.attrs);
1377 ident: self.lower_ident(f.ident),
1378 expr: self.lower_expr(&f.expr),
1379 span: self.lower_span(f.span),
1380 is_shorthand: f.is_shorthand,
/// Lower `yield <expr>`. Marks the enclosing body as a generator
/// (`GeneratorKind::Gen`) if it was not one already, and rejects
/// `yield` inside an async context.
1384 fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1385 match self.generator_kind {
1386 Some(hir::GeneratorKind::Gen) => {}
1387 Some(hir::GeneratorKind::Async(_)) => {
1388 self.tcx.sess.emit_err(AsyncGeneratorsNotSupported { span });
1390 None => self.generator_kind = Some(hir::GeneratorKind::Gen),
// `yield` with no operand yields the unit value.
1394 opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1396 hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1399 /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
1400 /// ```ignore (pseudo-rust)
1402 /// let result = match IntoIterator::into_iter(<head>) {
1404 /// [opt_ident]: loop {
1405 /// match Iterator::next(&mut iter) {
1407 /// Some(<pat>) => <body>,
// NOTE(review): the start of this function's signature (presumably
// `fn lower_expr_for(...)`) is elided from this dump; only the last
// parameter and the body are visible.
1421 opt_label: Option<Label>,
1422 ) -> hir::Expr<'hir> {
1423 let head = self.lower_expr_mut(head);
1424 let pat = self.lower_pat(pat);
// All generated spans are marked with the `ForLoop` desugaring so
// diagnostics can tell this code is compiler-generated.
1426 self.mark_span_with_reason(DesugaringKind::ForLoop, self.lower_span(e.span), None);
1427 let head_span = self.mark_span_with_reason(DesugaringKind::ForLoop, head.span, None);
1428 let pat_span = self.mark_span_with_reason(DesugaringKind::ForLoop, pat.span, None);
// `None => break` arm of the inner `match` on `Iterator::next(..)`.
1433 self.with_loop_scope(e.id, |this| this.expr_break_alloc(for_span, AttrVec::new()));
1434 let pat = self.pat_none(for_span);
1435 self.arm(pat, break_expr)
1438 // Some(<pat>) => <body>,
1440 let some_pat = self.pat_some(pat_span, pat);
1441 let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
1442 let body_expr = self.arena.alloc(self.expr_block(body_block, AttrVec::new()));
1443 self.arm(some_pat, body_expr)
// `mut iter` — the binding that holds the iterator across iterations.
1447 let iter = Ident::with_dummy_span(sym::iter);
1448 let (iter_pat, iter_pat_nid) =
1449 self.pat_ident_binding_mode(head_span, iter, hir::BindingAnnotation::MUT);
1451 // `match Iterator::next(&mut iter) { ... }`
1453 let iter = self.expr_ident(head_span, iter, iter_pat_nid);
1454 let ref_mut_iter = self.expr_mut_addr_of(head_span, iter);
1455 let next_expr = self.expr_call_lang_item_fn(
1457 hir::LangItem::IteratorNext,
1458 arena_vec![self; ref_mut_iter],
1461 let arms = arena_vec![self; none_arm, some_arm];
1463 self.expr_match(head_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
1465 let match_stmt = self.stmt_expr(for_span, match_expr);
1467 let loop_block = self.block_all(for_span, arena_vec![self; match_stmt], None);
1469 // `[opt_ident]: loop { ... }`
1470 let kind = hir::ExprKind::Loop(
1472 self.lower_label(opt_label),
1473 hir::LoopSource::ForLoop,
1474 self.lower_span(for_span.with_hi(head.span.hi())),
// The loop expression reuses the original `for` expression's node id,
// so labels resolved against the `for` target the generated loop.
1477 self.arena.alloc(hir::Expr { hir_id: self.lower_node_id(e.id), kind, span: for_span });
1479 // `mut iter => { ... }`
1480 let iter_arm = self.arm(iter_pat, loop_expr);
1482 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
1483 let into_iter_expr = {
1484 self.expr_call_lang_item_fn(
1486 hir::LangItem::IntoIterIntoIter,
1487 arena_vec![self; head],
1492 let match_expr = self.arena.alloc(self.expr_match(
1495 arena_vec![self; iter_arm],
1496 hir::MatchSource::ForLoopDesugar,
1499 // This is effectively `{ let _result = ...; _result }`.
1500 // The construct was introduced in #21984 and is necessary to make sure that
1501 // temporaries in the `head` expression are dropped and do not leak to the
1502 // surrounding scope of the `match` since the `match` is not a terminating scope.
1504 // Also, add the attributes to the outer returned expr node.
1505 self.expr_drop_temps_mut(for_span, match_expr, e.attrs.clone())
1508 /// Desugar `ExprKind::Try` from: `<expr>?` into:
1509 /// ```ignore (pseudo-rust)
1510 /// match Try::branch(<expr>) {
1511 /// ControlFlow::Continue(val) => #[allow(unreachable_code)] val,
1512 /// ControlFlow::Break(residual) =>
1513 /// #[allow(unreachable_code)]
1514 /// // If there is an enclosing `try {...}`:
1515 /// break 'catch_target Try::from_residual(residual),
1517 /// return Try::from_residual(residual),
1520 fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
// Generated spans are marked with the `QuestionMark` desugaring and
// carry `self.allow_try_trait`, so the desugared code may reference
// the (unstable) `Try` trait machinery.
1521 let unstable_span = self.mark_span_with_reason(
1522 DesugaringKind::QuestionMark,
1524 self.allow_try_trait.clone(),
// `try_span` covers just the trailing `?` token of the expression.
1526 let try_span = self.tcx.sess.source_map().end_point(span);
1527 let try_span = self.mark_span_with_reason(
1528 DesugaringKind::QuestionMark,
1530 self.allow_try_trait.clone(),
1533 // `Try::branch(<expr>)`
1536 let sub_expr = self.lower_expr_mut(sub_expr);
1538 self.expr_call_lang_item_fn(
1540 hir::LangItem::TryTraitBranch,
1541 arena_vec![self; sub_expr],
1546 // `#[allow(unreachable_code)]`
// Built by hand from AST attribute parts, then attached to both arms
// so the lint does not fire on code after a diverging `?`.
1548 // `allow(unreachable_code)`
1550 let allow_ident = Ident::new(sym::allow, self.lower_span(span));
1551 let uc_ident = Ident::new(sym::unreachable_code, self.lower_span(span));
1552 let uc_nested = attr::mk_nested_word_item(uc_ident);
1553 attr::mk_list_item(allow_ident, vec![uc_nested])
1555 attr::mk_attr_outer(&self.tcx.sess.parse_sess.attr_id_generator, allow)
1557 let attrs: AttrVec = thin_vec![attr];
1559 // `ControlFlow::Continue(val) => #[allow(unreachable_code)] val,`
1560 let continue_arm = {
1561 let val_ident = Ident::with_dummy_span(sym::val);
1562 let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
1563 let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
1569 let continue_pat = self.pat_cf_continue(unstable_span, val_pat);
1570 self.arm(continue_pat, val_expr)
1573 // `ControlFlow::Break(residual) =>
1574 // #[allow(unreachable_code)]
1575 // return Try::from_residual(residual),`
1577 let residual_ident = Ident::with_dummy_span(sym::residual);
1578 let (residual_local, residual_local_nid) = self.pat_ident(try_span, residual_ident);
1579 let residual_expr = self.expr_ident_mut(try_span, residual_ident, residual_local_nid);
1580 let from_residual_expr = self.wrap_in_try_constructor(
1581 hir::LangItem::TryTraitFromResidual,
1583 self.arena.alloc(residual_expr),
// Inside an enclosing `try {}` the early exit is a `break` to the
// catch scope; otherwise it is a plain `return`.
1586 let ret_expr = if let Some(catch_node) = self.catch_scope {
1587 let target_id = Ok(self.lower_node_id(catch_node));
1588 self.arena.alloc(self.expr(
1590 hir::ExprKind::Break(
1591 hir::Destination { label: None, target_id },
1592 Some(from_residual_expr),
1597 self.arena.alloc(self.expr(
1599 hir::ExprKind::Ret(Some(from_residual_expr)),
1604 let break_pat = self.pat_cf_break(try_span, residual_local);
1605 self.arm(break_pat, ret_expr)
// Final `match` over `ControlFlow`, marked as a `?` desugaring.
1608 hir::ExprKind::Match(
1610 arena_vec![self; break_arm, continue_arm],
1611 hir::MatchSource::TryDesugar,
1615 /// Desugar `ExprKind::Yeet` from: `do yeet <expr>` into:
1617 /// // If there is an enclosing `try {...}`:
1618 /// break 'catch_target FromResidual::from_residual(Yeet(residual)),
1620 /// return FromResidual::from_residual(Yeet(residual)),
1622 /// But to simplify this, there's a `from_yeet` lang item function which
1623 /// handles the combined `FromResidual::from_residual(Yeet(residual))`.
1624 fn lower_expr_yeet(&mut self, span: Span, sub_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1625 // The expression (if present) or `()` otherwise.
1626 let (yeeted_span, yeeted_expr) = if let Some(sub_expr) = sub_expr {
1627 (sub_expr.span, self.lower_expr(sub_expr))
1629 (self.mark_span_with_reason(DesugaringKind::YeetExpr, span, None), self.expr_unit(span))
// Marked `YeetExpr` and carrying `allow_try_trait` so the generated
// code may reference the `Try` lang items.
1632 let unstable_span = self.mark_span_with_reason(
1633 DesugaringKind::YeetExpr,
1635 self.allow_try_trait.clone(),
// `from_yeet(<expr>)` — combines `FromResidual::from_residual(Yeet(..))`.
1638 let from_yeet_expr = self.wrap_in_try_constructor(
1639 hir::LangItem::TryTraitFromYeet,
// As in `lower_expr_try`: `break` to an enclosing `try {}` block if
// there is one, otherwise `return`.
1645 if let Some(catch_node) = self.catch_scope {
1646 let target_id = Ok(self.lower_node_id(catch_node));
1647 hir::ExprKind::Break(hir::Destination { label: None, target_id }, Some(from_yeet_expr))
1649 hir::ExprKind::Ret(Some(from_yeet_expr))
1653 // =========================================================================
1654 // Helper methods for building HIR.
1655 // =========================================================================
1657 /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1659 /// In terms of drop order, it has the same effect as wrapping `expr` in
1660 /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1662 /// The drop order can be important in e.g. `if expr { .. }`.
1663 pub(super) fn expr_drop_temps(
1666 expr: &'hir hir::Expr<'hir>,
1668 ) -> &'hir hir::Expr<'hir> {
// Arena-allocating wrapper around `expr_drop_temps_mut`.
1669 self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
/// By-value form of `expr_drop_temps`: builds the `DropTemps`
/// expression without arena-allocating it.
1672 pub(super) fn expr_drop_temps_mut(
1675 expr: &'hir hir::Expr<'hir>,
1677 ) -> hir::Expr<'hir> {
1678 self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
// Build a `match` expression with no attributes.
// NOTE(review): the `fn expr_match(...)` signature line is elided from
// this dump; only the trailing parameters and body are visible.
1684 arg: &'hir hir::Expr<'hir>,
1685 arms: &'hir [hir::Arm<'hir>],
1686 source: hir::MatchSource,
1687 ) -> hir::Expr<'hir> {
1688 self.expr(span, hir::ExprKind::Match(arg, arms, source), AttrVec::new())
/// Build a `break` with no value, targeting the innermost loop
/// (destination resolved via `lower_loop_destination(None)`).
1691 fn expr_break(&mut self, span: Span, attrs: AttrVec) -> hir::Expr<'hir> {
1692 let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
1693 self.expr(span, expr_break, attrs)
/// Arena-allocating wrapper around `expr_break`.
1696 fn expr_break_alloc(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
1697 let expr_break = self.expr_break(span, attrs);
1698 self.arena.alloc(expr_break)
/// Build `&mut <e>` (a mutable borrow of `e`).
1701 fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1704 hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
/// Build the unit expression `()` as an empty tuple.
1709 fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
1710 self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), AttrVec::new()))
// Build a call expression `e(args...)` by value, with no attributes.
// NOTE(review): the `fn expr_call_mut(...)` signature line is elided
// from this dump.
1716 e: &'hir hir::Expr<'hir>,
1717 args: &'hir [hir::Expr<'hir>],
1718 ) -> hir::Expr<'hir> {
1719 self.expr(span, hir::ExprKind::Call(e, args), AttrVec::new())
// Arena-allocating wrapper around `expr_call_mut`.
// NOTE(review): the `fn expr_call(...)` signature line is elided from
// this dump.
1725 e: &'hir hir::Expr<'hir>,
1726 args: &'hir [hir::Expr<'hir>],
1727 ) -> &'hir hir::Expr<'hir> {
1728 self.arena.alloc(self.expr_call_mut(span, e, args))
/// Build a call to a lang-item function: a path expression for the
/// lang item, applied to `args`.
1731 fn expr_call_lang_item_fn_mut(
1734 lang_item: hir::LangItem,
1735 args: &'hir [hir::Expr<'hir>],
1736 hir_id: Option<hir::HirId>,
1737 ) -> hir::Expr<'hir> {
1739 self.arena.alloc(self.expr_lang_item_path(span, lang_item, AttrVec::new(), hir_id));
1740 self.expr_call_mut(span, path, args)
/// Arena-allocating wrapper around `expr_call_lang_item_fn_mut`.
1743 fn expr_call_lang_item_fn(
1746 lang_item: hir::LangItem,
1747 args: &'hir [hir::Expr<'hir>],
1748 hir_id: Option<hir::HirId>,
1749 ) -> &'hir hir::Expr<'hir> {
1750 self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args, hir_id))
/// Build a path expression referring to a lang item
/// (`QPath::LangItem`).
1753 fn expr_lang_item_path(
1756 lang_item: hir::LangItem,
1758 hir_id: Option<hir::HirId>,
1759 ) -> hir::Expr<'hir> {
1762 hir::ExprKind::Path(hir::QPath::LangItem(lang_item, self.lower_span(span), hir_id)),
/// Arena-allocating wrapper around `expr_ident_mut`.
1767 pub(super) fn expr_ident(
1771 binding: hir::HirId,
1772 ) -> &'hir hir::Expr<'hir> {
1773 self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
/// Build an identifier expression for a local binding, with no
/// attributes. Delegates to `expr_ident_with_attrs`.
1776 pub(super) fn expr_ident_mut(
1780 binding: hir::HirId,
1781 ) -> hir::Expr<'hir> {
1782 self.expr_ident_with_attrs(sp, ident, binding, AttrVec::new())
/// Build a path expression that resolves to the local binding
/// `binding` (`Res::Local`), carrying the given attributes.
1785 fn expr_ident_with_attrs(
1789 binding: hir::HirId,
1791 ) -> hir::Expr<'hir> {
1792 let hir_id = self.next_id();
1793 let res = Res::Local(binding);
// Single-segment resolved path: just the identifier itself.
1794 let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
1796 self.arena.alloc(hir::Path {
1797 span: self.lower_span(span),
1799 segments: arena_vec![self; hir::PathSegment::new(ident, hir_id, res)],
1803 self.expr(span, expr_path, attrs)
/// Wrap `expr` in a compiler-generated `unsafe { ... }` block
/// (`UnsafeSource::CompilerGenerated`).
1806 fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1807 let hir_id = self.next_id();
1808 let span = expr.span;
1811 hir::ExprKind::Block(
1812 self.arena.alloc(hir::Block {
1816 rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
1817 span: self.lower_span(span),
1818 targeted_by_break: false,
/// Build an empty block expression `{}`.
1826 fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
1827 let blk = self.block_all(span, &[], None);
1828 let expr = self.expr_block(blk, AttrVec::new());
1829 self.arena.alloc(expr)
/// Wrap the block `b` in a block expression (no label), reusing the
/// block's own span.
1832 pub(super) fn expr_block(
1834 b: &'hir hir::Block<'hir>,
1836 ) -> hir::Expr<'hir> {
1837 self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
// Fundamental expression constructor: allocates a fresh `HirId`,
// lowers the attributes onto it, and lowers the span.
// NOTE(review): the `fn expr(...)` signature line is elided from this
// dump; only the trailing parameters and body are visible.
1843 kind: hir::ExprKind<'hir>,
1845 ) -> hir::Expr<'hir> {
1846 let hir_id = self.next_id();
1847 self.lower_attrs(hir_id, &attrs);
1848 hir::Expr { hir_id, kind, span: self.lower_span(span) }
// Build a (non-shorthand) struct-expression field with a fresh HirId.
// NOTE(review): the `fn expr_field(...)` signature line is elided from
// this dump.
1854 expr: &'hir hir::Expr<'hir>,
1856 ) -> hir::ExprField<'hir> {
1858 hir_id: self.next_id(),
1860 span: self.lower_span(span),
1862 is_shorthand: false,
1866 fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
1868 hir_id: self.next_id(),
1871 span: self.lower_span(expr.span),