2 AsyncGeneratorsNotSupported, AsyncNonMoveClosureNotSupported, AwaitOnlyInAsyncFnAndBlocks,
3 BaseExpressionDoubleDot, ClosureCannotBeStatic, FunctionalRecordUpdateDestructuringAssignemnt,
4 GeneratorTooManyParameters, InclusiveRangeWithNoEnd, NotSupportedForLifetimeBinderAsyncClosure,
5 RustcBoxAttributeError, UnderscoreExprLhsAssign,
7 use super::ResolverAstLoweringExt;
8 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
9 use crate::{FnDeclKind, ImplTraitPosition};
11 use rustc_ast::ptr::P as AstP;
13 use rustc_data_structures::stack::ensure_sufficient_stack;
15 use rustc_hir::def::Res;
16 use rustc_hir::definitions::DefPathData;
17 use rustc_session::errors::report_lit_error;
18 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
19 use rustc_span::symbol::{sym, Ident, Symbol};
20 use rustc_span::DUMMY_SP;
21 use thin_vec::thin_vec;
// NOTE(review): this chunk is a truncated extraction — the embedded original
// line numbers show gaps, so interior lines (braces, fields) are missing.
// Only comments are added; code bytes are untouched.
23 impl<'hir> LoweringContext<'_, 'hir> {
// Lowers a slice of AST expressions into an arena-allocated slice of HIR
// expressions, in order.
24 fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
25 self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
// Lowers a single AST expression and arena-allocates the resulting HIR node.
28 pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
29 self.arena.alloc(self.lower_expr_mut(e))
// The main AST-expression → HIR-expression lowering dispatch. `Paren` and
// `ForLoop` are handled up front (they do not get their own HirId / they
// manage attribute lowering themselves); every other `ExprKind` maps to a
// `hir::ExprKind` in the big match below.
32 pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
// Lowering recurses through deeply nested expressions; grow the stack if needed.
33 ensure_sufficient_stack(|| {
35 // Parenthesis expression does not have a HirId and is handled specially.
36 ExprKind::Paren(ex) => {
37 let mut ex = self.lower_expr_mut(ex);
38 // Include parens in span, but only if it is a super-span.
39 if e.span.contains(ex.span) {
40 ex.span = self.lower_span(e.span);
42 // Merge attributes into the inner expression.
43 if !e.attrs.is_empty() {
45 self.attrs.get(&ex.hir_id.local_id).map(|la| *la).unwrap_or(&[]);
48 &*self.arena.alloc_from_iter(
51 .map(|a| self.lower_attr(a))
52 .chain(old_attrs.iter().cloned()),
58 // Desugar `ExprForLoop`
59 // from: `[opt_ident]: for <pat> in <head> <body>`
61 // This also needs special handling because the HirId of the returned `hir::Expr` will not
62 // correspond to the `e.id`, so `lower_expr_for` handles attribute lowering itself.
63 ExprKind::ForLoop(pat, head, body, opt_label) => {
64 return self.lower_expr_for(e, pat, head, body, *opt_label);
69 let hir_id = self.lower_node_id(e.id);
70 self.lower_attrs(hir_id, &e.attrs);
72 let kind = match &e.kind {
73 ExprKind::Box(inner) => hir::ExprKind::Box(self.lower_expr(inner)),
74 ExprKind::Array(exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
75 ExprKind::ConstBlock(anon_const) => {
76 let anon_const = self.lower_anon_const(anon_const);
77 hir::ExprKind::ConstBlock(anon_const)
79 ExprKind::Repeat(expr, count) => {
80 let expr = self.lower_expr(expr);
81 let count = self.lower_array_length(count);
82 hir::ExprKind::Repeat(expr, count)
84 ExprKind::Tup(elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
85 ExprKind::Call(f, args) => {
// `#[rustc_box] f(x)` with exactly one argument and one attribute lowers
// straight to `hir::ExprKind::Box`; otherwise the attribute is an error.
86 if e.attrs.get(0).map_or(false, |a| a.has_name(sym::rustc_box)) {
87 if let [inner] = &args[..] && e.attrs.len() == 1 {
88 let kind = hir::ExprKind::Box(self.lower_expr(&inner));
89 return hir::Expr { hir_id, kind, span: self.lower_span(e.span) };
91 self.tcx.sess.emit_err(RustcBoxAttributeError { span: e.span });
94 } else if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f) {
95 self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
97 let f = self.lower_expr(f);
98 hir::ExprKind::Call(f, self.lower_exprs(args))
101 ExprKind::MethodCall(box MethodCall { seg, receiver, args, span }) => {
102 let hir_seg = self.arena.alloc(self.lower_path_segment(
106 ParenthesizedGenericArgs::Err,
107 &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
109 let receiver = self.lower_expr(receiver);
111 self.arena.alloc_from_iter(args.iter().map(|x| self.lower_expr_mut(x)));
112 hir::ExprKind::MethodCall(hir_seg, receiver, args, self.lower_span(*span))
114 ExprKind::Binary(binop, lhs, rhs) => {
115 let binop = self.lower_binop(*binop);
116 let lhs = self.lower_expr(lhs);
117 let rhs = self.lower_expr(rhs);
118 hir::ExprKind::Binary(binop, lhs, rhs)
120 ExprKind::Unary(op, ohs) => {
121 let op = self.lower_unop(*op);
122 let ohs = self.lower_expr(ohs);
123 hir::ExprKind::Unary(op, ohs)
125 ExprKind::Lit(token_lit) => {
// Literal errors are reported here rather than silently dropped.
126 let lit_kind = match LitKind::from_token_lit(*token_lit) {
127 Ok(lit_kind) => lit_kind,
129 report_lit_error(&self.tcx.sess.parse_sess, err, *token_lit, e.span);
133 hir::ExprKind::Lit(respan(self.lower_span(e.span), lit_kind))
135 ExprKind::IncludedBytes(bytes) => hir::ExprKind::Lit(respan(
136 self.lower_span(e.span),
137 LitKind::ByteStr(bytes.clone(), StrStyle::Cooked),
139 ExprKind::Cast(expr, ty) => {
140 let expr = self.lower_expr(expr);
142 self.lower_ty(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type));
143 hir::ExprKind::Cast(expr, ty)
145 ExprKind::Type(expr, ty) => {
146 let expr = self.lower_expr(expr);
148 self.lower_ty(ty, &ImplTraitContext::Disallowed(ImplTraitPosition::Type));
149 hir::ExprKind::Type(expr, ty)
151 ExprKind::AddrOf(k, m, ohs) => {
152 let ohs = self.lower_expr(ohs);
153 hir::ExprKind::AddrOf(*k, *m, ohs)
155 ExprKind::Let(pat, scrutinee, span) => {
156 hir::ExprKind::Let(self.arena.alloc(hir::Let {
157 hir_id: self.next_id(),
158 span: self.lower_span(*span),
159 pat: self.lower_pat(pat),
161 init: self.lower_expr(scrutinee),
164 ExprKind::If(cond, then, else_opt) => {
165 self.lower_expr_if(cond, then, else_opt.as_deref())
167 ExprKind::While(cond, body, opt_label) => self.with_loop_scope(e.id, |this| {
168 let span = this.mark_span_with_reason(DesugaringKind::WhileLoop, e.span, None);
169 this.lower_expr_while_in_loop_scope(span, cond, body, *opt_label)
171 ExprKind::Loop(body, opt_label, span) => self.with_loop_scope(e.id, |this| {
173 this.lower_block(body, false),
174 this.lower_label(*opt_label),
175 hir::LoopSource::Loop,
176 this.lower_span(*span),
179 ExprKind::TryBlock(body) => self.lower_expr_try_block(body),
180 ExprKind::Match(expr, arms) => hir::ExprKind::Match(
181 self.lower_expr(expr),
182 self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
183 hir::MatchSource::Normal,
185 ExprKind::Async(capture_clause, closure_node_id, block) => self.make_async_expr(
191 hir::AsyncGeneratorKind::Block,
192 |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
194 ExprKind::Await(expr) => {
// Compute the span of the `.await` suffix itself (everything after the
// awaited operand, minus intervening whitespace).
195 let dot_await_span = if expr.span.hi() < e.span.hi() {
196 let span_with_whitespace = self
200 .span_extend_while(expr.span, char::is_whitespace)
201 .unwrap_or(expr.span);
202 span_with_whitespace.shrink_to_hi().with_hi(e.span.hi())
204 // this is a recovered `await expr`
207 self.lower_expr_await(dot_await_span, expr)
209 ExprKind::Closure(box Closure {
220 if let Async::Yes { closure_id, .. } = asyncness {
221 self.lower_expr_async_closure(
233 self.lower_expr_closure(
246 ExprKind::Block(blk, opt_label) => {
247 let opt_label = self.lower_label(*opt_label);
248 hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
250 ExprKind::Assign(el, er, span) => self.lower_expr_assign(el, er, *span, e.span),
251 ExprKind::AssignOp(op, el, er) => hir::ExprKind::AssignOp(
252 self.lower_binop(*op),
256 ExprKind::Field(el, ident) => {
257 hir::ExprKind::Field(self.lower_expr(el), self.lower_ident(*ident))
259 ExprKind::Index(el, er) => {
260 hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
262 ExprKind::Range(Some(e1), Some(e2), RangeLimits::Closed) => {
263 self.lower_expr_range_closed(e.span, e1, e2)
265 ExprKind::Range(e1, e2, lims) => {
266 self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), *lims)
268 ExprKind::Underscore => {
// `_` is only meaningful as a destructuring-assignment LHS; bare use is an error.
269 self.tcx.sess.emit_err(UnderscoreExprLhsAssign { span: e.span });
272 ExprKind::Path(qself, path) => {
273 let qpath = self.lower_qpath(
278 &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
280 hir::ExprKind::Path(qpath)
282 ExprKind::Break(opt_label, opt_expr) => {
283 let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
284 hir::ExprKind::Break(self.lower_jump_destination(e.id, *opt_label), opt_expr)
286 ExprKind::Continue(opt_label) => {
287 hir::ExprKind::Continue(self.lower_jump_destination(e.id, *opt_label))
289 ExprKind::Ret(e) => {
290 let e = e.as_ref().map(|x| self.lower_expr(x));
291 hir::ExprKind::Ret(e)
293 ExprKind::Yeet(sub_expr) => self.lower_expr_yeet(e.span, sub_expr.as_deref()),
294 ExprKind::InlineAsm(asm) => {
295 hir::ExprKind::InlineAsm(self.lower_inline_asm(e.span, asm))
297 ExprKind::FormatArgs(fmt) => self.lower_format_args(e.span, fmt),
298 ExprKind::Struct(se) => {
299 let rest = match &se.rest {
300 StructRest::Base(e) => Some(self.lower_expr(e)),
301 StructRest::Rest(sp) => {
// A bare `..` (no base expression) is invalid in struct expressions;
// emit the error but recover with an error expression.
302 self.tcx.sess.emit_err(BaseExpressionDoubleDot { span: *sp });
303 Some(&*self.arena.alloc(self.expr_err(*sp)))
305 StructRest::None => None,
307 hir::ExprKind::Struct(
308 self.arena.alloc(self.lower_qpath(
313 &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
316 .alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
320 ExprKind::Yield(opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
321 ExprKind::Err => hir::ExprKind::Err,
322 ExprKind::Try(sub_expr) => self.lower_expr_try(e.span, sub_expr),
324 ExprKind::Paren(_) | ExprKind::ForLoop(..) => unreachable!("already handled"),
326 ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
329 hir::Expr { hir_id, kind, span: self.lower_span(e.span) }
// One-to-one mapping of AST unary operators to their HIR counterparts.
333 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
335 UnOp::Deref => hir::UnOp::Deref,
336 UnOp::Not => hir::UnOp::Not,
337 UnOp::Neg => hir::UnOp::Neg,
// One-to-one mapping of AST binary operators to HIR, re-spanned via `lower_span`.
341 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
344 BinOpKind::Add => hir::BinOpKind::Add,
345 BinOpKind::Sub => hir::BinOpKind::Sub,
346 BinOpKind::Mul => hir::BinOpKind::Mul,
347 BinOpKind::Div => hir::BinOpKind::Div,
348 BinOpKind::Rem => hir::BinOpKind::Rem,
349 BinOpKind::And => hir::BinOpKind::And,
350 BinOpKind::Or => hir::BinOpKind::Or,
351 BinOpKind::BitXor => hir::BinOpKind::BitXor,
352 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
353 BinOpKind::BitOr => hir::BinOpKind::BitOr,
354 BinOpKind::Shl => hir::BinOpKind::Shl,
355 BinOpKind::Shr => hir::BinOpKind::Shr,
356 BinOpKind::Eq => hir::BinOpKind::Eq,
357 BinOpKind::Lt => hir::BinOpKind::Lt,
358 BinOpKind::Le => hir::BinOpKind::Le,
359 BinOpKind::Ne => hir::BinOpKind::Ne,
360 BinOpKind::Ge => hir::BinOpKind::Ge,
361 BinOpKind::Gt => hir::BinOpKind::Gt,
363 span: self.lower_span(b.span),
// Lowers a call to a function using "legacy const generics": the argument
// positions listed in `legacy_args_idx` are value arguments in the AST but
// must become const generic arguments on the callee path in HIR.
367 fn lower_legacy_const_generics(
370 args: Vec<AstP<Expr>>,
371 legacy_args_idx: &[usize],
372 ) -> hir::ExprKind<'hir> {
// Legacy const generics only apply to plain (non-qualified) paths.
373 let ExprKind::Path(None, path) = &mut f.kind else {
377 // Split the arguments into const generics and normal arguments
378 let mut real_args = vec![];
379 let mut generic_args = vec![];
380 for (idx, arg) in args.into_iter().enumerate() {
381 if legacy_args_idx.contains(&idx) {
382 let parent_def_id = self.current_hir_id_owner;
383 let node_id = self.next_node_id();
385 // Add a definition for the in-band const def.
386 self.create_def(parent_def_id.def_id, node_id, DefPathData::AnonConst, f.span);
388 let anon_const = AnonConst { id: node_id, value: arg };
389 generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
395 // Add generic args to the last element of the path.
396 let last_segment = path.segments.last_mut().unwrap();
// A path that already has generic args here would be a logic error upstream.
397 assert!(last_segment.args.is_none());
398 last_segment.args = Some(AstP(GenericArgs::AngleBracketed(AngleBracketedArgs {
403 // Now lower everything as normal.
404 let f = self.lower_expr(&f);
405 hir::ExprKind::Call(f, self.lower_exprs(&real_args))
// NOTE(review): the signature head of `lower_expr_if` is missing from this
// extraction; the visible tail lowers `if cond { then } [else { .. }]` by
// wrapping the condition via `lower_cond` (terminating-scope semantics).
412 else_opt: Option<&Expr>,
413 ) -> hir::ExprKind<'hir> {
414 let lowered_cond = self.lower_cond(cond);
415 let then_expr = self.lower_block_expr(then);
416 if let Some(rslt) = else_opt {
419 self.arena.alloc(then_expr),
420 Some(self.lower_expr(rslt)),
// No `else` branch: the HIR `If` simply carries `None`.
423 hir::ExprKind::If(lowered_cond, self.arena.alloc(then_expr), None)
427 // Lowers a condition (i.e. `cond` in `if cond` or `while cond`), wrapping it in a terminating scope
428 // so that temporaries created in the condition don't live beyond it.
429 fn lower_cond(&mut self, cond: &Expr) -> &'hir hir::Expr<'hir> {
// Recursively detects whether any `let` appears in a (possibly `&&`-chained) condition.
430 fn has_let_expr(expr: &Expr) -> bool {
432 ExprKind::Binary(_, lhs, rhs) => has_let_expr(lhs) || has_let_expr(rhs),
433 ExprKind::Let(..) => true,
438 // We have to take special care for `let` exprs in the condition, e.g. in
439 // `if let pat = val` or `if foo && let pat = val`, as we _do_ want `val` to live beyond the
440 // condition in this case.
442 // In order to maintain the drop behavior for the non `let` parts of the condition,
443 // we still wrap them in terminating scopes, e.g. `if foo && let pat = val` essentially
444 // gets transformed into `if { let _t = foo; _t } && let pat = val`
446 ExprKind::Binary(op @ Spanned { node: ast::BinOpKind::And, .. }, lhs, rhs)
447 if has_let_expr(cond) =>
449 let op = self.lower_binop(*op);
// Recurse so each `&&` operand gets its own terminating-scope treatment.
450 let lhs = self.lower_cond(lhs);
451 let rhs = self.lower_cond(rhs);
453 self.arena.alloc(self.expr(cond.span, hir::ExprKind::Binary(op, lhs, rhs)))
// A `let` itself is lowered as-is: its scrutinee must outlive the condition.
455 ExprKind::Let(..) => self.lower_expr(cond),
// Plain condition: wrap in `DropTemps` (the `{ let _t = cond; _t }` desugar).
457 let cond = self.lower_expr(cond);
458 let reason = DesugaringKind::CondTemporary;
459 let span_block = self.mark_span_with_reason(reason, cond.span, None);
460 self.expr_drop_temps(span_block, cond)
465 // We desugar: `'label: while $cond $body` into:
469 // if { let _t = $cond; _t } {
478 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
479 // to preserve drop semantics since `while $cond { ... }` does not
480 // let temporaries live outside of `cond`.
481 fn lower_expr_while_in_loop_scope(
486 opt_label: Option<Label>,
487 ) -> hir::ExprKind<'hir> {
// Lower the condition inside a loop-condition scope so `break`/`continue`
// inside it are rejected correctly.
488 let lowered_cond = self.with_loop_condition_scope(|t| t.lower_cond(cond));
489 let then = self.lower_block_expr(body);
// The `else` branch of the generated `if` breaks out of the loop.
490 let expr_break = self.expr_break(span);
491 let stmt_break = self.stmt_expr(span, expr_break);
492 let else_blk = self.block_all(span, arena_vec![self; stmt_break], None);
493 let else_expr = self.arena.alloc(self.expr_block(else_blk));
494 let if_kind = hir::ExprKind::If(lowered_cond, self.arena.alloc(then), Some(else_expr));
495 let if_expr = self.expr(span, if_kind);
496 let block = self.block_expr(self.arena.alloc(if_expr));
// The loop's span covers only up to the end of the condition.
497 let span = self.lower_span(span.with_hi(cond.span.hi()));
498 let opt_label = self.lower_label(opt_label);
499 hir::ExprKind::Loop(block, opt_label, hir::LoopSource::While, span)
502 /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_output(<expr>) }`,
503 /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_output(()) }`
504 /// and save the block id to use it as a break target for desugaring of the `?` operator.
505 fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
506 self.with_catch_scope(body.id, |this| {
507 let mut block = this.lower_block_noalloc(body, true);
509 // Final expression of the block (if present) or `()` with span at the end of block
510 let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
512 this.mark_span_with_reason(
513 DesugaringKind::TryBlock,
515 this.allow_try_trait.clone(),
// No tail expression: synthesize `()` at the block's closing brace.
520 let try_span = this.mark_span_with_reason(
521 DesugaringKind::TryBlock,
522 this.tcx.sess.source_map().end_point(body.span),
523 this.allow_try_trait.clone(),
526 (try_span, this.expr_unit(try_span))
529 let ok_wrapped_span =
530 this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);
532 // `::std::ops::Try::from_output($tail_expr)`
533 block.expr = Some(this.wrap_in_try_constructor(
534 hir::LangItem::TryTraitFromOutput,
540 hir::ExprKind::Block(this.arena.alloc(block), None)
// Builds `<lang_item>(expr)` — a call to the given lang-item path with `expr`
// as the sole argument (used for e.g. `Try::from_output`).
544 fn wrap_in_try_constructor(
546 lang_item: hir::LangItem,
548 expr: &'hir hir::Expr<'hir>,
550 ) -> &'hir hir::Expr<'hir> {
551 let constructor = self.arena.alloc(self.expr_lang_item_path(method_span, lang_item, None));
552 self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
// Lowers one match arm: pattern, optional guard (`if` or `if let`), and body.
555 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
556 let pat = self.lower_pat(&arm.pat);
557 let guard = arm.guard.as_ref().map(|cond| {
// An `if let` guard gets its own dedicated HIR guard variant.
558 if let ExprKind::Let(pat, scrutinee, span) = &cond.kind {
559 hir::Guard::IfLet(self.arena.alloc(hir::Let {
560 hir_id: self.next_id(),
561 span: self.lower_span(*span),
562 pat: self.lower_pat(pat),
564 init: self.lower_expr(scrutinee),
567 hir::Guard::If(self.lower_expr(cond))
570 let hir_id = self.next_id();
571 self.lower_attrs(hir_id, &arm.attrs);
576 body: self.lower_expr(&arm.body),
577 span: self.lower_span(arm.span),
581 /// Lower an `async` construct to a generator that implements `Future`.
586 /// std::future::identity_future(static move? |_task_context| -> <ret_ty> {
590 pub(super) fn make_async_expr(
592 capture_clause: CaptureBy,
593 outer_hir_id: hir::HirId,
594 closure_node_id: NodeId,
595 ret_ty: Option<hir::FnRetTy<'hir>>,
597 async_gen_kind: hir::AsyncGeneratorKind,
598 body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
599 ) -> hir::ExprKind<'hir> {
// No explicit return type means the default (`-> ()` style) return.
600 let output = ret_ty.unwrap_or_else(|| hir::FnRetTy::DefaultReturn(self.lower_span(span)));
602 // Resume argument type: `ResumeTy`
604 self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
605 let resume_ty = hir::QPath::LangItem(hir::LangItem::ResumeTy, unstable_span, None);
606 let input_ty = hir::Ty {
607 hir_id: self.next_id(),
608 kind: hir::TyKind::Path(resume_ty),
612 // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
613 let fn_decl = self.arena.alloc(hir::FnDecl {
614 inputs: arena_vec![self; input_ty],
617 implicit_self: hir::ImplicitSelfKind::None,
618 lifetime_elision_allowed: false,
621 // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
622 let (pat, task_context_hid) = self.pat_ident_binding_mode(
624 Ident::with_dummy_span(sym::_task_context),
625 hir::BindingAnnotation::MUT,
627 let param = hir::Param {
628 hir_id: self.next_id(),
630 ty_span: self.lower_span(span),
631 span: self.lower_span(span),
633 let params = arena_vec![self; param];
635 let body = self.lower_body(move |this| {
636 this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
// Save and restore `task_context` so nested async constructs don't clobber it.
638 let old_ctx = this.task_context;
639 this.task_context = Some(task_context_hid);
640 let res = body(this);
641 this.task_context = old_ctx;
645 // `static |_task_context| -> <ret_ty> { body }`:
646 let generator_kind = {
647 let c = self.arena.alloc(hir::Closure {
648 def_id: self.local_def_id(closure_node_id),
649 binder: hir::ClosureBinder::Default,
651 bound_generic_params: &[],
654 fn_decl_span: self.lower_span(span),
656 movability: Some(hir::Movability::Static),
657 constness: hir::Constness::NotConst,
660 hir::ExprKind::Closure(c)
663 let hir_id = self.lower_node_id(closure_node_id);
665 self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
// Under the `closure_track_caller` feature, forward an outer `#[track_caller]`
// attribute onto the generated closure.
667 if self.tcx.features().closure_track_caller
668 && let Some(attrs) = self.attrs.get(&outer_hir_id.local_id)
669 && attrs.into_iter().any(|attr| attr.has_name(sym::track_caller))
674 kind: AttrKind::Normal(ptr::P(NormalAttr {
676 path: Path::from_ident(Ident::new(sym::track_caller, span)),
677 args: AttrArgs::Empty,
682 id: self.tcx.sess.parse_sess.attr_id_generator.mk_attr_id(),
683 style: AttrStyle::Outer,
689 let generator = hir::Expr { hir_id, kind: generator_kind, span: self.lower_span(span) };
692 // For some reason, the async block needs to flow through *any*
693 // call (like the identity function), as otherwise type and lifetime
694 // inference have a hard time figuring things out.
695 // Without this, we would get:
696 // E0720 in tests/ui/impl-trait/in-trait/default-body-with-rpit.rs
697 // E0700 in tests/ui/self/self_lifetime-async.rs
699 // `future::identity_future`:
700 let identity_future =
701 self.expr_lang_item_path(unstable_span, hir::LangItem::IdentityFuture, None);
703 // `future::identity_future(generator)`:
704 hir::ExprKind::Call(self.arena.alloc(identity_future), arena_vec![self; generator])
707 /// Desugar `<expr>.await` into:
708 /// ```ignore (pseudo-rust)
709 /// match ::std::future::IntoFuture::into_future(<expr>) {
710 /// mut __awaitee => loop {
711 /// match unsafe { ::std::future::Future::poll(
712 /// <::std::pin::Pin>::new_unchecked(&mut __awaitee),
713 /// ::std::future::get_context(task_context),
715 /// ::std::task::Poll::Ready(result) => break result,
716 /// ::std::task::Poll::Pending => {}
718 /// task_context = yield ();
722 fn lower_expr_await(&mut self, dot_await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
723 let full_span = expr.span.to(dot_await_span);
// `.await` is only legal inside an async generator body; report otherwise.
724 match self.generator_kind {
725 Some(hir::GeneratorKind::Async(_)) => {}
726 Some(hir::GeneratorKind::Gen) | None => {
727 self.tcx.sess.emit_err(AwaitOnlyInAsyncFnAndBlocks {
729 item_span: self.current_item,
733 let span = self.mark_span_with_reason(DesugaringKind::Await, dot_await_span, None);
734 let gen_future_span = self.mark_span_with_reason(
735 DesugaringKind::Await,
737 self.allow_gen_future.clone(),
739 let expr = self.lower_expr_mut(expr);
740 let expr_hir_id = expr.hir_id;
742 // Note that the name of this binding must not be changed to something else because
743 // debuggers and debugger extensions expect it to be called `__awaitee`. They use
744 // this name to identify what is being awaited by suspended async functions.
745 let awaitee_ident = Ident::with_dummy_span(sym::__awaitee);
746 let (awaitee_pat, awaitee_pat_hid) =
747 self.pat_ident_binding_mode(span, awaitee_ident, hir::BindingAnnotation::MUT);
749 let task_context_ident = Ident::with_dummy_span(sym::_task_context);
752 // ::std::future::Future::poll(
753 // ::std::pin::Pin::new_unchecked(&mut __awaitee),
754 // ::std::future::get_context(task_context),
758 let awaitee = self.expr_ident(span, awaitee_ident, awaitee_pat_hid);
759 let ref_mut_awaitee = self.expr_mut_addr_of(span, awaitee);
760 let task_context = if let Some(task_context_hid) = self.task_context {
761 self.expr_ident_mut(span, task_context_ident, task_context_hid)
763 // Use of `await` outside of an async context, we cannot use `task_context` here.
766 let new_unchecked = self.expr_call_lang_item_fn_mut(
768 hir::LangItem::PinNewUnchecked,
769 arena_vec![self; ref_mut_awaitee],
772 let get_context = self.expr_call_lang_item_fn_mut(
774 hir::LangItem::GetContext,
775 arena_vec![self; task_context],
778 let call = self.expr_call_lang_item_fn(
780 hir::LangItem::FuturePoll,
781 arena_vec![self; new_unchecked, get_context],
// The whole poll call is wrapped in an `unsafe` block (Pin::new_unchecked).
784 self.arena.alloc(self.expr_unsafe(call))
787 // `::std::task::Poll::Ready(result) => break result`
788 let loop_node_id = self.next_node_id();
789 let loop_hir_id = self.lower_node_id(loop_node_id);
791 let x_ident = Ident::with_dummy_span(sym::result);
792 let (x_pat, x_pat_hid) = self.pat_ident(gen_future_span, x_ident);
793 let x_expr = self.expr_ident(gen_future_span, x_ident, x_pat_hid);
794 let ready_field = self.single_pat_field(gen_future_span, x_pat);
795 let ready_pat = self.pat_lang_item_variant(
797 hir::LangItem::PollReady,
801 let break_x = self.with_loop_scope(loop_node_id, move |this| {
803 hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
804 this.arena.alloc(this.expr(gen_future_span, expr_break))
806 self.arm(ready_pat, break_x)
809 // `::std::task::Poll::Pending => {}`
811 let pending_pat = self.pat_lang_item_variant(
813 hir::LangItem::PollPending,
817 let empty_block = self.expr_block_empty(span);
818 self.arm(pending_pat, empty_block)
821 let inner_match_stmt = {
822 let match_expr = self.expr_match(
825 arena_vec![self; ready_arm, pending_arm],
826 hir::MatchSource::AwaitDesugar,
828 self.stmt_expr(span, match_expr)
831 // task_context = yield ();
833 let unit = self.expr_unit(span);
834 let yield_expr = self.expr(
836 hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr_hir_id) }),
838 let yield_expr = self.arena.alloc(yield_expr);
840 if let Some(task_context_hid) = self.task_context {
841 let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
843 self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span)));
844 self.stmt_expr(span, assign)
846 // Use of `await` outside of an async context. Return `yield_expr` so that we can
847 // proceed with type checking.
848 self.stmt(span, hir::StmtKind::Semi(yield_expr))
852 let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);
855 let loop_expr = self.arena.alloc(hir::Expr {
857 kind: hir::ExprKind::Loop(
860 hir::LoopSource::Loop,
861 self.lower_span(span),
863 span: self.lower_span(span),
866 // mut __awaitee => loop { ... }
867 let awaitee_arm = self.arm(awaitee_pat, loop_expr);
869 // `match ::std::future::IntoFuture::into_future(<expr>) { ... }`
870 let into_future_span = self.mark_span_with_reason(
871 DesugaringKind::Await,
873 self.allow_into_future.clone(),
875 let into_future_expr = self.expr_call_lang_item_fn(
877 hir::LangItem::IntoFutureIntoFuture,
878 arena_vec![self; expr],
882 // match <into_future_expr> {
883 // mut __awaitee => loop { .. }
885 hir::ExprKind::Match(
887 arena_vec![self; awaitee_arm],
888 hir::MatchSource::AwaitDesugar,
// Lowers a non-async closure expression to `hir::ExprKind::Closure`,
// detecting whether its body turned out to be a generator.
892 fn lower_expr_closure(
894 binder: &ClosureBinder,
895 capture_clause: CaptureBy,
898 movability: Movability,
903 ) -> hir::ExprKind<'hir> {
904 let (binder_clause, generic_params) = self.lower_closure_binder(binder);
906 let (body_id, generator_option) = self.with_new_scopes(move |this| {
// Track the enclosing item span while lowering the body for diagnostics.
907 let prev = this.current_item;
908 this.current_item = Some(fn_decl_span);
909 let mut generator_kind = None;
910 let body_id = this.lower_fn_body(decl, |this| {
911 let e = this.lower_expr_mut(body);
// Capture whether body lowering discovered a `yield` (generator body).
912 generator_kind = this.generator_kind;
915 let generator_option =
916 this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
917 this.current_item = prev;
918 (body_id, generator_option)
921 let bound_generic_params = self.lower_lifetime_binder(closure_id, generic_params);
922 // Lower outside new scope to preserve `is_in_loop_condition`.
923 let fn_decl = self.lower_fn_decl(decl, closure_id, fn_decl_span, FnDeclKind::Closure, None);
925 let c = self.arena.alloc(hir::Closure {
926 def_id: self.local_def_id(closure_id),
927 binder: binder_clause,
929 bound_generic_params,
932 fn_decl_span: self.lower_span(fn_decl_span),
933 fn_arg_span: Some(self.lower_span(fn_arg_span)),
934 movability: generator_option,
935 constness: self.lower_constness(constness),
938 hir::ExprKind::Closure(c)
// Validates generator-ness against the closure declaration and `static`
// modifier, emitting errors for invalid combinations, and returns the
// movability to record on the HIR closure (if it is a generator).
941 fn generator_movability_for_fn(
945 generator_kind: Option<hir::GeneratorKind>,
946 movability: Movability,
947 ) -> Option<hir::Movability> {
948 match generator_kind {
949 Some(hir::GeneratorKind::Gen) => {
// Plain generators take at most one (resume) parameter.
950 if decl.inputs.len() > 1 {
951 self.tcx.sess.emit_err(GeneratorTooManyParameters { fn_decl_span });
955 Some(hir::GeneratorKind::Async(_)) => {
// Async bodies are created by `make_async_expr`, never discovered here.
956 panic!("non-`async` closure body turned `async` during lowering");
// Not a generator: `static` on an ordinary closure is an error.
959 if movability == Movability::Static {
960 self.tcx.sess.emit_err(ClosureCannotBeStatic { fn_decl_span });
// Lowers an optional `for<'a, ...>` closure binder, returning the HIR binder
// plus the borrowed generic parameters to be lowered by the caller.
967 fn lower_closure_binder<'c>(
969 binder: &'c ClosureBinder,
970 ) -> (hir::ClosureBinder, &'c [GenericParam]) {
971 let (binder, params) = match binder {
972 ClosureBinder::NotPresent => (hir::ClosureBinder::Default, &[][..]),
973 ClosureBinder::For { span, generic_params } => {
974 let span = self.lower_span(*span);
975 (hir::ClosureBinder::For { span }, &**generic_params)
// Lowers an `async` closure by wrapping its body in an inner async block:
// `async |x| { .. }` becomes an ordinary closure whose body is the async
// generator produced by `make_async_expr`.
982 fn lower_expr_async_closure(
984 binder: &ClosureBinder,
985 capture_clause: CaptureBy,
987 closure_hir_id: hir::HirId,
988 inner_closure_id: NodeId,
993 ) -> hir::ExprKind<'hir> {
// `for<...>` binders are not supported on async closures.
994 if let &ClosureBinder::For { span, .. } = binder {
995 self.tcx.sess.emit_err(NotSupportedForLifetimeBinderAsyncClosure { span });
998 let (binder_clause, generic_params) = self.lower_closure_binder(binder);
// The outer closure keeps the inputs but drops the return type (it now
// returns a future, not the declared output).
1001 FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
1003 let body = self.with_new_scopes(|this| {
1004 // FIXME(cramertj): allow `async` non-`move` closures with arguments.
1005 if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
1006 this.tcx.sess.emit_err(AsyncNonMoveClosureNotSupported { fn_decl_span });
1009 // Transform `async |x: u8| -> X { ... }` into
1010 // `|x: u8| identity_future(|| -> X { ... })`.
1011 let body_id = this.lower_fn_body(&outer_decl, |this| {
// The declared return type moves onto the inner async body.
1012 let async_ret_ty = if let FnRetTy::Ty(ty) = &decl.output {
1013 let itctx = ImplTraitContext::Disallowed(ImplTraitPosition::AsyncBlock);
1014 Some(hir::FnRetTy::Return(this.lower_ty(&ty, &itctx)))
1019 let async_body = this.make_async_expr(
1025 hir::AsyncGeneratorKind::Closure,
1026 |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
1028 this.expr(fn_decl_span, async_body)
1033 let bound_generic_params = self.lower_lifetime_binder(closure_id, generic_params);
1034 // We need to lower the declaration outside the new scope, because we
1035 // have to conserve the state of being inside a loop condition for the
1036 // closure argument types.
1038 self.lower_fn_decl(&outer_decl, closure_id, fn_decl_span, FnDeclKind::Closure, None);
1040 let c = self.arena.alloc(hir::Closure {
1041 def_id: self.local_def_id(closure_id),
1042 binder: binder_clause,
1044 bound_generic_params,
1047 fn_decl_span: self.lower_span(fn_decl_span),
1048 fn_arg_span: Some(self.lower_span(fn_arg_span)),
1050 constness: hir::Constness::NotConst,
1052 hir::ExprKind::Closure(c)
1055 /// Destructure the LHS of complex assignments.
1056 /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
1057 fn lower_expr_assign(
1063 ) -> hir::ExprKind<'hir> {
1064 // Return early in case of an ordinary assignment.
1065 fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
1068 | ExprKind::Struct(..)
1070 | ExprKind::Underscore => false,
1071 // Check for tuple struct constructor.
1072 ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
1073 ExprKind::Paren(e) => {
1075 // We special-case `(..)` for consistency with patterns.
1076 ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
1077 _ => is_ordinary(lower_ctx, e),
// Ordinary LHS: emit a plain `hir::ExprKind::Assign` with no desugaring.
1083 if is_ordinary(self, lhs) {
1084 return hir::ExprKind::Assign(
1085 self.lower_expr(lhs),
1086 self.lower_expr(rhs),
1087 self.lower_span(eq_sign_span),
1091 let mut assignments = vec![];
1093 // The LHS becomes a pattern: `(lhs1, lhs2)`.
1094 let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
1095 let rhs = self.lower_expr(rhs);
1097 // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
1098 let destructure_let = self.stmt_let_pat(
1103 hir::LocalSource::AssignDesugar(self.lower_span(eq_sign_span)),
1106 // `a = lhs1; b = lhs2;`.
1109 .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));
1111 // Wrap everything in a block.
1112 hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None)
1115 /// If the given expression is a path to a tuple struct, returns that path.
1116 /// It is not a complete check, but just tries to reject most paths early
1117 /// if they are not tuple structs.
1118 /// Type checking will take care of the full validation later.
1119 fn extract_tuple_struct_path<'a>(
1122 ) -> Option<(&'a Option<AstP<QSelf>>, &'a Path)> {
1123 if let ExprKind::Path(qself, path) = &expr.kind {
1124 // Does the path resolve to something disallowed in a tuple struct/variant pattern?
1125 if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
1126 if let Some(res) = partial_res.full_res() && !res.expected_in_tuple_struct_pat() {
1130 return Some((qself, path));
1135 /// If the given expression is a path to a unit struct, returns that path.
1136 /// It is not a complete check, but just tries to reject most paths early
1137 /// if they are not unit structs.
1138 /// Type checking will take care of the full validation later.
1139 fn extract_unit_struct_path<'a>(
1142 ) -> Option<(&'a Option<AstP<QSelf>>, &'a Path)> {
1143 if let ExprKind::Path(qself, path) = &expr.kind {
1144 // Does the path resolve to something disallowed in a unit struct/variant pattern?
1145 if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
1146 if let Some(res) = partial_res.full_res() && !res.expected_in_unit_struct_pat() {
1150 return Some((qself, path));
1155 /// Convert the LHS of a destructuring assignment to a pattern.
1156 /// Each sub-assignment is recorded in `assignments`.
1157 fn destructure_assign(
1161 assignments: &mut Vec<hir::Stmt<'hir>>,
1162 ) -> &'hir hir::Pat<'hir> {
// Thin arena-allocating wrapper around `destructure_assign_mut`.
1163 self.arena.alloc(self.destructure_assign_mut(lhs, eq_sign_span, assignments))
/// Worker for `destructure_assign`: turns one assignment LHS into a HIR
/// pattern. Structured forms (tuples, slices, structs, tuple/unit
/// struct constructors, parens) recurse; every other expression is an
/// ordinary assignable place and becomes a fresh binding plus a deferred
/// `<place> = <binding>` statement pushed onto `assignments`.
1166 fn destructure_assign_mut(
1170 assignments: &mut Vec<hir::Stmt<'hir>>,
1171 ) -> hir::Pat<'hir> {
1173 // Underscore pattern.
1174 ExprKind::Underscore => {
1175 return self.pat_without_dbm(lhs.span, hir::PatKind::Wild);
// Slice destructuring, e.g. `[a, b, ..] = rhs`.
1178 ExprKind::Array(elements) => {
1180 self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
1181 let slice_pat = if let Some((i, span)) = rest {
// A `..` was present: split the element patterns around its position.
1182 let (before, after) = pats.split_at(i);
1183 hir::PatKind::Slice(
1185 Some(self.arena.alloc(self.pat_without_dbm(span, hir::PatKind::Wild))),
1189 hir::PatKind::Slice(pats, None, &[])
1191 return self.pat_without_dbm(lhs.span, slice_pat);
// `Ctor(a, b) = rhs` — only when the callee looks like a tuple struct/variant.
1194 ExprKind::Call(callee, args) => {
1195 if let Some((qself, path)) = self.extract_tuple_struct_path(callee) {
1196 let (pats, rest) = self.destructure_sequence(
1198 "tuple struct or variant",
1202 let qpath = self.lower_qpath(
1206 ParamMode::Optional,
1207 &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
1209 // Destructure like a tuple struct.
1210 let tuple_struct_pat = hir::PatKind::TupleStruct(
// `rest.map(|r| r.0)` is the index of the `..`, if one occurred.
1213 hir::DotDotPos::new(rest.map(|r| r.0)),
1215 return self.pat_without_dbm(lhs.span, tuple_struct_pat);
1218 // Unit structs and enum variants.
1219 ExprKind::Path(..) => {
1220 if let Some((qself, path)) = self.extract_unit_struct_path(lhs) {
1221 let qpath = self.lower_qpath(
1225 ParamMode::Optional,
1226 &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
1228 // Destructure like a unit struct.
1229 let unit_struct_pat = hir::PatKind::Path(qpath);
1230 return self.pat_without_dbm(lhs.span, unit_struct_pat);
// Struct destructuring: recurse into each field's expression.
1234 ExprKind::Struct(se) => {
1235 let field_pats = self.arena.alloc_from_iter(se.fields.iter().map(|f| {
1236 let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
1238 hir_id: self.next_id(),
1239 ident: self.lower_ident(f.ident),
1241 is_shorthand: f.is_shorthand,
1242 span: self.lower_span(f.span),
1245 let qpath = self.lower_qpath(
1249 ParamMode::Optional,
1250 &ImplTraitContext::Disallowed(ImplTraitPosition::Path),
// A functional-update base (`..base`) makes no sense on an assignment LHS:
// report it, but keep lowering (treating the fields as omitted) to recover.
1252 let fields_omitted = match &se.rest {
1253 StructRest::Base(e) => {
1254 self.tcx.sess.emit_err(FunctionalRecordUpdateDestructuringAssignemnt {
1259 StructRest::Rest(_) => true,
1260 StructRest::None => false,
1262 let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
1263 return self.pat_without_dbm(lhs.span, struct_pat);
// Tuple destructuring, e.g. `(a, b, ..) = rhs`.
1266 ExprKind::Tup(elements) => {
1268 self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
1269 let tuple_pat = hir::PatKind::Tuple(pats, hir::DotDotPos::new(rest.map(|r| r.0)));
1270 return self.pat_without_dbm(lhs.span, tuple_pat);
1272 ExprKind::Paren(e) => {
1273 // We special-case `(..)` for consistency with patterns.
1274 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1275 let tuple_pat = hir::PatKind::Tuple(&[], hir::DotDotPos::new(Some(0)));
1276 return self.pat_without_dbm(lhs.span, tuple_pat);
// Otherwise parentheses are transparent: recurse on the inner expression.
1278 return self.destructure_assign_mut(e, eq_sign_span, assignments);
1283 // Treat all other cases as normal lvalue.
// Bind the matched value to a fresh `lhs` identifier ...
1284 let ident = Ident::new(sym::lhs, self.lower_span(lhs.span));
1285 let (pat, binding) = self.pat_ident_mut(lhs.span, ident);
1286 let ident = self.expr_ident(lhs.span, ident, binding);
// ... and record the deferred assignment `<original place> = lhs`.
1288 hir::ExprKind::Assign(self.lower_expr(lhs), ident, self.lower_span(eq_sign_span));
1289 let expr = self.expr(lhs.span, assign);
1290 assignments.push(self.stmt_expr(lhs.span, expr));
1294 /// Destructure a sequence of expressions occurring on the LHS of an assignment.
1295 /// Such a sequence occurs in a tuple (struct)/slice.
1296 /// Return a sequence of corresponding patterns, and the index and the span of `..` if it
1298 /// Each sub-assignment is recorded in `assignments`.
1299 fn destructure_sequence(
1301 elements: &[AstP<Expr>],
1304 assignments: &mut Vec<hir::Stmt<'hir>>,
1305 ) -> (&'hir [hir::Pat<'hir>], Option<(usize, Span)>) {
// Index and span of the `..` element, once seen.
1306 let mut rest = None;
1308 self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
1309 // Check for `..` pattern.
1310 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
// At most one `..` is allowed per sequence; `ctx` names the construct
// ("tuple", "slice", ...) for the duplicate-rest error.
1311 if let Some((_, prev_span)) = rest {
1312 self.ban_extra_rest_pat(e.span, prev_span, ctx);
1314 rest = Some((i, e.span));
// Ordinary element: recurse to produce its sub-pattern.
1318 Some(self.destructure_assign_mut(e, eq_sign_span, assignments))
1324 /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
1325 fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
1326 let e1 = self.lower_expr_mut(e1);
1327 let e2 = self.lower_expr_mut(e2);
// Path to the `RangeInclusive::new` lang item.
1329 hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, self.lower_span(span), None);
1330 let fn_expr = self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path)));
// The whole range is just a plain call: `RangeInclusive::new(e1, e2)`.
1331 hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
/// Desugar the remaining range forms (`..`, `a..`, `..b`, `a..b`, `..=b`)
/// into struct literals of the corresponding `std::ops` range lang items.
1334 fn lower_expr_range(
1340 ) -> hir::ExprKind<'hir> {
1341 use rustc_ast::RangeLimits::*;
// Choose the range type from which bounds are present and the limit kind.
1343 let lang_item = match (e1, e2, lims) {
1344 (None, None, HalfOpen) => hir::LangItem::RangeFull,
1345 (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
1346 (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
1347 (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
1348 (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
// `a..=b` is handled by `lower_expr_range_closed`, never here.
1349 (Some(..), Some(..), Closed) => unreachable!(),
// `a..=` / `..=` with no end: report the error, then recover by lowering
// to the closest half-open equivalent so compilation can continue.
1350 (start, None, Closed) => {
1351 self.tcx.sess.emit_err(InclusiveRangeWithNoEnd { span });
1353 Some(..) => hir::LangItem::RangeFrom,
1354 None => hir::LangItem::RangeFull,
// Build `start`/`end` struct fields for whichever bounds exist.
1359 let fields = self.arena.alloc_from_iter(
1360 e1.iter().map(|e| (sym::start, e)).chain(e2.iter().map(|e| (sym::end, e))).map(
1362 let expr = self.lower_expr(&e);
1363 let ident = Ident::new(s, self.lower_span(e.span));
1364 self.expr_field(ident, expr, e.span)
// E.g. `a..b` becomes the struct literal `Range { start: a, end: b }`.
1369 hir::ExprKind::Struct(
1370 self.arena.alloc(hir::QPath::LangItem(lang_item, self.lower_span(span), None)),
1376 fn lower_label(&self, opt_label: Option<Label>) -> Option<Label> {
1377 let label = opt_label?;
1378 Some(Label { ident: self.lower_ident(label.ident) })
/// Resolve the target of a `break`/`continue`: the loop named by the label if
/// one was given, otherwise the innermost enclosing loop scope.
1381 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
1382 let target_id = match destination {
// Labeled: ask the resolver which loop the label refers to.
1384 if let Some(loop_id) = self.resolver.get_label_res(id) {
1385 Ok(self.lower_node_id(loop_id))
1387 Err(hir::LoopIdError::UnresolvedLabel)
// Unlabeled: fall back to the current loop scope, if any.
1392 .map(|id| Ok(self.lower_node_id(id)))
1393 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
1395 let label = self.lower_label(destination.map(|(_, label)| label));
1396 hir::Destination { label, target_id }
/// Destination for a `break`/`continue` expression.
1399 fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
// An unlabeled jump while lowering a `while` condition is invalid; encode
// the error in the destination so it is reported later.
1400 if self.is_in_loop_condition && opt_label.is_none() {
1403 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
1406 self.lower_loop_destination(opt_label.map(|label| (id, label)))
1410 fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1411 let old_scope = self.catch_scope.replace(catch_id);
1412 let result = f(self);
1413 self.catch_scope = old_scope;
1417 fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1418 // We're no longer in the base loop's condition; we're in another loop.
1419 let was_in_loop_condition = self.is_in_loop_condition;
1420 self.is_in_loop_condition = false;
1422 let old_scope = self.loop_scope.replace(loop_id);
1423 let result = f(self);
1424 self.loop_scope = old_scope;
1426 self.is_in_loop_condition = was_in_loop_condition;
1431 fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
1432 let was_in_loop_condition = self.is_in_loop_condition;
1433 self.is_in_loop_condition = true;
1435 let result = f(self);
1437 self.is_in_loop_condition = was_in_loop_condition;
/// Lower a single struct-literal field (`ident: expr`).
1442 fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> {
1443 let hir_id = self.lower_node_id(f.id);
// Attributes on the field are attached to its lowered HirId.
1444 self.lower_attrs(hir_id, &f.attrs);
1447 ident: self.lower_ident(f.ident),
1448 expr: self.lower_expr(&f.expr),
1449 span: self.lower_span(f.span),
1450 is_shorthand: f.is_shorthand,
/// Lower `yield <expr>`; also infers (or validates) the generator kind of the
/// enclosing body.
1454 fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1455 match self.generator_kind {
1456 Some(hir::GeneratorKind::Gen) => {}
// `yield` inside an async context is not supported.
1457 Some(hir::GeneratorKind::Async(_)) => {
1458 self.tcx.sess.emit_err(AsyncGeneratorsNotSupported { span });
// First `yield` seen: mark this body as a generator.
1460 None => self.generator_kind = Some(hir::GeneratorKind::Gen),
// `yield` with no operand yields `()`.
1464 opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1466 hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1469 /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
1470 /// ```ignore (pseudo-rust)
1472 /// let result = match IntoIterator::into_iter(<head>) {
1474 /// [opt_ident]: loop {
1475 /// match Iterator::next(&mut iter) {
1477 /// Some(<pat>) => <body>,
1491 opt_label: Option<Label>,
1492 ) -> hir::Expr<'hir> {
1493 let head = self.lower_expr_mut(head);
1494 let pat = self.lower_pat(pat);
// Mark all generated spans as `ForLoop` desugaring so diagnostics can tell.
1496 self.mark_span_with_reason(DesugaringKind::ForLoop, self.lower_span(e.span), None);
1497 let head_span = self.mark_span_with_reason(DesugaringKind::ForLoop, head.span, None);
1498 let pat_span = self.mark_span_with_reason(DesugaringKind::ForLoop, pat.span, None);
// `None => break` — the iterator is exhausted.
1502 let break_expr = self.with_loop_scope(e.id, |this| this.expr_break_alloc(for_span));
1503 let pat = self.pat_none(for_span);
1504 self.arm(pat, break_expr)
1507 // Some(<pat>) => <body>,
1509 let some_pat = self.pat_some(pat_span, pat);
// Lower the body inside the loop scope so `break`/`continue` resolve to it.
1510 let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
1511 let body_expr = self.arena.alloc(self.expr_block(body_block));
1512 self.arm(some_pat, body_expr)
// The `mut iter` binding that each iteration advances.
1516 let iter = Ident::with_dummy_span(sym::iter);
1517 let (iter_pat, iter_pat_nid) =
1518 self.pat_ident_binding_mode(head_span, iter, hir::BindingAnnotation::MUT);
1520 // `match Iterator::next(&mut iter) { ... }`
1522 let iter = self.expr_ident(head_span, iter, iter_pat_nid);
1523 let ref_mut_iter = self.expr_mut_addr_of(head_span, iter);
1524 let next_expr = self.expr_call_lang_item_fn(
1526 hir::LangItem::IteratorNext,
1527 arena_vec![self; ref_mut_iter],
1530 let arms = arena_vec![self; none_arm, some_arm];
1532 self.expr_match(head_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
1534 let match_stmt = self.stmt_expr(for_span, match_expr);
1536 let loop_block = self.block_all(for_span, arena_vec![self; match_stmt], None);
1538 // `[opt_ident]: loop { ... }`
1539 let kind = hir::ExprKind::Loop(
1541 self.lower_label(opt_label),
1542 hir::LoopSource::ForLoop,
1543 self.lower_span(for_span.with_hi(head.span.hi())),
// The loop reuses the `for` expression's NodeId, so labeled jumps that
// resolved to the `for` now target the generated `loop`.
1546 self.arena.alloc(hir::Expr { hir_id: self.lower_node_id(e.id), kind, span: for_span });
1548 // `mut iter => { ... }`
1549 let iter_arm = self.arm(iter_pat, loop_expr);
1551 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
1552 let into_iter_expr = {
1553 self.expr_call_lang_item_fn(
1555 hir::LangItem::IntoIterIntoIter,
1556 arena_vec![self; head],
1561 let match_expr = self.arena.alloc(self.expr_match(
1564 arena_vec![self; iter_arm],
1565 hir::MatchSource::ForLoopDesugar,
1568 // This is effectively `{ let _result = ...; _result }`.
1569 // The construct was introduced in #21984 and is necessary to make sure that
1570 // temporaries in the `head` expression are dropped and do not leak to the
1571 // surrounding scope of the `match` since the `match` is not a terminating scope.
1573 // Also, add the attributes to the outer returned expr node.
1574 let expr = self.expr_drop_temps_mut(for_span, match_expr);
1575 self.lower_attrs(expr.hir_id, &e.attrs);
1579 /// Desugar `ExprKind::Try` from: `<expr>?` into:
1580 /// ```ignore (pseudo-rust)
1581 /// match Try::branch(<expr>) {
1582 /// ControlFlow::Continue(val) => #[allow(unreachable_code)] val,,
1583 /// ControlFlow::Break(residual) =>
1584 /// #[allow(unreachable_code)]
1585 /// // If there is an enclosing `try {...}`:
1586 /// break 'catch_target Try::from_residual(residual),
1588 /// return Try::from_residual(residual),
1591 fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
// Generated spans are marked `QuestionMark` and carry the try-trait
// feature allowance for the unstable lang items used below.
1592 let unstable_span = self.mark_span_with_reason(
1593 DesugaringKind::QuestionMark,
1595 self.allow_try_trait.clone(),
// `try_span` covers just the trailing `?` token.
1597 let try_span = self.tcx.sess.source_map().end_point(span);
1598 let try_span = self.mark_span_with_reason(
1599 DesugaringKind::QuestionMark,
1601 self.allow_try_trait.clone(),
1604 // `Try::branch(<expr>)`
1607 let sub_expr = self.lower_expr_mut(sub_expr);
1609 self.expr_call_lang_item_fn(
1611 hir::LangItem::TryTraitBranch,
1612 arena_vec![self; sub_expr],
1617 // `#[allow(unreachable_code)]`
1618 let attr = attr::mk_attr_nested_word(
1619 &self.tcx.sess.parse_sess.attr_id_generator,
1622 sym::unreachable_code,
1623 self.lower_span(span),
1625 let attrs: AttrVec = thin_vec![attr];
1627 // `ControlFlow::Continue(val) => #[allow(unreachable_code)] val,`
1628 let continue_arm = {
1629 let val_ident = Ident::with_dummy_span(sym::val);
1630 let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
1631 let val_expr = self.expr_ident(span, val_ident, val_pat_nid);
1632 self.lower_attrs(val_expr.hir_id, &attrs);
1633 let continue_pat = self.pat_cf_continue(unstable_span, val_pat);
1634 self.arm(continue_pat, val_expr)
1637 // `ControlFlow::Break(residual) =>
1638 // #[allow(unreachable_code)]
1639 // return Try::from_residual(residual),`
1641 let residual_ident = Ident::with_dummy_span(sym::residual);
1642 let (residual_local, residual_local_nid) = self.pat_ident(try_span, residual_ident);
1643 let residual_expr = self.expr_ident_mut(try_span, residual_ident, residual_local_nid);
1644 let from_residual_expr = self.wrap_in_try_constructor(
1645 hir::LangItem::TryTraitFromResidual,
1647 self.arena.alloc(residual_expr),
// Inside a `try` block the residual `break`s to that block; otherwise the
// enclosing function `return`s it.
1650 let ret_expr = if let Some(catch_node) = self.catch_scope {
1651 let target_id = Ok(self.lower_node_id(catch_node));
1652 self.arena.alloc(self.expr(
1654 hir::ExprKind::Break(
1655 hir::Destination { label: None, target_id },
1656 Some(from_residual_expr),
1660 self.arena.alloc(self.expr(try_span, hir::ExprKind::Ret(Some(from_residual_expr))))
1662 self.lower_attrs(ret_expr.hir_id, &attrs);
1664 let break_pat = self.pat_cf_break(try_span, residual_local);
1665 self.arm(break_pat, ret_expr)
1668 hir::ExprKind::Match(
1670 arena_vec![self; break_arm, continue_arm],
1671 hir::MatchSource::TryDesugar,
1675 /// Desugar `ExprKind::Yeet` from: `do yeet <expr>` into:
1676 /// ```ignore(illustrative)
1677 /// // If there is an enclosing `try {...}`:
1678 /// break 'catch_target FromResidual::from_residual(Yeet(residual));
1680 /// return FromResidual::from_residual(Yeet(residual));
1682 /// But to simplify this, there's a `from_yeet` lang item function which
1683 /// handles the combined `FromResidual::from_residual(Yeet(residual))`.
1684 fn lower_expr_yeet(&mut self, span: Span, sub_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1685 // The expression (if present) or `()` otherwise.
1686 let (yeeted_span, yeeted_expr) = if let Some(sub_expr) = sub_expr {
1687 (sub_expr.span, self.lower_expr(sub_expr))
1689 (self.mark_span_with_reason(DesugaringKind::YeetExpr, span, None), self.expr_unit(span))
1692 let unstable_span = self.mark_span_with_reason(
1693 DesugaringKind::YeetExpr,
1695 self.allow_try_trait.clone(),
// `from_yeet(<yeeted>)` — the combined constructor described above.
1698 let from_yeet_expr = self.wrap_in_try_constructor(
1699 hir::LangItem::TryTraitFromYeet,
// As with `?`: break to the enclosing `try` block if any, else return.
1705 if let Some(catch_node) = self.catch_scope {
1706 let target_id = Ok(self.lower_node_id(catch_node));
1707 hir::ExprKind::Break(hir::Destination { label: None, target_id }, Some(from_yeet_expr))
1709 hir::ExprKind::Ret(Some(from_yeet_expr))
1713 // =========================================================================
1714 // Helper methods for building HIR.
1715 // =========================================================================
1717 /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1719 /// In terms of drop order, it has the same effect as wrapping `expr` in
1720 /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1722 /// The drop order can be important in e.g. `if expr { .. }`.
1723 pub(super) fn expr_drop_temps(
1726 expr: &'hir hir::Expr<'hir>,
1727 ) -> &'hir hir::Expr<'hir> {
// Arena-allocating wrapper around `expr_drop_temps_mut`.
1728 self.arena.alloc(self.expr_drop_temps_mut(span, expr))
/// Non-allocating form of `expr_drop_temps`; returns the expression by value.
1731 pub(super) fn expr_drop_temps_mut(
1734 expr: &'hir hir::Expr<'hir>,
1735 ) -> hir::Expr<'hir> {
1736 self.expr(span, hir::ExprKind::DropTemps(expr))
/// Build a `match` expression from an already-lowered scrutinee and arms.
1739 pub(super) fn expr_match(
1742 arg: &'hir hir::Expr<'hir>,
1743 arms: &'hir [hir::Arm<'hir>],
1744 source: hir::MatchSource,
1745 ) -> hir::Expr<'hir> {
1746 self.expr(span, hir::ExprKind::Match(arg, arms, source))
1749 fn expr_break(&mut self, span: Span) -> hir::Expr<'hir> {
1750 let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
1751 self.expr(span, expr_break)
1754 fn expr_break_alloc(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
1755 let expr_break = self.expr_break(span);
1756 self.arena.alloc(expr_break)
1759 fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1760 self.expr(span, hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e))
1763 fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
1764 self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[])))
/// Build a `usize` literal expression with the given value.
1767 pub(super) fn expr_usize(&mut self, sp: Span, value: usize) -> hir::Expr<'hir> {
1770 hir::ExprKind::Lit(hir::Lit {
1772 node: ast::LitKind::Int(
1774 ast::LitIntType::Unsigned(ast::UintTy::Usize),
/// Build a `u32` literal expression with the given value.
1780 pub(super) fn expr_u32(&mut self, sp: Span, value: u32) -> hir::Expr<'hir> {
1783 hir::ExprKind::Lit(hir::Lit {
1785 node: ast::LitKind::Int(value.into(), ast::LitIntType::Unsigned(ast::UintTy::U32)),
1790 pub(super) fn expr_char(&mut self, sp: Span, value: char) -> hir::Expr<'hir> {
1791 self.expr(sp, hir::ExprKind::Lit(hir::Lit { span: sp, node: ast::LitKind::Char(value) }))
/// Build a cooked (non-raw) string literal expression with the given value.
1794 pub(super) fn expr_str(&mut self, sp: Span, value: Symbol) -> hir::Expr<'hir> {
1797 hir::ExprKind::Lit(hir::Lit {
1799 node: ast::LitKind::Str(value, ast::StrStyle::Cooked),
/// Build a call expression `e(args)` by value; see `expr_call` for the
/// arena-allocated form.
1804 pub(super) fn expr_call_mut(
1807 e: &'hir hir::Expr<'hir>,
1808 args: &'hir [hir::Expr<'hir>],
1809 ) -> hir::Expr<'hir> {
1810 self.expr(span, hir::ExprKind::Call(e, args))
/// Arena-allocating wrapper around `expr_call_mut`.
1813 pub(super) fn expr_call(
1816 e: &'hir hir::Expr<'hir>,
1817 args: &'hir [hir::Expr<'hir>],
1818 ) -> &'hir hir::Expr<'hir> {
1819 self.arena.alloc(self.expr_call_mut(span, e, args))
/// Build a call to a lang-item function: `<lang_item>(args)`.
1822 fn expr_call_lang_item_fn_mut(
1825 lang_item: hir::LangItem,
1826 args: &'hir [hir::Expr<'hir>],
1827 hir_id: Option<hir::HirId>,
1828 ) -> hir::Expr<'hir> {
// `hir_id` is forwarded to the lang-item path expression.
1829 let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item, hir_id));
1830 self.expr_call_mut(span, path, args)
/// Arena-allocating wrapper around `expr_call_lang_item_fn_mut`.
1833 fn expr_call_lang_item_fn(
1836 lang_item: hir::LangItem,
1837 args: &'hir [hir::Expr<'hir>],
1838 hir_id: Option<hir::HirId>,
1839 ) -> &'hir hir::Expr<'hir> {
1840 self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args, hir_id))
/// Build a path expression referring to a lang item.
1843 fn expr_lang_item_path(
1846 lang_item: hir::LangItem,
1847 hir_id: Option<hir::HirId>,
1848 ) -> hir::Expr<'hir> {
1851 hir::ExprKind::Path(hir::QPath::LangItem(lang_item, self.lower_span(span), hir_id)),
1855 /// `<LangItem>::name`
/// Build a type-relative path expression to an associated item of a lang item.
1856 pub(super) fn expr_lang_item_type_relative(
1859 lang_item: hir::LangItem,
1861 ) -> hir::Expr<'hir> {
// The qualified-self type is the lang item itself ...
1862 let path = hir::ExprKind::Path(hir::QPath::TypeRelative(
1863 self.arena.alloc(self.ty(
1865 hir::TyKind::Path(hir::QPath::LangItem(lang_item, self.lower_span(span), None)),
// ... and `name` forms the associated-item segment.
1867 self.arena.alloc(hir::PathSegment::new(
1868 Ident::new(name, span),
1873 self.expr(span, path)
/// Arena-allocating wrapper around `expr_ident_mut`.
1876 pub(super) fn expr_ident(
1880 binding: hir::HirId,
1881 ) -> &'hir hir::Expr<'hir> {
1882 self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
/// Build an expression referring to a local binding: a resolved single-segment
/// path whose resolution is `Res::Local(binding)`.
1885 pub(super) fn expr_ident_mut(
1889 binding: hir::HirId,
1890 ) -> hir::Expr<'hir> {
1891 let hir_id = self.next_id();
1892 let res = Res::Local(binding);
1893 let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
1895 self.arena.alloc(hir::Path {
1896 span: self.lower_span(span),
// Single segment naming the binding; it reuses the fresh `hir_id`.
1898 segments: arena_vec![self; hir::PathSegment::new(ident, hir_id, res)],
1902 self.expr(span, expr_path)
/// Wrap `expr` in a compiler-generated `unsafe { ... }` block.
1905 fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1906 let hir_id = self.next_id();
1907 let span = expr.span;
1910 hir::ExprKind::Block(
1911 self.arena.alloc(hir::Block {
// Marked as compiler-generated, not user-written `unsafe`.
1915 rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
1916 span: self.lower_span(span),
1917 targeted_by_break: false,
1924 fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
1925 let blk = self.block_all(span, &[], None);
1926 let expr = self.expr_block(blk);
1927 self.arena.alloc(expr)
1930 pub(super) fn expr_block(&mut self, b: &'hir hir::Block<'hir>) -> hir::Expr<'hir> {
1931 self.expr(b.span, hir::ExprKind::Block(b, None))
/// Build `&[e1, e2, ...]` — a shared reference to an array literal.
1934 pub(super) fn expr_array_ref(
1937 elements: &'hir [hir::Expr<'hir>],
1938 ) -> hir::Expr<'hir> {
1939 let addrof = hir::ExprKind::AddrOf(
1940 hir::BorrowKind::Ref,
1941 hir::Mutability::Not,
// The array itself is arena-allocated; the borrow wraps it.
1942 self.arena.alloc(self.expr(span, hir::ExprKind::Array(elements))),
1944 self.expr(span, addrof)
1947 pub(super) fn expr(&mut self, span: Span, kind: hir::ExprKind<'hir>) -> hir::Expr<'hir> {
1948 let hir_id = self.next_id();
1949 hir::Expr { hir_id, kind, span: self.lower_span(span) }
/// Build a struct-literal field (`ident: expr`) with a fresh `HirId`.
1952 pub(super) fn expr_field(
1955 expr: &'hir hir::Expr<'hir>,
1957 ) -> hir::ExprField<'hir> {
1959 hir_id: self.next_id(),
1961 span: self.lower_span(span),
// Fields built by desugarings are always written out in full.
1963 is_shorthand: false,
1969 pat: &'hir hir::Pat<'hir>,
1970 expr: &'hir hir::Expr<'hir>,
1971 ) -> hir::Arm<'hir> {
1973 hir_id: self.next_id(),
1976 span: self.lower_span(expr.span),