compiler/rustc_ast_lowering/src/expr.rs
1 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
2
3 use rustc_ast::attr;
4 use rustc_ast::ptr::P as AstP;
5 use rustc_ast::*;
6 use rustc_data_structures::fx::FxHashMap;
7 use rustc_data_structures::stack::ensure_sufficient_stack;
8 use rustc_data_structures::thin_vec::ThinVec;
9 use rustc_errors::struct_span_err;
10 use rustc_hir as hir;
11 use rustc_hir::def::Res;
12 use rustc_session::parse::feature_err;
13 use rustc_span::hygiene::ForLoopLoc;
14 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
15 use rustc_span::symbol::{sym, Ident, Symbol};
16 use rustc_target::asm;
17 use std::collections::hash_map::Entry;
18 use std::fmt::Write;
19
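// Lowering of AST expressions (`ast::Expr`) into their HIR counterparts
// (`hir::Expr`). `lower_expr_mut` is the main entry point; constructs such as
// `if`/`while`, `async`/`await`, `try` blocks, ranges, and destructuring
// assignments are desugared here into simpler HIR.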
20 impl<'hir> LoweringContext<'_, 'hir> {
21     fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
22         self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
23     }
24
25     pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
26         self.arena.alloc(self.lower_expr_mut(e))
27     }
28
29     pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
30         ensure_sufficient_stack(|| {
31             let kind = match e.kind {
32                 ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
33                 ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
34                 ExprKind::ConstBlock(ref anon_const) => {
35                     let anon_const = self.lower_anon_const(anon_const);
36                     hir::ExprKind::ConstBlock(anon_const)
37                 }
38                 ExprKind::Repeat(ref expr, ref count) => {
39                     let expr = self.lower_expr(expr);
40                     let count = self.lower_anon_const(count);
41                     hir::ExprKind::Repeat(expr, count)
42                 }
43                 ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
44                 ExprKind::Call(ref f, ref args) => {
45                     let f = self.lower_expr(f);
46                     hir::ExprKind::Call(f, self.lower_exprs(args))
47                 }
48                 ExprKind::MethodCall(ref seg, ref args, span) => {
49                     let hir_seg = self.arena.alloc(self.lower_path_segment(
50                         e.span,
51                         seg,
52                         ParamMode::Optional,
53                         0,
54                         ParenthesizedGenericArgs::Err,
55                         ImplTraitContext::disallowed(),
56                         None,
57                     ));
58                     let args = self.lower_exprs(args);
59                     hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args, span)
60                 }
61                 ExprKind::Binary(binop, ref lhs, ref rhs) => {
62                     let binop = self.lower_binop(binop);
63                     let lhs = self.lower_expr(lhs);
64                     let rhs = self.lower_expr(rhs);
65                     hir::ExprKind::Binary(binop, lhs, rhs)
66                 }
67                 ExprKind::Unary(op, ref ohs) => {
68                     let op = self.lower_unop(op);
69                     let ohs = self.lower_expr(ohs);
70                     hir::ExprKind::Unary(op, ohs)
71                 }
72                 ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())),
73                 ExprKind::Cast(ref expr, ref ty) => {
74                     let expr = self.lower_expr(expr);
75                     let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
76                     hir::ExprKind::Cast(expr, ty)
77                 }
78                 ExprKind::Type(ref expr, ref ty) => {
79                     let expr = self.lower_expr(expr);
80                     let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
81                     hir::ExprKind::Type(expr, ty)
82                 }
83                 ExprKind::AddrOf(k, m, ref ohs) => {
84                     let ohs = self.lower_expr(ohs);
85                     hir::ExprKind::AddrOf(k, m, ohs)
86                 }
87                 ExprKind::Let(ref pat, ref scrutinee) => {
88                     self.lower_expr_let(e.span, pat, scrutinee)
89                 }
90                 ExprKind::If(ref cond, ref then, ref else_opt) => {
91                     self.lower_expr_if(e.span, cond, then, else_opt.as_deref())
92                 }
93                 ExprKind::While(ref cond, ref body, opt_label) => self
94                     .with_loop_scope(e.id, |this| {
95                         this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label)
96                     }),
97                 ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
98                     hir::ExprKind::Loop(
99                         this.lower_block(body, false),
100                         opt_label,
101                         hir::LoopSource::Loop,
102                     )
103                 }),
104                 ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
105                 ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
106                     self.lower_expr(expr),
107                     self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
108                     hir::MatchSource::Normal,
109                 ),
110                 ExprKind::Async(capture_clause, closure_node_id, ref block) => self
111                     .make_async_expr(
112                         capture_clause,
113                         closure_node_id,
114                         None,
115                         block.span,
116                         hir::AsyncGeneratorKind::Block,
117                         |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
118                     ),
119                 ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
120                 ExprKind::Closure(
121                     capture_clause,
122                     asyncness,
123                     movability,
124                     ref decl,
125                     ref body,
126                     fn_decl_span,
127                 ) => {
128                     if let Async::Yes { closure_id, .. } = asyncness {
129                         self.lower_expr_async_closure(
130                             capture_clause,
131                             closure_id,
132                             decl,
133                             body,
134                             fn_decl_span,
135                         )
136                     } else {
137                         self.lower_expr_closure(
138                             capture_clause,
139                             movability,
140                             decl,
141                             body,
142                             fn_decl_span,
143                         )
144                     }
145                 }
146                 ExprKind::Block(ref blk, opt_label) => {
147                     hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
148                 }
149                 ExprKind::Assign(ref el, ref er, span) => {
150                     self.lower_expr_assign(el, er, span, e.span)
151                 }
152                 ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
153                     self.lower_binop(op),
154                     self.lower_expr(el),
155                     self.lower_expr(er),
156                 ),
157                 ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident),
158                 ExprKind::Index(ref el, ref er) => {
159                     hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
160                 }
161                 ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
162                     self.lower_expr_range_closed(e.span, e1, e2)
163                 }
164                 ExprKind::Range(ref e1, ref e2, lims) => {
165                     self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
166                 }
167                 ExprKind::Path(ref qself, ref path) => {
168                     let qpath = self.lower_qpath(
169                         e.id,
170                         qself,
171                         path,
172                         ParamMode::Optional,
173                         ImplTraitContext::disallowed(),
174                     );
175                     hir::ExprKind::Path(qpath)
176                 }
177                 ExprKind::Break(opt_label, ref opt_expr) => {
178                     let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
179                     hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
180                 }
181                 ExprKind::Continue(opt_label) => {
182                     hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
183                 }
184                 ExprKind::Ret(ref e) => {
185                     let e = e.as_ref().map(|x| self.lower_expr(x));
186                     hir::ExprKind::Ret(e)
187                 }
188                 ExprKind::InlineAsm(ref asm) => self.lower_expr_asm(e.span, asm),
189                 ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_llvm_asm(asm),
190                 ExprKind::Struct(ref path, ref fields, ref rest) => {
191                     let rest = match rest {
192                         StructRest::Base(e) => Some(self.lower_expr(e)),
193                         StructRest::Rest(sp) => {
194                             self.sess
195                                 .struct_span_err(*sp, "base expression required after `..`")
196                                 .span_label(*sp, "add a base expression here")
197                                 .emit();
198                             Some(&*self.arena.alloc(self.expr_err(*sp)))
199                         }
200                         StructRest::None => None,
201                     };
202                     hir::ExprKind::Struct(
203                         self.arena.alloc(self.lower_qpath(
204                             e.id,
205                             &None,
206                             path,
207                             ParamMode::Optional,
208                             ImplTraitContext::disallowed(),
209                         )),
210                         self.arena.alloc_from_iter(fields.iter().map(|x| self.lower_field(x))),
211                         rest,
212                     )
213                 }
214                 ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
215                 ExprKind::Err => hir::ExprKind::Err,
216                 ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
217                 ExprKind::Paren(ref ex) => {
218                     let mut ex = self.lower_expr_mut(ex);
219                     // Include parens in span, but only if it is a super-span.
220                     if e.span.contains(ex.span) {
221                         ex.span = e.span;
222                     }
223                     // Merge attributes into the inner expression.
224                     let mut attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
225                     attrs.extend::<Vec<_>>(ex.attrs.into());
226                     ex.attrs = attrs.into();
227                     return ex;
228                 }
229
230                 // Desugar `ExprForLoop`
231                 // from `[opt_ident]: for <pat> in <head> <body>` (see `lower_expr_for`).
232                 ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
233                     return self.lower_expr_for(e, pat, head, body, opt_label);
234                 }
235                 ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
236             };
237
238             hir::Expr {
239                 hir_id: self.lower_node_id(e.id),
240                 kind,
241                 span: e.span,
242                 attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
243             }
244         })
245     }
246
247     fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
248         match u {
249             UnOp::Deref => hir::UnOp::UnDeref,
250             UnOp::Not => hir::UnOp::UnNot,
251             UnOp::Neg => hir::UnOp::UnNeg,
252         }
253     }
254
255     fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
256         Spanned {
257             node: match b.node {
258                 BinOpKind::Add => hir::BinOpKind::Add,
259                 BinOpKind::Sub => hir::BinOpKind::Sub,
260                 BinOpKind::Mul => hir::BinOpKind::Mul,
261                 BinOpKind::Div => hir::BinOpKind::Div,
262                 BinOpKind::Rem => hir::BinOpKind::Rem,
263                 BinOpKind::And => hir::BinOpKind::And,
264                 BinOpKind::Or => hir::BinOpKind::Or,
265                 BinOpKind::BitXor => hir::BinOpKind::BitXor,
266                 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
267                 BinOpKind::BitOr => hir::BinOpKind::BitOr,
268                 BinOpKind::Shl => hir::BinOpKind::Shl,
269                 BinOpKind::Shr => hir::BinOpKind::Shr,
270                 BinOpKind::Eq => hir::BinOpKind::Eq,
271                 BinOpKind::Lt => hir::BinOpKind::Lt,
272                 BinOpKind::Le => hir::BinOpKind::Le,
273                 BinOpKind::Ne => hir::BinOpKind::Ne,
274                 BinOpKind::Ge => hir::BinOpKind::Ge,
275                 BinOpKind::Gt => hir::BinOpKind::Gt,
276             },
277             span: b.span,
278         }
279     }
280
281     /// Emit an error and lower `ast::ExprKind::Let(pat, scrutinee)` into:
282     /// ```rust
283     /// match scrutinee { pat => true, _ => false }
284     /// ```
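    /// For example (illustrative), a stray `let Some(x) = opt` in expression
    /// position is recovered as `match opt { Some(x) => true, _ => false }`.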
285     fn lower_expr_let(&mut self, span: Span, pat: &Pat, scrutinee: &Expr) -> hir::ExprKind<'hir> {
286         // If we got here, the `let` expression is not allowed.
287
288         if self.sess.opts.unstable_features.is_nightly_build() {
289             self.sess
290                 .struct_span_err(span, "`let` expressions are not supported here")
291                 .note("only supported directly in conditions of `if`- and `while`-expressions")
292                 .note("as well as when nested within `&&` and parentheses in those conditions")
293                 .emit();
294         } else {
295             self.sess
296                 .struct_span_err(span, "expected expression, found statement (`let`)")
297                 .note("variable declaration using `let` is a statement")
298                 .emit();
299         }
300
301         // For better recovery, we emit:
302         // ```
303         // match scrutinee { pat => true, _ => false }
304         // ```
305         // While this doesn't fully match the user's intent, it has key advantages:
306         // 1. We can avoid using `abort_if_errors`.
307         // 2. We can typeck both `pat` and `scrutinee`.
308         // 3. `pat` is allowed to be refutable.
309         // 4. The return type of the block is `bool` which seems like what the user wanted.
310         let scrutinee = self.lower_expr(scrutinee);
311         let then_arm = {
312             let pat = self.lower_pat(pat);
313             let expr = self.expr_bool(span, true);
314             self.arm(pat, expr)
315         };
316         let else_arm = {
317             let pat = self.pat_wild(span);
318             let expr = self.expr_bool(span, false);
319             self.arm(pat, expr)
320         };
321         hir::ExprKind::Match(
322             scrutinee,
323             arena_vec![self; then_arm, else_arm],
324             hir::MatchSource::Normal,
325         )
326     }
327
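    /// Desugar an `if` expression into a `match` (illustrative sketch; spans,
    /// attributes, and drop-temporaries plumbing omitted):
    ///
    /// ```text
    /// // `if <cond> { <then> } else { <else> }` becomes:
    /// match DropTemps(<cond>) { true => <then>, _ => <else> }
    ///
    /// // `if let <pat> = <scrutinee> { <then> } else { <else> }` becomes:
    /// match <scrutinee> { <pat> => <then>, _ => <else> }
    /// ```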
328     fn lower_expr_if(
329         &mut self,
330         span: Span,
331         cond: &Expr,
332         then: &Block,
333         else_opt: Option<&Expr>,
334     ) -> hir::ExprKind<'hir> {
335         // FIXME(#53667): handle lowering of && and parens.
336
337         // `_ => else_block` where `else_block` is `{}` if `else_opt` is `None`:
338         let else_pat = self.pat_wild(span);
339         let (else_expr, contains_else_clause) = match else_opt {
340             None => (self.expr_block_empty(span), false),
341             Some(els) => (self.lower_expr(els), true),
342         };
343         let else_arm = self.arm(else_pat, else_expr);
344
345         // Handle then + scrutinee:
346         let then_expr = self.lower_block_expr(then);
347         let (then_pat, scrutinee, desugar) = match cond.kind {
348             // `<pat> => <then>`:
349             ExprKind::Let(ref pat, ref scrutinee) => {
350                 let scrutinee = self.lower_expr(scrutinee);
351                 let pat = self.lower_pat(pat);
352                 (pat, scrutinee, hir::MatchSource::IfLetDesugar { contains_else_clause })
353             }
354             // `true => <then>`:
355             _ => {
356                 // Lower condition:
357                 let cond = self.lower_expr(cond);
358                 let span_block =
359                     self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
360                 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
361                 // to preserve drop semantics since `if cond { ... }` does not
362                 // let temporaries live outside of `cond`.
363                 let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
364                 let pat = self.pat_bool(span, true);
365                 (pat, cond, hir::MatchSource::IfDesugar { contains_else_clause })
366             }
367         };
368         let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));
369
370         hir::ExprKind::Match(scrutinee, arena_vec![self; then_arm, else_arm], desugar)
371     }
372
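    /// Lower `while` and `while let` loops into `loop { match ... }`; the
    /// inline comments below show the exact shapes. Both the condition and the
    /// body are lowered inside the loop scope so that `break` works from
    /// within the condition.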
373     fn lower_expr_while_in_loop_scope(
374         &mut self,
375         span: Span,
376         cond: &Expr,
377         body: &Block,
378         opt_label: Option<Label>,
379     ) -> hir::ExprKind<'hir> {
380         // FIXME(#53667): handle lowering of && and parens.
381
382         // Note that the block AND the condition are evaluated in the loop scope.
383         // This is done to allow `break` from inside the condition of the loop.
384
385         // `_ => break`:
386         let else_arm = {
387             let else_pat = self.pat_wild(span);
388             let else_expr = self.expr_break(span, ThinVec::new());
389             self.arm(else_pat, else_expr)
390         };
391
392         // Handle then + scrutinee:
393         let then_expr = self.lower_block_expr(body);
394         let (then_pat, scrutinee, desugar, source) = match cond.kind {
395             ExprKind::Let(ref pat, ref scrutinee) => {
396                 // We desugar: `[opt_ident]: while let <pat> = <scrutinee> <body>` into:
397                 //
398                 //   [opt_ident]: loop {
399                 //     match <scrutinee> {
400                 //       <pat> => <body>,
401                 //       _ => break
402                 //     }
403                 //   }
404                 let scrutinee = self.with_loop_condition_scope(|t| t.lower_expr(scrutinee));
405                 let pat = self.lower_pat(pat);
406                 (pat, scrutinee, hir::MatchSource::WhileLetDesugar, hir::LoopSource::WhileLet)
407             }
408             _ => {
409                 // We desugar: `'label: while $cond $body` into:
410                 //
411                 // ```
412                 // 'label: loop {
413                 //     match drop-temps { $cond } {
414                 //         true => $body,
415                 //         _ => break,
416                 //     }
417                 // }
418                 // ```
419
420                 // Lower condition:
421                 let cond = self.with_loop_condition_scope(|this| this.lower_expr(cond));
422                 let span_block =
423                     self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
424                 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
425                 // to preserve drop semantics since `while cond { ... }` does not
426                 // let temporaries live outside of `cond`.
427                 let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
428                 // `true => <then>`:
429                 let pat = self.pat_bool(span, true);
430                 (pat, cond, hir::MatchSource::WhileDesugar, hir::LoopSource::While)
431             }
432         };
433         let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));
434
435         // `match <scrutinee> { ... }`
436         let match_expr =
437             self.expr_match(span, scrutinee, arena_vec![self; then_arm, else_arm], desugar);
438
439         // `[opt_ident]: loop { ... }`
440         hir::ExprKind::Loop(self.block_expr(self.arena.alloc(match_expr)), opt_label, source)
441     }
442
443     /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_ok(<expr>) }`
444     /// and `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_ok(()) }`,
445     /// and save the block id to use it as a break target when desugaring the `?` operator.
446     fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
447         self.with_catch_scope(body.id, |this| {
448             let mut block = this.lower_block_noalloc(body, true);
449
450             // Final expression of the block (if present) or `()` with span at the end of block
451             let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
452                 (
453                     this.mark_span_with_reason(
454                         DesugaringKind::TryBlock,
455                         expr.span,
456                         this.allow_try_trait.clone(),
457                     ),
458                     expr,
459                 )
460             } else {
461                 let try_span = this.mark_span_with_reason(
462                     DesugaringKind::TryBlock,
463                     this.sess.source_map().end_point(body.span),
464                     this.allow_try_trait.clone(),
465                 );
466
467                 (try_span, this.expr_unit(try_span))
468             };
469
470             let ok_wrapped_span =
471                 this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);
472
473             // `::std::ops::Try::from_ok($tail_expr)`
474             block.expr = Some(this.wrap_in_try_constructor(
475                 hir::LangItem::TryFromOk,
476                 try_span,
477                 tail_expr,
478                 ok_wrapped_span,
479             ));
480
481             hir::ExprKind::Block(this.arena.alloc(block), None)
482         })
483     }
484
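    /// Wrap `expr` in a call to the constructor named by `lang_item`, e.g.
    /// `::std::ops::Try::from_ok(<expr>)` for `hir::LangItem::TryFromOk`.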
485     fn wrap_in_try_constructor(
486         &mut self,
487         lang_item: hir::LangItem,
488         method_span: Span,
489         expr: &'hir hir::Expr<'hir>,
490         overall_span: Span,
491     ) -> &'hir hir::Expr<'hir> {
492         let constructor =
493             self.arena.alloc(self.expr_lang_item_path(method_span, lang_item, ThinVec::new()));
494         self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
495     }
496
497     fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
498         hir::Arm {
499             hir_id: self.next_id(),
500             attrs: self.lower_attrs(&arm.attrs),
501             pat: self.lower_pat(&arm.pat),
502             guard: match arm.guard {
503                 Some(ref x) => Some(hir::Guard::If(self.lower_expr(x))),
504                 _ => None,
505             },
506             body: self.lower_expr(&arm.body),
507             span: arm.span,
508         }
509     }
510
511     /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
512     ///
513     /// This results in:
514     ///
515     /// ```text
516     /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
517     ///     <body>
518     /// })
519     /// ```
520     pub(super) fn make_async_expr(
521         &mut self,
522         capture_clause: CaptureBy,
523         closure_node_id: NodeId,
524         ret_ty: Option<AstP<Ty>>,
525         span: Span,
526         async_gen_kind: hir::AsyncGeneratorKind,
527         body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
528     ) -> hir::ExprKind<'hir> {
529         let output = match ret_ty {
530             Some(ty) => hir::FnRetTy::Return(self.lower_ty(&ty, ImplTraitContext::disallowed())),
531             None => hir::FnRetTy::DefaultReturn(span),
532         };
533
534         // Resume argument type. We let the compiler infer this to simplify the lowering. It is
535         // fully constrained by `future::from_generator`.
536         let input_ty = hir::Ty { hir_id: self.next_id(), kind: hir::TyKind::Infer, span };
537
538         // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
539         let decl = self.arena.alloc(hir::FnDecl {
540             inputs: arena_vec![self; input_ty],
541             output,
542             c_variadic: false,
543             implicit_self: hir::ImplicitSelfKind::None,
544         });
545
546         // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
547         let (pat, task_context_hid) = self.pat_ident_binding_mode(
548             span,
549             Ident::with_dummy_span(sym::_task_context),
550             hir::BindingAnnotation::Mutable,
551         );
552         let param = hir::Param { attrs: &[], hir_id: self.next_id(), pat, ty_span: span, span };
553         let params = arena_vec![self; param];
554
555         let body_id = self.lower_body(move |this| {
556             this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
557
558             let old_ctx = this.task_context;
559             this.task_context = Some(task_context_hid);
560             let res = body(this);
561             this.task_context = old_ctx;
562             (params, res)
563         });
564
565         // `static |_task_context| -> <ret_ty> { body }`:
566         let generator_kind = hir::ExprKind::Closure(
567             capture_clause,
568             decl,
569             body_id,
570             span,
571             Some(hir::Movability::Static),
572         );
573         let generator = hir::Expr {
574             hir_id: self.lower_node_id(closure_node_id),
575             kind: generator_kind,
576             span,
577             attrs: ThinVec::new(),
578         };
579
580         // `future::from_generator`:
581         let unstable_span =
582             self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
583         let gen_future =
584             self.expr_lang_item_path(unstable_span, hir::LangItem::FromGenerator, ThinVec::new());
585
586         // `future::from_generator(generator)`:
587         hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
588     }
589
590     /// Desugar `<expr>.await` into:
591     /// ```rust
592     /// match <expr> {
593     ///     mut pinned => loop {
594     ///         match unsafe { ::std::future::Future::poll(
595     ///             <::std::pin::Pin>::new_unchecked(&mut pinned),
596     ///             ::std::future::get_context(task_context),
597     ///         ) } {
598     ///             ::std::task::Poll::Ready(result) => break result,
599     ///             ::std::task::Poll::Pending => {}
600     ///         }
601     ///         task_context = yield ();
602     ///     }
603     /// }
604     /// ```
605     fn lower_expr_await(&mut self, await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
606         match self.generator_kind {
607             Some(hir::GeneratorKind::Async(_)) => {}
608             Some(hir::GeneratorKind::Gen) | None => {
609                 let mut err = struct_span_err!(
610                     self.sess,
611                     await_span,
612                     E0728,
613                     "`await` is only allowed inside `async` functions and blocks"
614                 );
615                 err.span_label(await_span, "only allowed inside `async` functions and blocks");
616                 if let Some(item_sp) = self.current_item {
617                     err.span_label(item_sp, "this is not `async`");
618                 }
619                 err.emit();
620             }
621         }
622         let span = self.mark_span_with_reason(DesugaringKind::Await, await_span, None);
623         let gen_future_span = self.mark_span_with_reason(
624             DesugaringKind::Await,
625             await_span,
626             self.allow_gen_future.clone(),
627         );
628         let expr = self.lower_expr(expr);
629
630         let pinned_ident = Ident::with_dummy_span(sym::pinned);
631         let (pinned_pat, pinned_pat_hid) =
632             self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);
633
634         let task_context_ident = Ident::with_dummy_span(sym::_task_context);
635
636         // unsafe {
637         //     ::std::future::Future::poll(
638         //         ::std::pin::Pin::new_unchecked(&mut pinned),
639         //         ::std::future::get_context(task_context),
640         //     )
641         // }
642         let poll_expr = {
643             let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
644             let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
645             let task_context = if let Some(task_context_hid) = self.task_context {
646                 self.expr_ident_mut(span, task_context_ident, task_context_hid)
647             } else {
648                 // Use of `await` outside of an async context; we cannot use `task_context` here.
649                 self.expr_err(span)
650             };
651             let new_unchecked = self.expr_call_lang_item_fn_mut(
652                 span,
653                 hir::LangItem::PinNewUnchecked,
654                 arena_vec![self; ref_mut_pinned],
655             );
656             let get_context = self.expr_call_lang_item_fn_mut(
657                 gen_future_span,
658                 hir::LangItem::GetContext,
659                 arena_vec![self; task_context],
660             );
661             let call = self.expr_call_lang_item_fn(
662                 span,
663                 hir::LangItem::FuturePoll,
664                 arena_vec![self; new_unchecked, get_context],
665             );
666             self.arena.alloc(self.expr_unsafe(call))
667         };
668
669         // `::std::task::Poll::Ready(result) => break result`
670         let loop_node_id = self.resolver.next_node_id();
671         let loop_hir_id = self.lower_node_id(loop_node_id);
672         let ready_arm = {
673             let x_ident = Ident::with_dummy_span(sym::result);
674             let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
675             let x_expr = self.expr_ident(span, x_ident, x_pat_hid);
676             let ready_field = self.single_pat_field(span, x_pat);
677             let ready_pat = self.pat_lang_item_variant(span, hir::LangItem::PollReady, ready_field);
678             let break_x = self.with_loop_scope(loop_node_id, move |this| {
679                 let expr_break =
680                     hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
681                 this.arena.alloc(this.expr(await_span, expr_break, ThinVec::new()))
682             });
683             self.arm(ready_pat, break_x)
684         };
685
686         // `::std::task::Poll::Pending => {}`
687         let pending_arm = {
688             let pending_pat = self.pat_lang_item_variant(span, hir::LangItem::PollPending, &[]);
689             let empty_block = self.expr_block_empty(span);
690             self.arm(pending_pat, empty_block)
691         };
692
693         let inner_match_stmt = {
694             let match_expr = self.expr_match(
695                 span,
696                 poll_expr,
697                 arena_vec![self; ready_arm, pending_arm],
698                 hir::MatchSource::AwaitDesugar,
699             );
700             self.stmt_expr(span, match_expr)
701         };
702
703         // task_context = yield ();
704         let yield_stmt = {
705             let unit = self.expr_unit(span);
706             let yield_expr = self.expr(
707                 span,
708                 hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr.hir_id) }),
709                 ThinVec::new(),
710             );
711             let yield_expr = self.arena.alloc(yield_expr);
712
713             if let Some(task_context_hid) = self.task_context {
714                 let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
715                 let assign =
716                     self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, span), AttrVec::new());
717                 self.stmt_expr(span, assign)
718             } else {
719                 // Use of `await` outside of an async context. Return `yield_expr` so that we can
720                 // proceed with type checking.
721                 self.stmt(span, hir::StmtKind::Semi(yield_expr))
722             }
723         };
724
725         let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);
726
727         // loop { .. }
728         let loop_expr = self.arena.alloc(hir::Expr {
729             hir_id: loop_hir_id,
730             kind: hir::ExprKind::Loop(loop_block, None, hir::LoopSource::Loop),
731             span,
732             attrs: ThinVec::new(),
733         });
734
735         // mut pinned => loop { ... }
736         let pinned_arm = self.arm(pinned_pat, loop_expr);
737
738         // match <expr> {
739         //     mut pinned => loop { .. }
740         // }
741         hir::ExprKind::Match(expr, arena_vec![self; pinned_arm], hir::MatchSource::AwaitDesugar)
742     }
743
744     fn lower_expr_closure(
745         &mut self,
746         capture_clause: CaptureBy,
747         movability: Movability,
748         decl: &FnDecl,
749         body: &Expr,
750         fn_decl_span: Span,
751     ) -> hir::ExprKind<'hir> {
752         // Lower outside new scope to preserve `is_in_loop_condition`.
753         let fn_decl = self.lower_fn_decl(decl, None, false, None);
754
755         self.with_new_scopes(move |this| {
756             let prev = this.current_item;
757             this.current_item = Some(fn_decl_span);
758             let mut generator_kind = None;
759             let body_id = this.lower_fn_body(decl, |this| {
760                 let e = this.lower_expr_mut(body);
761                 generator_kind = this.generator_kind;
762                 e
763             });
764             let generator_option =
765                 this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
766             this.current_item = prev;
767             hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, generator_option)
768         })
769     }
770
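    /// Determine the movability of the generator produced from a closure body:
    /// a plain generator keeps the user-written `movability` (and may take at
    /// most one parameter), a non-generator closure must not be `static`, and
    /// an `async` body reaching this point indicates a lowering bug.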
771     fn generator_movability_for_fn(
772         &mut self,
773         decl: &FnDecl,
774         fn_decl_span: Span,
775         generator_kind: Option<hir::GeneratorKind>,
776         movability: Movability,
777     ) -> Option<hir::Movability> {
778         match generator_kind {
779             Some(hir::GeneratorKind::Gen) => {
780                 if decl.inputs.len() > 1 {
781                     struct_span_err!(
782                         self.sess,
783                         fn_decl_span,
784                         E0628,
785                         "too many parameters for a generator (expected 0 or 1 parameters)"
786                     )
787                     .emit();
788                 }
789                 Some(movability)
790             }
791             Some(hir::GeneratorKind::Async(_)) => {
792                 panic!("non-`async` closure body turned `async` during lowering");
793             }
794             None => {
795                 if movability == Movability::Static {
796                     struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
797                         .emit();
798                 }
799                 None
800             }
801         }
802     }
803
804     fn lower_expr_async_closure(
805         &mut self,
806         capture_clause: CaptureBy,
807         closure_id: NodeId,
808         decl: &FnDecl,
809         body: &Expr,
810         fn_decl_span: Span,
811     ) -> hir::ExprKind<'hir> {
812         let outer_decl =
813             FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
814         // We need to lower the declaration outside the new scope, because we
815         // have to preserve the state of being inside a loop condition for the
816         // closure argument types.
817         let fn_decl = self.lower_fn_decl(&outer_decl, None, false, None);
818
819         self.with_new_scopes(move |this| {
820             // FIXME(cramertj): allow `async` non-`move` closures with arguments.
821             if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
822                 struct_span_err!(
823                     this.sess,
824                     fn_decl_span,
825                     E0708,
826                     "`async` non-`move` closures with parameters are not currently supported",
827                 )
828                 .help(
829                     "consider using `let` statements to manually capture \
830                     variables by reference before entering an `async move` closure",
831                 )
832                 .emit();
833             }
834
835             // Transform `async |x: u8| -> X { ... }` into
836             // `|x: u8| future_from_generator(|| -> X { ... })`.
837             let body_id = this.lower_fn_body(&outer_decl, |this| {
838                 let async_ret_ty =
839                     if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
840                 let async_body = this.make_async_expr(
841                     capture_clause,
842                     closure_id,
843                     async_ret_ty,
844                     body.span,
845                     hir::AsyncGeneratorKind::Closure,
846                     |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
847                 );
848                 this.expr(fn_decl_span, async_body, ThinVec::new())
849             });
850             hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, None)
851         })
852     }
853
854     /// Destructure the LHS of complex assignments.
855     /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
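    /// Other shapes follow the same scheme, e.g. (names illustrative)
    /// `[a, b] = v` becomes `{ let [lhs1, lhs2] = v; a = lhs1; b = lhs2; }`.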
856     fn lower_expr_assign(
857         &mut self,
858         lhs: &Expr,
859         rhs: &Expr,
860         eq_sign_span: Span,
861         whole_span: Span,
862     ) -> hir::ExprKind<'hir> {
863         // Return early in case of an ordinary assignment.
864         fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
865             match &lhs.kind {
866                 ExprKind::Array(..) | ExprKind::Struct(..) | ExprKind::Tup(..) => false,
867                 // Check for tuple struct constructor.
868                 ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
869                 ExprKind::Paren(e) => {
870                     match e.kind {
871                         // We special-case `(..)` for consistency with patterns.
872                         ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
873                         _ => is_ordinary(lower_ctx, e),
874                     }
875                 }
876                 _ => true,
877             }
878         }
879         if is_ordinary(self, lhs) {
880             return hir::ExprKind::Assign(self.lower_expr(lhs), self.lower_expr(rhs), eq_sign_span);
881         }
882         if !self.sess.features_untracked().destructuring_assignment {
883             feature_err(
884                 &self.sess.parse_sess,
885                 sym::destructuring_assignment,
886                 eq_sign_span,
887                 "destructuring assignments are unstable",
888             )
889             .span_label(lhs.span, "cannot assign to this expression")
890             .emit();
891         }
892
893         let mut assignments = vec![];
894
895         // The LHS becomes a pattern: `(lhs1, lhs2)`.
896         let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
897         let rhs = self.lower_expr(rhs);
898
899         // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
900         let destructure_let = self.stmt_let_pat(
901             ThinVec::new(),
902             whole_span,
903             Some(rhs),
904             pat,
905             hir::LocalSource::AssignDesugar(eq_sign_span),
906         );
907
908         // `a = lhs1; b = lhs2;`.
909         let stmts = self
910             .arena
911             .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));
912
913         // Wrap everything in a block.
914         hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None)
915     }
916
917     /// If the given expression is a path to a tuple struct, returns that path.
918     /// It is not a complete check, but just tries to reject most paths early
919     /// if they are not tuple structs.
920     /// Type checking will take care of the full validation later.
921     fn extract_tuple_struct_path<'a>(&mut self, expr: &'a Expr) -> Option<&'a Path> {
922         // For tuple struct destructuring, it must be a non-qualified path (like in patterns).
923         if let ExprKind::Path(None, path) = &expr.kind {
924             // Does the path resolve to something disallowed in a tuple struct/variant pattern?
925             if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
926                 if partial_res.unresolved_segments() == 0
927                     && !partial_res.base_res().expected_in_tuple_struct_pat()
928                 {
929                     return None;
930                 }
931             }
932             return Some(path);
933         }
934         None
935     }
936
937     /// Convert the LHS of a destructuring assignment to a pattern.
938     /// Each sub-assignment is recorded in `assignments`.
939     fn destructure_assign(
940         &mut self,
941         lhs: &Expr,
942         eq_sign_span: Span,
943         assignments: &mut Vec<hir::Stmt<'hir>>,
944     ) -> &'hir hir::Pat<'hir> {
945         match &lhs.kind {
946             // Slice patterns.
947             ExprKind::Array(elements) => {
948                 let (pats, rest) =
949                     self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
950                 let slice_pat = if let Some((i, span)) = rest {
951                     let (before, after) = pats.split_at(i);
952                     hir::PatKind::Slice(
953                         before,
954                         Some(self.pat_without_dbm(span, hir::PatKind::Wild)),
955                         after,
956                     )
957                 } else {
958                     hir::PatKind::Slice(pats, None, &[])
959                 };
960                 return self.pat_without_dbm(lhs.span, slice_pat);
961             }
962             // Tuple structs.
963             ExprKind::Call(callee, args) => {
964                 if let Some(path) = self.extract_tuple_struct_path(callee) {
965                     let (pats, rest) = self.destructure_sequence(
966                         args,
967                         "tuple struct or variant",
968                         eq_sign_span,
969                         assignments,
970                     );
971                     let qpath = self.lower_qpath(
972                         callee.id,
973                         &None,
974                         path,
975                         ParamMode::Optional,
976                         ImplTraitContext::disallowed(),
977                     );
978                     // Destructure like a tuple struct.
979                     let tuple_struct_pat =
980                         hir::PatKind::TupleStruct(qpath, pats, rest.map(|r| r.0));
981                     return self.pat_without_dbm(lhs.span, tuple_struct_pat);
982                 }
983             }
984             // Structs.
985             ExprKind::Struct(path, fields, rest) => {
986                 let field_pats = self.arena.alloc_from_iter(fields.iter().map(|f| {
987                     let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
988                     hir::FieldPat {
989                         hir_id: self.next_id(),
990                         ident: f.ident,
991                         pat,
992                         is_shorthand: f.is_shorthand,
993                         span: f.span,
994                     }
995                 }));
996                 let qpath = self.lower_qpath(
997                     lhs.id,
998                     &None,
999                     path,
1000                     ParamMode::Optional,
1001                     ImplTraitContext::disallowed(),
1002                 );
1003                 let fields_omitted = match rest {
1004                     StructRest::Base(e) => {
1005                         self.sess
1006                             .struct_span_err(
1007                                 e.span,
1008                                 "functional record updates are not allowed in destructuring \
1009                                     assignments",
1010                             )
1011                             .span_suggestion(
1012                                 e.span,
1013                                 "consider removing the trailing pattern",
1014                                 String::new(),
1015                                 rustc_errors::Applicability::MachineApplicable,
1016                             )
1017                             .emit();
1018                         true
1019                     }
1020                     StructRest::Rest(_) => true,
1021                     StructRest::None => false,
1022                 };
1023                 let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
1024                 return self.pat_without_dbm(lhs.span, struct_pat);
1025             }
1026             // Tuples.
1027             ExprKind::Tup(elements) => {
1028                 let (pats, rest) =
1029                     self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
1030                 let tuple_pat = hir::PatKind::Tuple(pats, rest.map(|r| r.0));
1031                 return self.pat_without_dbm(lhs.span, tuple_pat);
1032             }
1033             ExprKind::Paren(e) => {
1034                 // We special-case `(..)` for consistency with patterns.
1035                 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1036                     let tuple_pat = hir::PatKind::Tuple(&[], Some(0));
1037                     return self.pat_without_dbm(lhs.span, tuple_pat);
1038                 } else {
1039                     return self.destructure_assign(e, eq_sign_span, assignments);
1040                 }
1041             }
1042             _ => {}
1043         }
1044         // Treat all other cases as a normal lvalue.
1045         let ident = Ident::new(sym::lhs, lhs.span);
1046         let (pat, binding) = self.pat_ident(lhs.span, ident);
1047         let ident = self.expr_ident(lhs.span, ident, binding);
1048         let assign = hir::ExprKind::Assign(self.lower_expr(lhs), ident, eq_sign_span);
1049         let expr = self.expr(lhs.span, assign, ThinVec::new());
1050         assignments.push(self.stmt_expr(lhs.span, expr));
1051         pat
1052     }
1053
1054     /// Destructure a sequence of expressions occurring on the LHS of an assignment.
1055     /// Such a sequence occurs in a tuple (struct)/slice.
1056     /// Return a sequence of corresponding patterns, and the index and the span of `..` if it
1057     /// exists.
1058     /// Each sub-assignment is recorded in `assignments`.
1059     fn destructure_sequence(
1060         &mut self,
1061         elements: &[AstP<Expr>],
1062         ctx: &str,
1063         eq_sign_span: Span,
1064         assignments: &mut Vec<hir::Stmt<'hir>>,
1065     ) -> (&'hir [&'hir hir::Pat<'hir>], Option<(usize, Span)>) {
1066         let mut rest = None;
1067         let elements =
1068             self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
1069                 // Check for `..` pattern.
1070                 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1071                     if let Some((_, prev_span)) = rest {
1072                         self.ban_extra_rest_pat(e.span, prev_span, ctx);
1073                     } else {
1074                         rest = Some((i, e.span));
1075                     }
1076                     None
1077                 } else {
1078                     Some(self.destructure_assign(e, eq_sign_span, assignments))
1079                 }
1080             }));
1081         (elements, rest)
1082     }
1083
1084     /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
1085     fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
1086         let e1 = self.lower_expr_mut(e1);
1087         let e2 = self.lower_expr_mut(e2);
1088         let fn_path = hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, span);
1089         let fn_expr =
1090             self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), ThinVec::new()));
1091         hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
1092     }
1093
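    /// Lower the remaining range forms to struct literals of the corresponding
    /// range lang items, e.g. (roughly) `a..b` becomes
    /// `::std::ops::Range { start: a, end: b }` and `..=b` becomes
    /// `::std::ops::RangeToInclusive { end: b }`.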
1094     fn lower_expr_range(
1095         &mut self,
1096         span: Span,
1097         e1: Option<&Expr>,
1098         e2: Option<&Expr>,
1099         lims: RangeLimits,
1100     ) -> hir::ExprKind<'hir> {
1101         use rustc_ast::RangeLimits::*;
1102
1103         let lang_item = match (e1, e2, lims) {
1104             (None, None, HalfOpen) => hir::LangItem::RangeFull,
1105             (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
1106             (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
1107             (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
1108             (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
1109             (Some(..), Some(..), Closed) => unreachable!(),
1110             (_, None, Closed) => {
1111                 self.diagnostic().span_fatal(span, "inclusive range with no end").raise()
1112             }
1113         };
1114
1115         let fields = self.arena.alloc_from_iter(
1116             e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e))).map(|(s, e)| {
1117                 let expr = self.lower_expr(&e);
1118                 let ident = Ident::new(Symbol::intern(s), e.span);
1119                 self.field(ident, expr, e.span)
1120             }),
1121         );
1122
1123         hir::ExprKind::Struct(self.arena.alloc(hir::QPath::LangItem(lang_item, span)), fields, None)
1124     }
1125
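    /// Resolve the target of a `break`/`continue`: a labelled jump uses the
    /// label's resolution, while an unlabelled one targets the innermost
    /// enclosing loop scope (or records an error if there is none).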
1126     fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
1127         let target_id = match destination {
1128             Some((id, _)) => {
1129                 if let Some(loop_id) = self.resolver.get_label_res(id) {
1130                     Ok(self.lower_node_id(loop_id))
1131                 } else {
1132                     Err(hir::LoopIdError::UnresolvedLabel)
1133                 }
1134             }
1135             None => self
1136                 .loop_scopes
1137                 .last()
1138                 .cloned()
1139                 .map(|id| Ok(self.lower_node_id(id)))
1140                 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
1141         };
1142         hir::Destination { label: destination.map(|(_, label)| label), target_id }
1143     }
1144
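    /// Like `lower_loop_destination`, but unlabelled `break`/`continue` inside
    /// a `while` condition is reported as `UnlabeledCfInWhileCondition` rather
    /// than silently targeting the loop produced by the `while` desugaring.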
1145     fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
1146         if self.is_in_loop_condition && opt_label.is_none() {
1147             hir::Destination {
1148                 label: None,
1149                 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
1150             }
1151         } else {
1152             self.lower_loop_destination(opt_label.map(|label| (id, label)))
1153         }
1154     }
1155
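    /// Run `f` with `catch_id` pushed as the innermost `try` block scope; the
    /// `?` desugaring uses this stack to find the block to `break` out of.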
1156     fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1157         let len = self.catch_scopes.len();
1158         self.catch_scopes.push(catch_id);
1159
1160         let result = f(self);
1161         assert_eq!(
1162             len + 1,
1163             self.catch_scopes.len(),
1164             "catch scopes should be added and removed in stack order"
1165         );
1166
1167         self.catch_scopes.pop().unwrap();
1168
1169         result
1170     }
1171
1172     fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1173         // We're no longer in the base loop's condition; we're in another loop.
1174         let was_in_loop_condition = self.is_in_loop_condition;
1175         self.is_in_loop_condition = false;
1176
1177         let len = self.loop_scopes.len();
1178         self.loop_scopes.push(loop_id);
1179
1180         let result = f(self);
1181         assert_eq!(
1182             len + 1,
1183             self.loop_scopes.len(),
1184             "loop scopes should be added and removed in stack order"
1185         );
1186
1187         self.loop_scopes.pop().unwrap();
1188
1189         self.is_in_loop_condition = was_in_loop_condition;
1190
1191         result
1192     }
1193
1194     fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
1195         let was_in_loop_condition = self.is_in_loop_condition;
1196         self.is_in_loop_condition = true;
1197
1198         let result = f(self);
1199
1200         self.is_in_loop_condition = was_in_loop_condition;
1201
1202         result
1203     }
1204
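    /// Lower an `asm!` invocation: check target support and the `att_syntax`
    /// option, lower each operand (rejecting invalid registers and register
    /// classes), and validate template modifiers against the operands' register
    /// classes; on any error the whole expression lowers to `hir::ExprKind::Err`.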
1205     fn lower_expr_asm(&mut self, sp: Span, asm: &InlineAsm) -> hir::ExprKind<'hir> {
1206         if self.sess.asm_arch.is_none() {
1207             struct_span_err!(self.sess, sp, E0472, "asm! is unsupported on this target").emit();
1208         }
1209         if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
1210             && !matches!(
1211                 self.sess.asm_arch,
1212                 Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64)
1213             )
1214         {
1215             self.sess
1216                 .struct_span_err(sp, "the `att_syntax` option is only supported on x86")
1217                 .emit();
1218         }
1219
1220         // Lower operands to HIR; `filter_map` skips any operands with invalid
1221         // register classes.
1222         let sess = self.sess;
1223         let operands: Vec<_> = asm
1224             .operands
1225             .iter()
1226             .filter_map(|(op, op_sp)| {
1227                 let lower_reg = |reg| {
1228                     Some(match reg {
1229                         InlineAsmRegOrRegClass::Reg(s) => asm::InlineAsmRegOrRegClass::Reg(
1230                             asm::InlineAsmReg::parse(
1231                                 sess.asm_arch?,
1232                                 |feature| sess.target_features.contains(&Symbol::intern(feature)),
1233                                 &sess.target,
1234                                 s,
1235                             )
1236                             .map_err(|e| {
1237                                 let msg = format!("invalid register `{}`: {}", s.as_str(), e);
1238                                 sess.struct_span_err(*op_sp, &msg).emit();
1239                             })
1240                             .ok()?,
1241                         ),
1242                         InlineAsmRegOrRegClass::RegClass(s) => {
1243                             asm::InlineAsmRegOrRegClass::RegClass(
1244                                 asm::InlineAsmRegClass::parse(sess.asm_arch?, s)
1245                                     .map_err(|e| {
1246                                         let msg = format!(
1247                                             "invalid register class `{}`: {}",
1248                                             s.as_str(),
1249                                             e
1250                                         );
1251                                         sess.struct_span_err(*op_sp, &msg).emit();
1252                                     })
1253                                     .ok()?,
1254                             )
1255                         }
1256                     })
1257                 };
1258
1259                 // `lower_reg` is called last so that all sub-expressions are lowered
1260                 // even when the operand is later discarded due to a register error.
1261                 let op = match *op {
1262                     InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
1263                         expr: self.lower_expr_mut(expr),
1264                         reg: lower_reg(reg)?,
1265                     },
1266                     InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
1267                         late,
1268                         expr: expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
1269                         reg: lower_reg(reg)?,
1270                     },
1271                     InlineAsmOperand::InOut { reg, late, ref expr } => {
1272                         hir::InlineAsmOperand::InOut {
1273                             late,
1274                             expr: self.lower_expr_mut(expr),
1275                             reg: lower_reg(reg)?,
1276                         }
1277                     }
1278                     InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
1279                         hir::InlineAsmOperand::SplitInOut {
1280                             late,
1281                             in_expr: self.lower_expr_mut(in_expr),
1282                             out_expr: out_expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
1283                             reg: lower_reg(reg)?,
1284                         }
1285                     }
1286                     InlineAsmOperand::Const { ref expr } => {
1287                         hir::InlineAsmOperand::Const { expr: self.lower_expr_mut(expr) }
1288                     }
1289                     InlineAsmOperand::Sym { ref expr } => {
1290                         hir::InlineAsmOperand::Sym { expr: self.lower_expr_mut(expr) }
1291                     }
1292                 };
1293                 Some(op)
1294             })
1295             .collect();
1296
1297         // Stop if any register (class) failed to lower or inline asm is unsupported on this target.
1298         if operands.len() != asm.operands.len() || sess.asm_arch.is_none() {
1299             return hir::ExprKind::Err;
1300         }
1301
1302         // Validate template modifiers against the register classes for the operands
1303         let asm_arch = sess.asm_arch.unwrap();
1304         for p in &asm.template {
1305             if let InlineAsmTemplatePiece::Placeholder {
1306                 operand_idx,
1307                 modifier: Some(modifier),
1308                 span: placeholder_span,
1309             } = *p
1310             {
1311                 let op_sp = asm.operands[operand_idx].1;
1312                 match &operands[operand_idx] {
1313                     hir::InlineAsmOperand::In { reg, .. }
1314                     | hir::InlineAsmOperand::Out { reg, .. }
1315                     | hir::InlineAsmOperand::InOut { reg, .. }
1316                     | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
1317                         let class = reg.reg_class();
1318                         let valid_modifiers = class.valid_modifiers(asm_arch);
1319                         if !valid_modifiers.contains(&modifier) {
1320                             let mut err = sess.struct_span_err(
1321                                 placeholder_span,
1322                                 "invalid asm template modifier for this register class",
1323                             );
1324                             err.span_label(placeholder_span, "template modifier");
1325                             err.span_label(op_sp, "argument");
1326                             if !valid_modifiers.is_empty() {
1327                                 let mut mods = format!("`{}`", valid_modifiers[0]);
1328                                 for m in &valid_modifiers[1..] {
1329                                     let _ = write!(mods, ", `{}`", m);
1330                                 }
1331                                 err.note(&format!(
1332                                     "the `{}` register class supports \
1333                                      the following template modifiers: {}",
1334                                     class.name(),
1335                                     mods
1336                                 ));
1337                             } else {
1338                                 err.note(&format!(
1339                                     "the `{}` register class does not support template modifiers",
1340                                     class.name()
1341                                 ));
1342                             }
1343                             err.emit();
1344                         }
1345                     }
1346                     hir::InlineAsmOperand::Const { .. } => {
1347                         let mut err = sess.struct_span_err(
1348                             placeholder_span,
1349                             "asm template modifiers are not allowed for `const` arguments",
1350                         );
1351                         err.span_label(placeholder_span, "template modifier");
1352                         err.span_label(op_sp, "argument");
1353                         err.emit();
1354                     }
1355                     hir::InlineAsmOperand::Sym { .. } => {
1356                         let mut err = sess.struct_span_err(
1357                             placeholder_span,
1358                             "asm template modifiers are not allowed for `sym` arguments",
1359                         );
1360                         err.span_label(placeholder_span, "template modifier");
1361                         err.span_label(op_sp, "argument");
1362                         err.emit();
1363                     }
1364                 }
1365             }
1366         }
1367
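        // Map each explicitly named register (and every register overlapping it) to
        // the index of the operand that first used it, so conflicting uses can be
        // reported below.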
1368         let mut used_input_regs = FxHashMap::default();
1369         let mut used_output_regs = FxHashMap::default();
1370         for (idx, op) in operands.iter().enumerate() {
1371             let op_sp = asm.operands[idx].1;
1372             if let Some(reg) = op.reg() {
1373                 // Validate register classes against the currently enabled target
1374                 // features: at least one of the register class's supported value
1375                 // types must be usable on the current target.
1376                 let reg_class = reg.reg_class();
1377                 let mut required_features: Vec<&str> = vec![];
1378                 for &(_, feature) in reg_class.supported_types(asm_arch) {
1379                     if let Some(feature) = feature {
1380                         if self.sess.target_features.contains(&Symbol::intern(feature)) {
1381                             required_features.clear();
1382                             break;
1383                         } else {
1384                             required_features.push(feature);
1385                         }
1386                     } else {
1387                         required_features.clear();
1388                         break;
1389                     }
1390                 }
1391                 // We are sorting primitive strs, so an unstable sort is fine here.
1392                 required_features.sort_unstable();
1393                 required_features.dedup();
1394                 match &required_features[..] {
1395                     [] => {}
1396                     [feature] => {
1397                         let msg = format!(
1398                             "register class `{}` requires the `{}` target feature",
1399                             reg_class.name(),
1400                             feature
1401                         );
1402                         sess.struct_span_err(op_sp, &msg).emit();
1403                     }
1404                     features => {
1405                         let msg = format!(
1406                             "register class `{}` requires at least one target feature: {}",
1407                             reg_class.name(),
1408                             features.join(", ")
1409                         );
1410                         sess.struct_span_err(op_sp, &msg).emit();
1411                     }
1412                 }
1413
1414                 // Check for conflicts between explicit register operands.
1415                 if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
1416                     let (input, output) = match op {
1417                         hir::InlineAsmOperand::In { .. } => (true, false),
1418                         // Late outputs do not conflict with inputs, but normal outputs do.
1419                         hir::InlineAsmOperand::Out { late, .. } => (!late, true),
1420                         hir::InlineAsmOperand::InOut { .. }
1421                         | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),
1422                         hir::InlineAsmOperand::Const { .. } | hir::InlineAsmOperand::Sym { .. } => {
1423                             unreachable!()
1424                         }
1425                     };
1426
1427                     // Flag to output the error only once per operand
1428                     let mut skip = false;
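                    // `overlapping_regs` also reports registers that merely alias `reg`
                    // (e.g. `al`/`ax`/`eax` on x86), so conflicts between different names
                    // for the same underlying register are caught as well.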
1429                     reg.overlapping_regs(|r| {
1430                         let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
1431                                          input| {
1432                             match used_regs.entry(r) {
1433                                 Entry::Occupied(o) => {
1434                                     if skip {
1435                                         return;
1436                                     }
1437                                     skip = true;
1438
1439                                     let idx2 = *o.get();
1440                                     let op2 = &operands[idx2];
1441                                     let op_sp2 = asm.operands[idx2].1;
1442                                     let reg2 = match op2.reg() {
1443                                         Some(asm::InlineAsmRegOrRegClass::Reg(r)) => r,
1444                                         _ => unreachable!(),
1445                                     };
1446
1447                                     let msg = format!(
1448                                         "register `{}` conflicts with register `{}`",
1449                                         reg.name(),
1450                                         reg2.name()
1451                                     );
1452                                     let mut err = sess.struct_span_err(op_sp, &msg);
1453                                     err.span_label(op_sp, &format!("register `{}`", reg.name()));
1454                                     err.span_label(op_sp2, &format!("register `{}`", reg2.name()));
1455
1456                                     match (op, op2) {
1457                                         (
1458                                             hir::InlineAsmOperand::In { .. },
1459                                             hir::InlineAsmOperand::Out { late, .. },
1460                                         )
1461                                         | (
1462                                             hir::InlineAsmOperand::Out { late, .. },
1463                                             hir::InlineAsmOperand::In { .. },
1464                                         ) => {
1465                                             assert!(!*late);
1466                                             let out_op_sp = if input { op_sp2 } else { op_sp };
1467                                             let msg = "use `lateout` instead of \
1468                                                     `out` to avoid conflict";
1469                                             err.span_help(out_op_sp, msg);
1470                                         }
1471                                         _ => {}
1472                                     }
1473
1474                                     err.emit();
1475                                 }
1476                                 Entry::Vacant(v) => {
1477                                     v.insert(idx);
1478                                 }
1479                             }
1480                         };
1481                         if input {
1482                             check(&mut used_input_regs, true);
1483                         }
1484                         if output {
1485                             check(&mut used_output_regs, false);
1486                         }
1487                     });
1488                 }
1489             }
1490         }
1491
1492         let operands = self.arena.alloc_from_iter(operands);
1493         let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
1494         let line_spans = self.arena.alloc_slice(&asm.line_spans[..]);
1495         let hir_asm = hir::InlineAsm { template, operands, options: asm.options, line_spans };
1496         hir::ExprKind::InlineAsm(self.arena.alloc(hir_asm))
1497     }
1498
1499     fn lower_expr_llvm_asm(&mut self, asm: &LlvmInlineAsm) -> hir::ExprKind<'hir> {
1500         let inner = hir::LlvmInlineAsmInner {
1501             inputs: asm.inputs.iter().map(|&(c, _)| c).collect(),
1502             outputs: asm
1503                 .outputs
1504                 .iter()
1505                 .map(|out| hir::LlvmInlineAsmOutput {
1506                     constraint: out.constraint,
1507                     is_rw: out.is_rw,
1508                     is_indirect: out.is_indirect,
1509                     span: out.expr.span,
1510                 })
1511                 .collect(),
1512             asm: asm.asm,
1513             asm_str_style: asm.asm_str_style,
1514             clobbers: asm.clobbers.clone(),
1515             volatile: asm.volatile,
1516             alignstack: asm.alignstack,
1517             dialect: asm.dialect,
1518         };
1519         let hir_asm = hir::LlvmInlineAsm {
1520             inner,
1521             inputs_exprs: self.arena.alloc_from_iter(
1522                 asm.inputs.iter().map(|&(_, ref input)| self.lower_expr_mut(input)),
1523             ),
1524             outputs_exprs: self
1525                 .arena
1526                 .alloc_from_iter(asm.outputs.iter().map(|out| self.lower_expr_mut(&out.expr))),
1527         };
1528         hir::ExprKind::LlvmInlineAsm(self.arena.alloc(hir_asm))
1529     }
1530
1531     fn lower_field(&mut self, f: &Field) -> hir::Field<'hir> {
1532         hir::Field {
1533             hir_id: self.next_id(),
1534             ident: f.ident,
1535             expr: self.lower_expr(&f.expr),
1536             span: f.span,
1537             is_shorthand: f.is_shorthand,
1538         }
1539     }
1540
1541     fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1542         match self.generator_kind {
1543             Some(hir::GeneratorKind::Gen) => {}
1544             Some(hir::GeneratorKind::Async(_)) => {
1545                 struct_span_err!(
1546                     self.sess,
1547                     span,
1548                     E0727,
1549                     "`async` generators are not yet supported"
1550                 )
1551                 .emit();
1552             }
1553             None => self.generator_kind = Some(hir::GeneratorKind::Gen),
1554         }
1555
1556         let expr =
1557             opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1558
1559         hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1560     }
1561
1562     /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
1563     /// ```rust
1564     /// {
1565     ///     let result = match ::std::iter::IntoIterator::into_iter(<head>) {
1566     ///         mut iter => {
1567     ///             [opt_ident]: loop {
1568     ///                 let mut __next;
1569     ///                 match ::std::iter::Iterator::next(&mut iter) {
1570     ///                     ::std::option::Option::Some(val) => __next = val,
1571     ///                     ::std::option::Option::None => break
1572     ///                 };
1573     ///                 let <pat> = __next;
1574     ///                 StmtKind::Expr(<body>);
1575     ///             }
1576     ///         }
1577     ///     };
1578     ///     result
1579     /// }
1580     /// ```
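    ///
    /// For example (purely illustrative), `'outer: for x in 0..3 { body }` takes the
    /// shape above with `0..3` as `<head>`, `x` as `<pat>`, and `'outer` as
    /// `[opt_ident]`. The lowering below resolves `into_iter` and `next` through the
    /// `IntoIterIntoIter` and `IteratorNext` lang items rather than the literal
    /// `::std` paths shown here.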
1581     fn lower_expr_for(
1582         &mut self,
1583         e: &Expr,
1584         pat: &Pat,
1585         head: &Expr,
1586         body: &Block,
1587         opt_label: Option<Label>,
1588     ) -> hir::Expr<'hir> {
1589         let orig_head_span = head.span;
1590         // expand <head>
1591         let mut head = self.lower_expr_mut(head);
1592         let desugared_span = self.mark_span_with_reason(
1593             DesugaringKind::ForLoop(ForLoopLoc::Head),
1594             orig_head_span,
1595             None,
1596         );
1597         head.span = desugared_span;
1598
1599         let iter = Ident::with_dummy_span(sym::iter);
1600
1601         let next_ident = Ident::with_dummy_span(sym::__next);
1602         let (next_pat, next_pat_hid) = self.pat_ident_binding_mode(
1603             desugared_span,
1604             next_ident,
1605             hir::BindingAnnotation::Mutable,
1606         );
1607
1608         // `::std::option::Option::Some(val) => __next = val`
1609         let pat_arm = {
1610             let val_ident = Ident::with_dummy_span(sym::val);
1611             let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident);
1612             let val_expr = self.expr_ident(pat.span, val_ident, val_pat_hid);
1613             let next_expr = self.expr_ident(pat.span, next_ident, next_pat_hid);
1614             let assign = self.arena.alloc(self.expr(
1615                 pat.span,
1616                 hir::ExprKind::Assign(next_expr, val_expr, pat.span),
1617                 ThinVec::new(),
1618             ));
1619             let some_pat = self.pat_some(pat.span, val_pat);
1620             self.arm(some_pat, assign)
1621         };
1622
1623         // `::std::option::Option::None => break`
1624         let break_arm = {
1625             let break_expr =
1626                 self.with_loop_scope(e.id, |this| this.expr_break(e.span, ThinVec::new()));
1627             let pat = self.pat_none(e.span);
1628             self.arm(pat, break_expr)
1629         };
1630
1631         // `mut iter`
1632         let (iter_pat, iter_pat_nid) =
1633             self.pat_ident_binding_mode(desugared_span, iter, hir::BindingAnnotation::Mutable);
1634
1635         // `match ::std::iter::Iterator::next(&mut iter) { ... }`
1636         let match_expr = {
1637             let iter = self.expr_ident(desugared_span, iter, iter_pat_nid);
1638             let ref_mut_iter = self.expr_mut_addr_of(desugared_span, iter);
1639             let next_expr = self.expr_call_lang_item_fn(
1640                 desugared_span,
1641                 hir::LangItem::IteratorNext,
1642                 arena_vec![self; ref_mut_iter],
1643             );
1644             let arms = arena_vec![self; pat_arm, break_arm];
1645
1646             self.expr_match(desugared_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
1647         };
1648         let match_stmt = self.stmt_expr(desugared_span, match_expr);
1649
1650         let next_expr = self.expr_ident(desugared_span, next_ident, next_pat_hid);
1651
1652         // `let mut __next`
1653         let next_let = self.stmt_let_pat(
1654             ThinVec::new(),
1655             desugared_span,
1656             None,
1657             next_pat,
1658             hir::LocalSource::ForLoopDesugar,
1659         );
1660
1661         // `let <pat> = __next`
1662         let pat = self.lower_pat(pat);
1663         let pat_let = self.stmt_let_pat(
1664             ThinVec::new(),
1665             desugared_span,
1666             Some(next_expr),
1667             pat,
1668             hir::LocalSource::ForLoopDesugar,
1669         );
1670
1671         let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
1672         let body_expr = self.expr_block(body_block, ThinVec::new());
1673         let body_stmt = self.stmt_expr(body.span, body_expr);
1674
1675         let loop_block = self.block_all(
1676             e.span,
1677             arena_vec![self; next_let, match_stmt, pat_let, body_stmt],
1678             None,
1679         );
1680
1681         // `[opt_ident]: loop { ... }`
1682         let kind = hir::ExprKind::Loop(loop_block, opt_label, hir::LoopSource::ForLoop);
1683         let loop_expr = self.arena.alloc(hir::Expr {
1684             hir_id: self.lower_node_id(e.id),
1685             kind,
1686             span: e.span,
1687             attrs: ThinVec::new(),
1688         });
1689
1690         // `mut iter => { ... }`
1691         let iter_arm = self.arm(iter_pat, loop_expr);
1692
1693         let into_iter_span = self.mark_span_with_reason(
1694             DesugaringKind::ForLoop(ForLoopLoc::IntoIter),
1695             orig_head_span,
1696             None,
1697         );
1698
1699         // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
1700         let into_iter_expr = {
1701             self.expr_call_lang_item_fn(
1702                 into_iter_span,
1703                 hir::LangItem::IntoIterIntoIter,
1704                 arena_vec![self; head],
1705             )
1706         };
1707
1708         let match_expr = self.arena.alloc(self.expr_match(
1709             desugared_span,
1710             into_iter_expr,
1711             arena_vec![self; iter_arm],
1712             hir::MatchSource::ForLoopDesugar,
1713         ));
1714
1715         let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
1716
1717         // This is effectively `{ let _result = ...; _result }`.
1718         // The construct was introduced in #21984 and is necessary to make sure that
1719         // temporaries in the `head` expression are dropped and do not leak to the
1720         // surrounding scope of the `match` since the `match` is not a terminating scope.
1721         //
1722         // Also, add the attributes to the outer returned expr node.
1723         self.expr_drop_temps_mut(desugared_span, match_expr, attrs.into())
1724     }
1725
1726     /// Desugar `ExprKind::Try` from: `<expr>?` into:
1727     /// ```rust
1728     /// match Try::into_result(<expr>) {
1729     ///     Ok(val) => #[allow(unreachable_code)] val,
1730     ///     Err(err) => #[allow(unreachable_code)]
1731     ///                 // If there is an enclosing `try {...}`:
1732     ///                 break 'catch_target Try::from_error(From::from(err)),
1733     ///                 // Otherwise:
1734     ///                 return Try::from_error(From::from(err)),
1735     /// }
1736     /// ```
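    ///
    /// For example (purely illustrative), `let x = f()?;` is lowered to such a
    /// `match` over `Try::into_result(f())`. The `Try::into_result`,
    /// `Try::from_error`, and `From::from` calls are resolved through the
    /// `TryIntoResult`, `TryFromError`, and `FromFrom` lang items rather than the
    /// paths shown here.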
1737     fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
1738         let unstable_span = self.mark_span_with_reason(
1739             DesugaringKind::QuestionMark,
1740             span,
1741             self.allow_try_trait.clone(),
1742         );
1743         let try_span = self.sess.source_map().end_point(span);
1744         let try_span = self.mark_span_with_reason(
1745             DesugaringKind::QuestionMark,
1746             try_span,
1747             self.allow_try_trait.clone(),
1748         );
1749
1750         // `Try::into_result(<expr>)`
1751         let scrutinee = {
1752             // expand <expr>
1753             let sub_expr = self.lower_expr_mut(sub_expr);
1754
1755             self.expr_call_lang_item_fn(
1756                 unstable_span,
1757                 hir::LangItem::TryIntoResult,
1758                 arena_vec![self; sub_expr],
1759             )
1760         };
1761
1762         // `#[allow(unreachable_code)]`
1763         let attr = {
1764             // `allow(unreachable_code)`
1765             let allow = {
1766                 let allow_ident = Ident::new(sym::allow, span);
1767                 let uc_ident = Ident::new(sym::unreachable_code, span);
1768                 let uc_nested = attr::mk_nested_word_item(uc_ident);
1769                 attr::mk_list_item(allow_ident, vec![uc_nested])
1770             };
1771             attr::mk_attr_outer(allow)
1772         };
1773         let attrs = vec![attr];
1774
1775         // `Ok(val) => #[allow(unreachable_code)] val,`
1776         let ok_arm = {
1777             let val_ident = Ident::with_dummy_span(sym::val);
1778             let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
1779             let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
1780                 span,
1781                 val_ident,
1782                 val_pat_nid,
1783                 ThinVec::from(attrs.clone()),
1784             ));
1785             let ok_pat = self.pat_ok(span, val_pat);
1786             self.arm(ok_pat, val_expr)
1787         };
1788
1789         // `Err(err) => #[allow(unreachable_code)]
1790         //              return Try::from_error(From::from(err)),`
1791         let err_arm = {
1792             let err_ident = Ident::with_dummy_span(sym::err);
1793             let (err_local, err_local_nid) = self.pat_ident(try_span, err_ident);
1794             let from_expr = {
1795                 let err_expr = self.expr_ident_mut(try_span, err_ident, err_local_nid);
1796                 self.expr_call_lang_item_fn(
1797                     try_span,
1798                     hir::LangItem::FromFrom,
1799                     arena_vec![self; err_expr],
1800                 )
1801             };
1802             let from_err_expr = self.wrap_in_try_constructor(
1803                 hir::LangItem::TryFromError,
1804                 unstable_span,
1805                 from_expr,
1806                 unstable_span,
1807             );
1808             let thin_attrs = ThinVec::from(attrs);
1809             let catch_scope = self.catch_scopes.last().copied();
1810             let ret_expr = if let Some(catch_node) = catch_scope {
1811                 let target_id = Ok(self.lower_node_id(catch_node));
1812                 self.arena.alloc(self.expr(
1813                     try_span,
1814                     hir::ExprKind::Break(
1815                         hir::Destination { label: None, target_id },
1816                         Some(from_err_expr),
1817                     ),
1818                     thin_attrs,
1819                 ))
1820             } else {
1821                 self.arena.alloc(self.expr(
1822                     try_span,
1823                     hir::ExprKind::Ret(Some(from_err_expr)),
1824                     thin_attrs,
1825                 ))
1826             };
1827
1828             let err_pat = self.pat_err(try_span, err_local);
1829             self.arm(err_pat, ret_expr)
1830         };
1831
1832         hir::ExprKind::Match(
1833             scrutinee,
1834             arena_vec![self; err_arm, ok_arm],
1835             hir::MatchSource::TryDesugar,
1836         )
1837     }
1838
1839     // =========================================================================
1840     // Helper methods for building HIR.
1841     // =========================================================================
1842
1843     /// Constructs a `true` or `false` literal expression.
1844     pub(super) fn expr_bool(&mut self, span: Span, val: bool) -> &'hir hir::Expr<'hir> {
1845         let lit = Spanned { span, node: LitKind::Bool(val) };
1846         self.arena.alloc(self.expr(span, hir::ExprKind::Lit(lit), ThinVec::new()))
1847     }
1848
1849     /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1850     ///
1851     /// In terms of drop order, it has the same effect as wrapping `expr` in
1852     /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1853     ///
1854     /// The drop order can be important in e.g. `if expr { .. }`.
1855     pub(super) fn expr_drop_temps(
1856         &mut self,
1857         span: Span,
1858         expr: &'hir hir::Expr<'hir>,
1859         attrs: AttrVec,
1860     ) -> &'hir hir::Expr<'hir> {
1861         self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
1862     }
1863
1864     pub(super) fn expr_drop_temps_mut(
1865         &mut self,
1866         span: Span,
1867         expr: &'hir hir::Expr<'hir>,
1868         attrs: AttrVec,
1869     ) -> hir::Expr<'hir> {
1870         self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
1871     }
1872
1873     fn expr_match(
1874         &mut self,
1875         span: Span,
1876         arg: &'hir hir::Expr<'hir>,
1877         arms: &'hir [hir::Arm<'hir>],
1878         source: hir::MatchSource,
1879     ) -> hir::Expr<'hir> {
1880         self.expr(span, hir::ExprKind::Match(arg, arms, source), ThinVec::new())
1881     }
1882
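    /// Constructs a `break` expression targeting the innermost enclosing loop scope.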
1883     fn expr_break(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
1884         let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
1885         self.arena.alloc(self.expr(span, expr_break, attrs))
1886     }
1887
1888     fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1889         self.expr(
1890             span,
1891             hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
1892             ThinVec::new(),
1893         )
1894     }
1895
1896     fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
1897         self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), ThinVec::new()))
1898     }
1899
1900     fn expr_call_mut(
1901         &mut self,
1902         span: Span,
1903         e: &'hir hir::Expr<'hir>,
1904         args: &'hir [hir::Expr<'hir>],
1905     ) -> hir::Expr<'hir> {
1906         self.expr(span, hir::ExprKind::Call(e, args), ThinVec::new())
1907     }
1908
1909     fn expr_call(
1910         &mut self,
1911         span: Span,
1912         e: &'hir hir::Expr<'hir>,
1913         args: &'hir [hir::Expr<'hir>],
1914     ) -> &'hir hir::Expr<'hir> {
1915         self.arena.alloc(self.expr_call_mut(span, e, args))
1916     }
1917
1918     fn expr_call_lang_item_fn_mut(
1919         &mut self,
1920         span: Span,
1921         lang_item: hir::LangItem,
1922         args: &'hir [hir::Expr<'hir>],
1923     ) -> hir::Expr<'hir> {
1924         let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item, ThinVec::new()));
1925         self.expr_call_mut(span, path, args)
1926     }
1927
1928     fn expr_call_lang_item_fn(
1929         &mut self,
1930         span: Span,
1931         lang_item: hir::LangItem,
1932         args: &'hir [hir::Expr<'hir>],
1933     ) -> &'hir hir::Expr<'hir> {
1934         self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args))
1935     }
1936
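    /// Constructs a path expression referring to `lang_item` via `QPath::LangItem`.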
1937     fn expr_lang_item_path(
1938         &mut self,
1939         span: Span,
1940         lang_item: hir::LangItem,
1941         attrs: AttrVec,
1942     ) -> hir::Expr<'hir> {
1943         self.expr(span, hir::ExprKind::Path(hir::QPath::LangItem(lang_item, span)), attrs)
1944     }
1945
1946     pub(super) fn expr_ident(
1947         &mut self,
1948         sp: Span,
1949         ident: Ident,
1950         binding: hir::HirId,
1951     ) -> &'hir hir::Expr<'hir> {
1952         self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
1953     }
1954
1955     pub(super) fn expr_ident_mut(
1956         &mut self,
1957         sp: Span,
1958         ident: Ident,
1959         binding: hir::HirId,
1960     ) -> hir::Expr<'hir> {
1961         self.expr_ident_with_attrs(sp, ident, binding, ThinVec::new())
1962     }
1963
1964     fn expr_ident_with_attrs(
1965         &mut self,
1966         span: Span,
1967         ident: Ident,
1968         binding: hir::HirId,
1969         attrs: AttrVec,
1970     ) -> hir::Expr<'hir> {
1971         let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
1972             None,
1973             self.arena.alloc(hir::Path {
1974                 span,
1975                 res: Res::Local(binding),
1976                 segments: arena_vec![self; hir::PathSegment::from_ident(ident)],
1977             }),
1978         ));
1979
1980         self.expr(span, expr_path, attrs)
1981     }
1982
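    /// Wraps `expr` in a compiler-generated `unsafe { ... }` block.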
1983     fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1984         let hir_id = self.next_id();
1985         let span = expr.span;
1986         self.expr(
1987             span,
1988             hir::ExprKind::Block(
1989                 self.arena.alloc(hir::Block {
1990                     stmts: &[],
1991                     expr: Some(expr),
1992                     hir_id,
1993                     rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
1994                     span,
1995                     targeted_by_break: false,
1996                 }),
1997                 None,
1998             ),
1999             ThinVec::new(),
2000         )
2001     }
2002
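    /// Constructs an empty block expression `{ }`.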
2003     fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
2004         let blk = self.block_all(span, &[], None);
2005         let expr = self.expr_block(blk, ThinVec::new());
2006         self.arena.alloc(expr)
2007     }
2008
2009     pub(super) fn expr_block(
2010         &mut self,
2011         b: &'hir hir::Block<'hir>,
2012         attrs: AttrVec,
2013     ) -> hir::Expr<'hir> {
2014         self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
2015     }
2016
2017     pub(super) fn expr(
2018         &mut self,
2019         span: Span,
2020         kind: hir::ExprKind<'hir>,
2021         attrs: AttrVec,
2022     ) -> hir::Expr<'hir> {
2023         hir::Expr { hir_id: self.next_id(), kind, span, attrs }
2024     }
2025
2026     fn field(&mut self, ident: Ident, expr: &'hir hir::Expr<'hir>, span: Span) -> hir::Field<'hir> {
2027         hir::Field { hir_id: self.next_id(), ident, span, expr, is_shorthand: false }
2028     }
2029
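    /// Constructs a guard-less match arm whose span is that of its body expression.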
2030     fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
2031         hir::Arm {
2032             hir_id: self.next_id(),
2033             attrs: &[],
2034             pat,
2035             guard: None,
2036             span: expr.span,
2037             body: expr,
2038         }
2039     }
2040 }