compiler/rustc_ast_lowering/src/expr.rs
1 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
2
3 use rustc_ast::attr;
4 use rustc_ast::ptr::P as AstP;
5 use rustc_ast::*;
6 use rustc_data_structures::fx::FxHashMap;
7 use rustc_data_structures::stack::ensure_sufficient_stack;
8 use rustc_data_structures::thin_vec::ThinVec;
9 use rustc_errors::struct_span_err;
10 use rustc_hir as hir;
11 use rustc_hir::def::Res;
12 use rustc_session::parse::feature_err;
13 use rustc_span::hygiene::ForLoopLoc;
14 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
15 use rustc_span::symbol::{sym, Ident, Symbol};
16 use rustc_target::asm;
17 use std::collections::hash_map::Entry;
18 use std::fmt::Write;
19
20 impl<'hir> LoweringContext<'_, 'hir> {
21     fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
22         self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
23     }
24
25     pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
26         self.arena.alloc(self.lower_expr_mut(e))
27     }
28
29     pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
30         ensure_sufficient_stack(|| {
31             let kind = match e.kind {
32                 ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
33                 ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
34                 ExprKind::ConstBlock(ref anon_const) => {
35                     let anon_const = self.lower_anon_const(anon_const);
36                     hir::ExprKind::ConstBlock(anon_const)
37                 }
38                 ExprKind::Repeat(ref expr, ref count) => {
39                     let expr = self.lower_expr(expr);
40                     let count = self.lower_anon_const(count);
41                     hir::ExprKind::Repeat(expr, count)
42                 }
43                 ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
44                 ExprKind::Call(ref f, ref args) => {
45                     let f = self.lower_expr(f);
46                     hir::ExprKind::Call(f, self.lower_exprs(args))
47                 }
48                 ExprKind::MethodCall(ref seg, ref args, span) => {
49                     let hir_seg = self.arena.alloc(self.lower_path_segment(
50                         e.span,
51                         seg,
52                         ParamMode::Optional,
53                         0,
54                         ParenthesizedGenericArgs::Err,
55                         ImplTraitContext::disallowed(),
56                         None,
57                     ));
58                     let args = self.lower_exprs(args);
59                     hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args, span)
60                 }
61                 ExprKind::Binary(binop, ref lhs, ref rhs) => {
62                     let binop = self.lower_binop(binop);
63                     let lhs = self.lower_expr(lhs);
64                     let rhs = self.lower_expr(rhs);
65                     hir::ExprKind::Binary(binop, lhs, rhs)
66                 }
67                 ExprKind::Unary(op, ref ohs) => {
68                     let op = self.lower_unop(op);
69                     let ohs = self.lower_expr(ohs);
70                     hir::ExprKind::Unary(op, ohs)
71                 }
72                 ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())),
73                 ExprKind::Cast(ref expr, ref ty) => {
74                     let expr = self.lower_expr(expr);
75                     let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
76                     hir::ExprKind::Cast(expr, ty)
77                 }
78                 ExprKind::Type(ref expr, ref ty) => {
79                     let expr = self.lower_expr(expr);
80                     let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
81                     hir::ExprKind::Type(expr, ty)
82                 }
83                 ExprKind::AddrOf(k, m, ref ohs) => {
84                     let ohs = self.lower_expr(ohs);
85                     hir::ExprKind::AddrOf(k, m, ohs)
86                 }
87                 ExprKind::Let(ref pat, ref scrutinee) => {
88                     self.lower_expr_let(e.span, pat, scrutinee)
89                 }
90                 ExprKind::If(ref cond, ref then, ref else_opt) => {
91                     self.lower_expr_if(e.span, cond, then, else_opt.as_deref())
92                 }
93                 ExprKind::While(ref cond, ref body, opt_label) => self
94                     .with_loop_scope(e.id, |this| {
95                         this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label)
96                     }),
97                 ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
98                     hir::ExprKind::Loop(
99                         this.lower_block(body, false),
100                         opt_label,
101                         hir::LoopSource::Loop,
102                     )
103                 }),
104                 ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
105                 ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
106                     self.lower_expr(expr),
107                     self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
108                     hir::MatchSource::Normal,
109                 ),
110                 ExprKind::Async(capture_clause, closure_node_id, ref block) => self
111                     .make_async_expr(
112                         capture_clause,
113                         closure_node_id,
114                         None,
115                         block.span,
116                         hir::AsyncGeneratorKind::Block,
117                         |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
118                     ),
119                 ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
120                 ExprKind::Closure(
121                     capture_clause,
122                     asyncness,
123                     movability,
124                     ref decl,
125                     ref body,
126                     fn_decl_span,
127                 ) => {
128                     if let Async::Yes { closure_id, .. } = asyncness {
129                         self.lower_expr_async_closure(
130                             capture_clause,
131                             closure_id,
132                             decl,
133                             body,
134                             fn_decl_span,
135                         )
136                     } else {
137                         self.lower_expr_closure(
138                             capture_clause,
139                             movability,
140                             decl,
141                             body,
142                             fn_decl_span,
143                         )
144                     }
145                 }
146                 ExprKind::Block(ref blk, opt_label) => {
147                     hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
148                 }
149                 ExprKind::Assign(ref el, ref er, span) => {
150                     self.lower_expr_assign(el, er, span, e.span)
151                 }
152                 ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
153                     self.lower_binop(op),
154                     self.lower_expr(el),
155                     self.lower_expr(er),
156                 ),
157                 ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident),
158                 ExprKind::Index(ref el, ref er) => {
159                     hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
160                 }
161                 ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
162                     self.lower_expr_range_closed(e.span, e1, e2)
163                 }
164                 ExprKind::Range(ref e1, ref e2, lims) => {
165                     self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
166                 }
167                 ExprKind::Underscore => {
168                     self.sess
169                         .struct_span_err(
170                             e.span,
171                             "in expressions, `_` can only be used on the left-hand side of an assignment",
172                         )
173                         .span_label(e.span, "`_` not allowed here")
174                         .emit();
175                     hir::ExprKind::Err
176                 }
177                 ExprKind::Path(ref qself, ref path) => {
178                     let qpath = self.lower_qpath(
179                         e.id,
180                         qself,
181                         path,
182                         ParamMode::Optional,
183                         ImplTraitContext::disallowed(),
184                     );
185                     hir::ExprKind::Path(qpath)
186                 }
187                 ExprKind::Break(opt_label, ref opt_expr) => {
188                     let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
189                     hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
190                 }
191                 ExprKind::Continue(opt_label) => {
192                     hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
193                 }
194                 ExprKind::Ret(ref e) => {
195                     let e = e.as_ref().map(|x| self.lower_expr(x));
196                     hir::ExprKind::Ret(e)
197                 }
198                 ExprKind::InlineAsm(ref asm) => self.lower_expr_asm(e.span, asm),
199                 ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_llvm_asm(asm),
200                 ExprKind::Struct(ref path, ref fields, ref rest) => {
201                     let rest = match rest {
202                         StructRest::Base(e) => Some(self.lower_expr(e)),
203                         StructRest::Rest(sp) => {
204                             self.sess
205                                 .struct_span_err(*sp, "base expression required after `..`")
206                                 .span_label(*sp, "add a base expression here")
207                                 .emit();
208                             Some(&*self.arena.alloc(self.expr_err(*sp)))
209                         }
210                         StructRest::None => None,
211                     };
212                     hir::ExprKind::Struct(
213                         self.arena.alloc(self.lower_qpath(
214                             e.id,
215                             &None,
216                             path,
217                             ParamMode::Optional,
218                             ImplTraitContext::disallowed(),
219                         )),
220                         self.arena.alloc_from_iter(fields.iter().map(|x| self.lower_field(x))),
221                         rest,
222                     )
223                 }
224                 ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
225                 ExprKind::Err => hir::ExprKind::Err,
226                 ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
227                 ExprKind::Paren(ref ex) => {
228                     let mut ex = self.lower_expr_mut(ex);
229                     // Include parens in span, but only if it is a super-span.
230                     if e.span.contains(ex.span) {
231                         ex.span = e.span;
232                     }
233                     // Merge attributes into the inner expression.
234                     let mut attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
235                     attrs.extend::<Vec<_>>(ex.attrs.into());
236                     ex.attrs = attrs.into();
237                     return ex;
238                 }
239
240                 // Desugar `ExprForLoop`
241                 // from: `[opt_ident]: for <pat> in <head> <body>`
242                 ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
243                     return self.lower_expr_for(e, pat, head, body, opt_label);
244                 }
245                 ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
246             };
247
248             hir::Expr {
249                 hir_id: self.lower_node_id(e.id),
250                 kind,
251                 span: e.span,
252                 attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
253             }
254         })
255     }
256
257     fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
258         match u {
259             UnOp::Deref => hir::UnOp::UnDeref,
260             UnOp::Not => hir::UnOp::UnNot,
261             UnOp::Neg => hir::UnOp::UnNeg,
262         }
263     }
264
265     fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
266         Spanned {
267             node: match b.node {
268                 BinOpKind::Add => hir::BinOpKind::Add,
269                 BinOpKind::Sub => hir::BinOpKind::Sub,
270                 BinOpKind::Mul => hir::BinOpKind::Mul,
271                 BinOpKind::Div => hir::BinOpKind::Div,
272                 BinOpKind::Rem => hir::BinOpKind::Rem,
273                 BinOpKind::And => hir::BinOpKind::And,
274                 BinOpKind::Or => hir::BinOpKind::Or,
275                 BinOpKind::BitXor => hir::BinOpKind::BitXor,
276                 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
277                 BinOpKind::BitOr => hir::BinOpKind::BitOr,
278                 BinOpKind::Shl => hir::BinOpKind::Shl,
279                 BinOpKind::Shr => hir::BinOpKind::Shr,
280                 BinOpKind::Eq => hir::BinOpKind::Eq,
281                 BinOpKind::Lt => hir::BinOpKind::Lt,
282                 BinOpKind::Le => hir::BinOpKind::Le,
283                 BinOpKind::Ne => hir::BinOpKind::Ne,
284                 BinOpKind::Ge => hir::BinOpKind::Ge,
285                 BinOpKind::Gt => hir::BinOpKind::Gt,
286             },
287             span: b.span,
288         }
289     }
290
291     /// Emit an error and lower `ast::ExprKind::Let(pat, scrutinee)` into:
292     /// ```rust
293     /// match scrutinee { pat => true, _ => false }
294     /// ```
295     fn lower_expr_let(&mut self, span: Span, pat: &Pat, scrutinee: &Expr) -> hir::ExprKind<'hir> {
296         // If we got here, the `let` expression is not allowed.
297
298         if self.sess.opts.unstable_features.is_nightly_build() {
299             self.sess
300                 .struct_span_err(span, "`let` expressions are not supported here")
301                 .note("only supported directly in conditions of `if`- and `while`-expressions")
302                 .note("as well as when nested within `&&` and parentheses in those conditions")
303                 .emit();
304         } else {
305             self.sess
306                 .struct_span_err(span, "expected expression, found statement (`let`)")
307                 .note("variable declaration using `let` is a statement")
308                 .emit();
309         }
310
311         // For better recovery, we emit:
312         // ```
313         // match scrutinee { pat => true, _ => false }
314         // ```
315         // While this doesn't fully match the user's intent, it has key advantages:
316         // 1. We can avoid using `abort_if_errors`.
317         // 2. We can typeck both `pat` and `scrutinee`.
318         // 3. `pat` is allowed to be refutable.
319         // 4. The return type of the block is `bool` which seems like what the user wanted.
320         let scrutinee = self.lower_expr(scrutinee);
321         let then_arm = {
322             let pat = self.lower_pat(pat);
323             let expr = self.expr_bool(span, true);
324             self.arm(pat, expr)
325         };
326         let else_arm = {
327             let pat = self.pat_wild(span);
328             let expr = self.expr_bool(span, false);
329             self.arm(pat, expr)
330         };
331         hir::ExprKind::Match(
332             scrutinee,
333             arena_vec![self; then_arm, else_arm],
334             hir::MatchSource::Normal,
335         )
336     }
337
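    /// Desugar `if cond { then } else { els }` into
    /// `match DropTemps(cond) { true => then, _ => els }`,
    /// and `if let pat = scrutinee { then } else { els }` into
    /// `match scrutinee { pat => then, _ => els }`
    /// (with `{}` standing in for a missing `else`).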
338     fn lower_expr_if(
339         &mut self,
340         span: Span,
341         cond: &Expr,
342         then: &Block,
343         else_opt: Option<&Expr>,
344     ) -> hir::ExprKind<'hir> {
345         // FIXME(#53667): handle lowering of && and parens.
346
347         // `_ => else_block` where `else_block` is `{}` if there is no `else` clause:
348         let else_pat = self.pat_wild(span);
349         let (else_expr, contains_else_clause) = match else_opt {
350             None => (self.expr_block_empty(span.shrink_to_hi()), false),
351             Some(els) => (self.lower_expr(els), true),
352         };
353         let else_arm = self.arm(else_pat, else_expr);
354
355         // Handle then + scrutinee:
356         let (then_pat, scrutinee, desugar) = match cond.kind {
357             // `<pat> => <then>`:
358             ExprKind::Let(ref pat, ref scrutinee) => {
359                 let scrutinee = self.lower_expr(scrutinee);
360                 let pat = self.lower_pat(pat);
361                 (pat, scrutinee, hir::MatchSource::IfLetDesugar { contains_else_clause })
362             }
363             // `true => <then>`:
364             _ => {
365                 // Lower condition:
366                 let cond = self.lower_expr(cond);
367                 let span_block =
368                     self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
369                 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
370                 // to preserve drop semantics since `if cond { ... }` does not
371                 // let temporaries live outside of `cond`.
372                 let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
373                 let pat = self.pat_bool(span, true);
374                 (pat, cond, hir::MatchSource::IfDesugar { contains_else_clause })
375             }
376         };
377         let then_expr = self.lower_block_expr(then);
378         let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));
379
380         hir::ExprKind::Match(scrutinee, arena_vec![self; then_arm, else_arm], desugar)
381     }
382
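    /// Desugar a `while`/`while let` expression into the `loop { match ... }` forms
    /// shown in the comments below; the caller must already have entered a loop scope
    /// (see `with_loop_scope`), since the generated `_ => break` arm targets the
    /// resulting loop.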
383     fn lower_expr_while_in_loop_scope(
384         &mut self,
385         span: Span,
386         cond: &Expr,
387         body: &Block,
388         opt_label: Option<Label>,
389     ) -> hir::ExprKind<'hir> {
390         // FIXME(#53667): handle lowering of && and parens.
391
392         // Note that the block AND the condition are evaluated in the loop scope.
393         // This is done to allow `break` from inside the condition of the loop.
394
395         // `_ => break`:
396         let else_arm = {
397             let else_pat = self.pat_wild(span);
398             let else_expr = self.expr_break(span, ThinVec::new());
399             self.arm(else_pat, else_expr)
400         };
401
402         // Handle then + scrutinee:
403         let (then_pat, scrutinee, desugar, source) = match cond.kind {
404             ExprKind::Let(ref pat, ref scrutinee) => {
405                 // We desugar `[opt_ident]: while let <pat> = <sub_expr> <body>` into:
406                 //
407                 //   [opt_ident]: loop {
408                 //     match <sub_expr> {
409                 //       <pat> => <body>,
410                 //       _ => break
411                 //     }
412                 //   }
413                 let scrutinee = self.with_loop_condition_scope(|t| t.lower_expr(scrutinee));
414                 let pat = self.lower_pat(pat);
415                 (pat, scrutinee, hir::MatchSource::WhileLetDesugar, hir::LoopSource::WhileLet)
416             }
417             _ => {
418                 // We desugar: `'label: while $cond $body` into:
419                 //
420                 // ```
421                 // 'label: loop {
422                 //     match drop-temps { $cond } {
423                 //         true => $body,
424                 //         _ => break,
425                 //     }
426                 // }
427                 // ```
428
429                 // Lower condition:
430                 let cond = self.with_loop_condition_scope(|this| this.lower_expr(cond));
431                 let span_block =
432                     self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
433                 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
434                 // to preserve drop semantics since `while cond { ... }` does not
435                 // let temporaries live outside of `cond`.
436                 let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
437                 // `true => <then>`:
438                 let pat = self.pat_bool(span, true);
439                 (pat, cond, hir::MatchSource::WhileDesugar, hir::LoopSource::While)
440             }
441         };
442         let then_expr = self.lower_block_expr(body);
443         let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));
444
445         // `match <scrutinee> { ... }`
446         let match_expr =
447             self.expr_match(span, scrutinee, arena_vec![self; then_arm, else_arm], desugar);
448
449         // `[opt_ident]: loop { ... }`
450         hir::ExprKind::Loop(self.block_expr(self.arena.alloc(match_expr)), opt_label, source)
451     }
452
453     /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_ok(<expr>) }`,
454     /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_ok(()) }`
455     /// and save the block id to use it as a break target for desugaring of the `?` operator.
456     fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
457         self.with_catch_scope(body.id, |this| {
458             let mut block = this.lower_block_noalloc(body, true);
459
460             // Final expression of the block (if present) or `()` with span at the end of block
461             let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
462                 (
463                     this.mark_span_with_reason(
464                         DesugaringKind::TryBlock,
465                         expr.span,
466                         this.allow_try_trait.clone(),
467                     ),
468                     expr,
469                 )
470             } else {
471                 let try_span = this.mark_span_with_reason(
472                     DesugaringKind::TryBlock,
473                     this.sess.source_map().end_point(body.span),
474                     this.allow_try_trait.clone(),
475                 );
476
477                 (try_span, this.expr_unit(try_span))
478             };
479
480             let ok_wrapped_span =
481                 this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);
482
483             // `::std::ops::Try::from_ok($tail_expr)`
484             block.expr = Some(this.wrap_in_try_constructor(
485                 hir::LangItem::TryFromOk,
486                 try_span,
487                 tail_expr,
488                 ok_wrapped_span,
489             ));
490
491             hir::ExprKind::Block(this.arena.alloc(block), None)
492         })
493     }
494
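    /// Build a call to the given `Try` lang item constructor around `expr`,
    /// e.g. `Try::from_ok(expr)` when `lang_item` is `TryFromOk`.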
495     fn wrap_in_try_constructor(
496         &mut self,
497         lang_item: hir::LangItem,
498         method_span: Span,
499         expr: &'hir hir::Expr<'hir>,
500         overall_span: Span,
501     ) -> &'hir hir::Expr<'hir> {
502         let constructor =
503             self.arena.alloc(self.expr_lang_item_path(method_span, lang_item, ThinVec::new()));
504         self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
505     }
506
507     fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
508         hir::Arm {
509             hir_id: self.next_id(),
510             attrs: self.lower_attrs(&arm.attrs),
511             pat: self.lower_pat(&arm.pat),
512             guard: match arm.guard {
513                 Some(ref x) => Some(hir::Guard::If(self.lower_expr(x))),
514                 _ => None,
515             },
516             body: self.lower_expr(&arm.body),
517             span: arm.span,
518         }
519     }
520
521     /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
522     ///
523     /// This results in:
524     ///
525     /// ```text
526     /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
527     ///     <body>
528     /// })
529     /// ```
530     pub(super) fn make_async_expr(
531         &mut self,
532         capture_clause: CaptureBy,
533         closure_node_id: NodeId,
534         ret_ty: Option<AstP<Ty>>,
535         span: Span,
536         async_gen_kind: hir::AsyncGeneratorKind,
537         body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
538     ) -> hir::ExprKind<'hir> {
539         let output = match ret_ty {
540             Some(ty) => hir::FnRetTy::Return(self.lower_ty(&ty, ImplTraitContext::disallowed())),
541             None => hir::FnRetTy::DefaultReturn(span),
542         };
543
544         // Resume argument type. We let the compiler infer this to simplify the lowering. It is
545         // fully constrained by `future::from_generator`.
546         let input_ty = hir::Ty { hir_id: self.next_id(), kind: hir::TyKind::Infer, span };
547
548         // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
549         let decl = self.arena.alloc(hir::FnDecl {
550             inputs: arena_vec![self; input_ty],
551             output,
552             c_variadic: false,
553             implicit_self: hir::ImplicitSelfKind::None,
554         });
555
556         // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
557         let (pat, task_context_hid) = self.pat_ident_binding_mode(
558             span,
559             Ident::with_dummy_span(sym::_task_context),
560             hir::BindingAnnotation::Mutable,
561         );
562         let param = hir::Param { attrs: &[], hir_id: self.next_id(), pat, ty_span: span, span };
563         let params = arena_vec![self; param];
564
565         let body_id = self.lower_body(move |this| {
566             this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
567
568             let old_ctx = this.task_context;
569             this.task_context = Some(task_context_hid);
570             let res = body(this);
571             this.task_context = old_ctx;
572             (params, res)
573         });
574
575         // `static |_task_context| -> <ret_ty> { body }`:
576         let generator_kind = hir::ExprKind::Closure(
577             capture_clause,
578             decl,
579             body_id,
580             span,
581             Some(hir::Movability::Static),
582         );
583         let generator = hir::Expr {
584             hir_id: self.lower_node_id(closure_node_id),
585             kind: generator_kind,
586             span,
587             attrs: ThinVec::new(),
588         };
589
590         // `future::from_generator`:
591         let unstable_span =
592             self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
593         let gen_future =
594             self.expr_lang_item_path(unstable_span, hir::LangItem::FromGenerator, ThinVec::new());
595
596         // `future::from_generator(generator)`:
597         hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
598     }
599
600     /// Desugar `<expr>.await` into:
601     /// ```rust
602     /// match <expr> {
603     ///     mut pinned => loop {
604     ///         match unsafe { ::std::future::Future::poll(
605     ///             <::std::pin::Pin>::new_unchecked(&mut pinned),
606     ///             ::std::future::get_context(task_context),
607     ///         ) } {
608     ///             ::std::task::Poll::Ready(result) => break result,
609     ///             ::std::task::Poll::Pending => {}
610     ///         }
611     ///         task_context = yield ();
612     ///     }
613     /// }
614     /// ```
615     fn lower_expr_await(&mut self, await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
616         match self.generator_kind {
617             Some(hir::GeneratorKind::Async(_)) => {}
618             Some(hir::GeneratorKind::Gen) | None => {
619                 let mut err = struct_span_err!(
620                     self.sess,
621                     await_span,
622                     E0728,
623                     "`await` is only allowed inside `async` functions and blocks"
624                 );
625                 err.span_label(await_span, "only allowed inside `async` functions and blocks");
626                 if let Some(item_sp) = self.current_item {
627                     err.span_label(item_sp, "this is not `async`");
628                 }
629                 err.emit();
630             }
631         }
632         let span = self.mark_span_with_reason(DesugaringKind::Await, await_span, None);
633         let gen_future_span = self.mark_span_with_reason(
634             DesugaringKind::Await,
635             await_span,
636             self.allow_gen_future.clone(),
637         );
638         let expr = self.lower_expr(expr);
639
640         let pinned_ident = Ident::with_dummy_span(sym::pinned);
641         let (pinned_pat, pinned_pat_hid) =
642             self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);
643
644         let task_context_ident = Ident::with_dummy_span(sym::_task_context);
645
646         // unsafe {
647         //     ::std::future::Future::poll(
648         //         ::std::pin::Pin::new_unchecked(&mut pinned),
649         //         ::std::future::get_context(task_context),
650         //     )
651         // }
652         let poll_expr = {
653             let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
654             let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
655             let task_context = if let Some(task_context_hid) = self.task_context {
656                 self.expr_ident_mut(span, task_context_ident, task_context_hid)
657             } else {
658                 // Use of `await` outside of an async context; we cannot use `task_context` here.
659                 self.expr_err(span)
660             };
661             let new_unchecked = self.expr_call_lang_item_fn_mut(
662                 span,
663                 hir::LangItem::PinNewUnchecked,
664                 arena_vec![self; ref_mut_pinned],
665             );
666             let get_context = self.expr_call_lang_item_fn_mut(
667                 gen_future_span,
668                 hir::LangItem::GetContext,
669                 arena_vec![self; task_context],
670             );
671             let call = self.expr_call_lang_item_fn(
672                 span,
673                 hir::LangItem::FuturePoll,
674                 arena_vec![self; new_unchecked, get_context],
675             );
676             self.arena.alloc(self.expr_unsafe(call))
677         };
678
679         // `::std::task::Poll::Ready(result) => break result`
680         let loop_node_id = self.resolver.next_node_id();
681         let loop_hir_id = self.lower_node_id(loop_node_id);
682         let ready_arm = {
683             let x_ident = Ident::with_dummy_span(sym::result);
684             let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
685             let x_expr = self.expr_ident(span, x_ident, x_pat_hid);
686             let ready_field = self.single_pat_field(span, x_pat);
687             let ready_pat = self.pat_lang_item_variant(span, hir::LangItem::PollReady, ready_field);
688             let break_x = self.with_loop_scope(loop_node_id, move |this| {
689                 let expr_break =
690                     hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
691                 this.arena.alloc(this.expr(await_span, expr_break, ThinVec::new()))
692             });
693             self.arm(ready_pat, break_x)
694         };
695
696         // `::std::task::Poll::Pending => {}`
697         let pending_arm = {
698             let pending_pat = self.pat_lang_item_variant(span, hir::LangItem::PollPending, &[]);
699             let empty_block = self.expr_block_empty(span);
700             self.arm(pending_pat, empty_block)
701         };
702
703         let inner_match_stmt = {
704             let match_expr = self.expr_match(
705                 span,
706                 poll_expr,
707                 arena_vec![self; ready_arm, pending_arm],
708                 hir::MatchSource::AwaitDesugar,
709             );
710             self.stmt_expr(span, match_expr)
711         };
712
713         // task_context = yield ();
714         let yield_stmt = {
715             let unit = self.expr_unit(span);
716             let yield_expr = self.expr(
717                 span,
718                 hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr.hir_id) }),
719                 ThinVec::new(),
720             );
721             let yield_expr = self.arena.alloc(yield_expr);
722
723             if let Some(task_context_hid) = self.task_context {
724                 let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
725                 let assign =
726                     self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, span), AttrVec::new());
727                 self.stmt_expr(span, assign)
728             } else {
729                 // Use of `await` outside of an async context. Return `yield_expr` so that we can
730                 // proceed with type checking.
731                 self.stmt(span, hir::StmtKind::Semi(yield_expr))
732             }
733         };
734
735         let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);
736
737         // loop { .. }
738         let loop_expr = self.arena.alloc(hir::Expr {
739             hir_id: loop_hir_id,
740             kind: hir::ExprKind::Loop(loop_block, None, hir::LoopSource::Loop),
741             span,
742             attrs: ThinVec::new(),
743         });
744
745         // mut pinned => loop { ... }
746         let pinned_arm = self.arm(pinned_pat, loop_expr);
747
748         // match <expr> {
749         //     mut pinned => loop { .. }
750         // }
751         hir::ExprKind::Match(expr, arena_vec![self; pinned_arm], hir::MatchSource::AwaitDesugar)
752     }
753
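    /// Lower an ordinary (non-`async`) closure. If lowering the body finds a `yield`,
    /// the closure is a generator, and the requested `movability` is validated by
    /// `generator_movability_for_fn`.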
754     fn lower_expr_closure(
755         &mut self,
756         capture_clause: CaptureBy,
757         movability: Movability,
758         decl: &FnDecl,
759         body: &Expr,
760         fn_decl_span: Span,
761     ) -> hir::ExprKind<'hir> {
762         // Lower outside new scope to preserve `is_in_loop_condition`.
763         let fn_decl = self.lower_fn_decl(decl, None, false, None);
764
765         self.with_new_scopes(move |this| {
766             let prev = this.current_item;
767             this.current_item = Some(fn_decl_span);
768             let mut generator_kind = None;
769             let body_id = this.lower_fn_body(decl, |this| {
770                 let e = this.lower_expr_mut(body);
771                 generator_kind = this.generator_kind;
772                 e
773             });
774             let generator_option =
775                 this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
776             this.current_item = prev;
777             hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, generator_option)
778         })
779     }
780
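    /// Compute the movability recorded on the lowered closure: generator bodies keep
    /// the requested `movability` but may take at most one parameter (E0628), while
    /// plain closures must not be declared `static` (E0697) and get `None`.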
781     fn generator_movability_for_fn(
782         &mut self,
783         decl: &FnDecl,
784         fn_decl_span: Span,
785         generator_kind: Option<hir::GeneratorKind>,
786         movability: Movability,
787     ) -> Option<hir::Movability> {
788         match generator_kind {
789             Some(hir::GeneratorKind::Gen) => {
790                 if decl.inputs.len() > 1 {
791                     struct_span_err!(
792                         self.sess,
793                         fn_decl_span,
794                         E0628,
795                         "too many parameters for a generator (expected 0 or 1 parameters)"
796                     )
797                     .emit();
798                 }
799                 Some(movability)
800             }
801             Some(hir::GeneratorKind::Async(_)) => {
802                 panic!("non-`async` closure body turned `async` during lowering");
803             }
804             None => {
805                 if movability == Movability::Static {
806                     struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
807                         .emit();
808                 }
809                 None
810             }
811         }
812     }
813
814     fn lower_expr_async_closure(
815         &mut self,
816         capture_clause: CaptureBy,
817         closure_id: NodeId,
818         decl: &FnDecl,
819         body: &Expr,
820         fn_decl_span: Span,
821     ) -> hir::ExprKind<'hir> {
822         let outer_decl =
823             FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
824         // We need to lower the declaration outside the new scope, because we
825         // have to preserve the state of being inside a loop condition for the
826         // closure argument types.
827         let fn_decl = self.lower_fn_decl(&outer_decl, None, false, None);
828
829         self.with_new_scopes(move |this| {
830             // FIXME(cramertj): allow `async` non-`move` closures with arguments.
831             if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
832                 struct_span_err!(
833                     this.sess,
834                     fn_decl_span,
835                     E0708,
836                     "`async` non-`move` closures with parameters are not currently supported",
837                 )
838                 .help(
839                     "consider using `let` statements to manually capture \
840                     variables by reference before entering an `async move` closure",
841                 )
842                 .emit();
843             }
844
845             // Transform `async |x: u8| -> X { ... }` into
846             // `|x: u8| future::from_generator(|| -> X { ... })`.
847             let body_id = this.lower_fn_body(&outer_decl, |this| {
848                 let async_ret_ty =
849                     if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
850                 let async_body = this.make_async_expr(
851                     capture_clause,
852                     closure_id,
853                     async_ret_ty,
854                     body.span,
855                     hir::AsyncGeneratorKind::Closure,
856                     |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
857                 );
858                 this.expr(fn_decl_span, async_body, ThinVec::new())
859             });
860             hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, None)
861         })
862     }
863
864     /// Destructure the LHS of complex assignments.
865     /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
866     fn lower_expr_assign(
867         &mut self,
868         lhs: &Expr,
869         rhs: &Expr,
870         eq_sign_span: Span,
871         whole_span: Span,
872     ) -> hir::ExprKind<'hir> {
873         // Return early in case of an ordinary assignment.
874         fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
875             match &lhs.kind {
876                 ExprKind::Array(..)
877                 | ExprKind::Struct(..)
878                 | ExprKind::Tup(..)
879                 | ExprKind::Underscore => false,
880                 // Check for tuple struct constructor.
881                 ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
882                 ExprKind::Paren(e) => {
883                     match e.kind {
884                         // We special-case `(..)` for consistency with patterns.
885                         ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
886                         _ => is_ordinary(lower_ctx, e),
887                     }
888                 }
889                 _ => true,
890             }
891         }
892         if is_ordinary(self, lhs) {
893             return hir::ExprKind::Assign(self.lower_expr(lhs), self.lower_expr(rhs), eq_sign_span);
894         }
895         if !self.sess.features_untracked().destructuring_assignment {
896             feature_err(
897                 &self.sess.parse_sess,
898                 sym::destructuring_assignment,
899                 eq_sign_span,
900                 "destructuring assignments are unstable",
901             )
902             .span_label(lhs.span, "cannot assign to this expression")
903             .emit();
904         }
905
906         let mut assignments = vec![];
907
908         // The LHS becomes a pattern: `(lhs1, lhs2)`.
909         let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
910         let rhs = self.lower_expr(rhs);
911
912         // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
913         let destructure_let = self.stmt_let_pat(
914             ThinVec::new(),
915             whole_span,
916             Some(rhs),
917             pat,
918             hir::LocalSource::AssignDesugar(eq_sign_span),
919         );
920
921         // `a = lhs1; b = lhs2;`.
922         let stmts = self
923             .arena
924             .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));
925
926         // Wrap everything in a block.
927         hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None)
928     }
929
930     /// If the given expression is a path to a tuple struct, returns that path.
931     /// It is not a complete check, but just tries to reject most paths early
932     /// if they are not tuple structs.
933     /// Type checking will take care of the full validation later.
934     fn extract_tuple_struct_path<'a>(&mut self, expr: &'a Expr) -> Option<&'a Path> {
935         // For tuple struct destructuring, it must be a non-qualified path (like in patterns).
936         if let ExprKind::Path(None, path) = &expr.kind {
937             // Does the path resolve to something disallowed in a tuple struct/variant pattern?
938             if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
939                 if partial_res.unresolved_segments() == 0
940                     && !partial_res.base_res().expected_in_tuple_struct_pat()
941                 {
942                     return None;
943                 }
944             }
945             return Some(path);
946         }
947         None
948     }
949
950     /// Convert the LHS of a destructuring assignment to a pattern.
951     /// Each sub-assignment is recorded in `assignments`.
952     fn destructure_assign(
953         &mut self,
954         lhs: &Expr,
955         eq_sign_span: Span,
956         assignments: &mut Vec<hir::Stmt<'hir>>,
957     ) -> &'hir hir::Pat<'hir> {
958         match &lhs.kind {
959             // Underscore pattern.
960             ExprKind::Underscore => {
961                 return self.pat_without_dbm(lhs.span, hir::PatKind::Wild);
962             }
963             // Slice patterns.
964             ExprKind::Array(elements) => {
965                 let (pats, rest) =
966                     self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
967                 let slice_pat = if let Some((i, span)) = rest {
968                     let (before, after) = pats.split_at(i);
969                     hir::PatKind::Slice(
970                         before,
971                         Some(self.pat_without_dbm(span, hir::PatKind::Wild)),
972                         after,
973                     )
974                 } else {
975                     hir::PatKind::Slice(pats, None, &[])
976                 };
977                 return self.pat_without_dbm(lhs.span, slice_pat);
978             }
979             // Tuple structs.
980             ExprKind::Call(callee, args) => {
981                 if let Some(path) = self.extract_tuple_struct_path(callee) {
982                     let (pats, rest) = self.destructure_sequence(
983                         args,
984                         "tuple struct or variant",
985                         eq_sign_span,
986                         assignments,
987                     );
988                     let qpath = self.lower_qpath(
989                         callee.id,
990                         &None,
991                         path,
992                         ParamMode::Optional,
993                         ImplTraitContext::disallowed(),
994                     );
995                     // Destructure like a tuple struct.
996                     let tuple_struct_pat =
997                         hir::PatKind::TupleStruct(qpath, pats, rest.map(|r| r.0));
998                     return self.pat_without_dbm(lhs.span, tuple_struct_pat);
999                 }
1000             }
1001             // Structs.
1002             ExprKind::Struct(path, fields, rest) => {
1003                 let field_pats = self.arena.alloc_from_iter(fields.iter().map(|f| {
1004                     let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
1005                     hir::FieldPat {
1006                         hir_id: self.next_id(),
1007                         ident: f.ident,
1008                         pat,
1009                         is_shorthand: f.is_shorthand,
1010                         span: f.span,
1011                     }
1012                 }));
1013                 let qpath = self.lower_qpath(
1014                     lhs.id,
1015                     &None,
1016                     path,
1017                     ParamMode::Optional,
1018                     ImplTraitContext::disallowed(),
1019                 );
1020                 let fields_omitted = match rest {
1021                     StructRest::Base(e) => {
1022                         self.sess
1023                             .struct_span_err(
1024                                 e.span,
1025                                 "functional record updates are not allowed in destructuring \
1026                                     assignments",
1027                             )
1028                             .span_suggestion(
1029                                 e.span,
1030                                 "consider removing the trailing pattern",
1031                                 String::new(),
1032                                 rustc_errors::Applicability::MachineApplicable,
1033                             )
1034                             .emit();
1035                         true
1036                     }
1037                     StructRest::Rest(_) => true,
1038                     StructRest::None => false,
1039                 };
1040                 let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
1041                 return self.pat_without_dbm(lhs.span, struct_pat);
1042             }
1043             // Tuples.
1044             ExprKind::Tup(elements) => {
1045                 let (pats, rest) =
1046                     self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
1047                 let tuple_pat = hir::PatKind::Tuple(pats, rest.map(|r| r.0));
1048                 return self.pat_without_dbm(lhs.span, tuple_pat);
1049             }
1050             ExprKind::Paren(e) => {
1051                 // We special-case `(..)` for consistency with patterns.
1052                 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1053                     let tuple_pat = hir::PatKind::Tuple(&[], Some(0));
1054                     return self.pat_without_dbm(lhs.span, tuple_pat);
1055                 } else {
1056                     return self.destructure_assign(e, eq_sign_span, assignments);
1057                 }
1058             }
1059             _ => {}
1060         }
1061         // Treat all other cases as a normal lvalue.
1062         let ident = Ident::new(sym::lhs, lhs.span);
1063         let (pat, binding) = self.pat_ident(lhs.span, ident);
1064         let ident = self.expr_ident(lhs.span, ident, binding);
1065         let assign = hir::ExprKind::Assign(self.lower_expr(lhs), ident, eq_sign_span);
1066         let expr = self.expr(lhs.span, assign, ThinVec::new());
1067         assignments.push(self.stmt_expr(lhs.span, expr));
1068         pat
1069     }
1070
1071     /// Destructure a sequence of expressions occurring on the LHS of an assignment.
1072     /// Such a sequence occurs in a tuple, tuple struct, or slice.
1073     /// Return a sequence of corresponding patterns, and the index and the span of `..` if it
1074     /// exists.
1075     /// Each sub-assignment is recorded in `assignments`.
1076     fn destructure_sequence(
1077         &mut self,
1078         elements: &[AstP<Expr>],
1079         ctx: &str,
1080         eq_sign_span: Span,
1081         assignments: &mut Vec<hir::Stmt<'hir>>,
1082     ) -> (&'hir [&'hir hir::Pat<'hir>], Option<(usize, Span)>) {
1083         let mut rest = None;
1084         let elements =
1085             self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
1086                 // Check for `..` pattern.
1087                 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1088                     if let Some((_, prev_span)) = rest {
1089                         self.ban_extra_rest_pat(e.span, prev_span, ctx);
1090                     } else {
1091                         rest = Some((i, e.span));
1092                     }
1093                     None
1094                 } else {
1095                     Some(self.destructure_assign(e, eq_sign_span, assignments))
1096                 }
1097             }));
1098         (elements, rest)
1099     }
1100
1101     /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
1102     fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
1103         let e1 = self.lower_expr_mut(e1);
1104         let e2 = self.lower_expr_mut(e2);
1105         let fn_path = hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, span);
1106         let fn_expr =
1107             self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), ThinVec::new()));
1108         hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
1109     }
1110
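    /// Lower a range expression other than `start..=end` (handled by
    /// `lower_expr_range_closed`) to a struct expression for the matching range lang
    /// item, e.g. `start..end` becomes `Range { start, end }` and `..=end` becomes
    /// `RangeToInclusive { end }`.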
1111     fn lower_expr_range(
1112         &mut self,
1113         span: Span,
1114         e1: Option<&Expr>,
1115         e2: Option<&Expr>,
1116         lims: RangeLimits,
1117     ) -> hir::ExprKind<'hir> {
1118         use rustc_ast::RangeLimits::*;
1119
1120         let lang_item = match (e1, e2, lims) {
1121             (None, None, HalfOpen) => hir::LangItem::RangeFull,
1122             (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
1123             (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
1124             (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
1125             (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
1126             (Some(..), Some(..), Closed) => unreachable!(),
1127             (_, None, Closed) => {
1128                 self.diagnostic().span_fatal(span, "inclusive range with no end").raise()
1129             }
1130         };
1131
1132         let fields = self.arena.alloc_from_iter(
1133             e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e))).map(|(s, e)| {
1134                 let expr = self.lower_expr(&e);
1135                 let ident = Ident::new(Symbol::intern(s), e.span);
1136                 self.field(ident, expr, e.span)
1137             }),
1138         );
1139
1140         hir::ExprKind::Struct(self.arena.alloc(hir::QPath::LangItem(lang_item, span)), fields, None)
1141     }
1142
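    /// Resolve the target of a loop jump: an explicit label is looked up through the
    /// resolver, otherwise the innermost loop scope is used. Failures are recorded as
    /// `hir::LoopIdError`s instead of being reported here.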
1143     fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
1144         let target_id = match destination {
1145             Some((id, _)) => {
1146                 if let Some(loop_id) = self.resolver.get_label_res(id) {
1147                     Ok(self.lower_node_id(loop_id))
1148                 } else {
1149                     Err(hir::LoopIdError::UnresolvedLabel)
1150                 }
1151             }
1152             None => self
1153                 .loop_scopes
1154                 .last()
1155                 .cloned()
1156                 .map(|id| Ok(self.lower_node_id(id)))
1157                 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
1158         };
1159         hir::Destination { label: destination.map(|(_, label)| label), target_id }
1160     }
1161
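    /// Lower the destination of a `break`/`continue`. Unlabeled jumps inside a `while`
    /// condition are rejected with `UnlabeledCfInWhileCondition`; everything else goes
    /// through `lower_loop_destination`.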
1162     fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
1163         if self.is_in_loop_condition && opt_label.is_none() {
1164             hir::Destination {
1165                 label: None,
1166                 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
1167             }
1168         } else {
1169             self.lower_loop_destination(opt_label.map(|label| (id, label)))
1170         }
1171     }
1172
1173     fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1174         let len = self.catch_scopes.len();
1175         self.catch_scopes.push(catch_id);
1176
1177         let result = f(self);
1178         assert_eq!(
1179             len + 1,
1180             self.catch_scopes.len(),
1181             "catch scopes should be added and removed in stack order"
1182         );
1183
1184         self.catch_scopes.pop().unwrap();
1185
1186         result
1187     }
1188
1189     fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1190         // We're no longer in the base loop's condition; we're in another loop.
1191         let was_in_loop_condition = self.is_in_loop_condition;
1192         self.is_in_loop_condition = false;
1193
1194         let len = self.loop_scopes.len();
1195         self.loop_scopes.push(loop_id);
1196
1197         let result = f(self);
1198         assert_eq!(
1199             len + 1,
1200             self.loop_scopes.len(),
1201             "loop scopes should be added and removed in stack order"
1202         );
1203
1204         self.loop_scopes.pop().unwrap();
1205
1206         self.is_in_loop_condition = was_in_loop_condition;
1207
1208         result
1209     }
1210
1211     fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
1212         let was_in_loop_condition = self.is_in_loop_condition;
1213         self.is_in_loop_condition = true;
1214
1215         let result = f(self);
1216
1217         self.is_in_loop_condition = was_in_loop_condition;
1218
1219         result
1220     }
1221
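    /// Lower an `asm!` expression: report targets without `asm!` support and misuse of
    /// the `att_syntax` option, then lower each operand, skipping operands whose
    /// register or register class fails to parse for the current architecture.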
1222     fn lower_expr_asm(&mut self, sp: Span, asm: &InlineAsm) -> hir::ExprKind<'hir> {
1223         if self.sess.asm_arch.is_none() {
1224             struct_span_err!(self.sess, sp, E0472, "asm! is unsupported on this target").emit();
1225         }
1226         if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
1227             && !matches!(
1228                 self.sess.asm_arch,
1229                 Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64)
1230             )
1231         {
1232             self.sess
1233                 .struct_span_err(sp, "the `att_syntax` option is only supported on x86")
1234                 .emit();
1235         }
1236
1237         // Lower operands to HIR. `filter_map` skips any operands with invalid
1238         // registers or register classes.
1239         let sess = self.sess;
1240         let operands: Vec<_> = asm
1241             .operands
1242             .iter()
1243             .filter_map(|(op, op_sp)| {
1244                 let lower_reg = |reg| {
1245                     Some(match reg {
1246                         InlineAsmRegOrRegClass::Reg(s) => asm::InlineAsmRegOrRegClass::Reg(
1247                             asm::InlineAsmReg::parse(
1248                                 sess.asm_arch?,
1249                                 |feature| sess.target_features.contains(&Symbol::intern(feature)),
1250                                 &sess.target,
1251                                 s,
1252                             )
1253                             .map_err(|e| {
1254                                 let msg = format!("invalid register `{}`: {}", s.as_str(), e);
1255                                 sess.struct_span_err(*op_sp, &msg).emit();
1256                             })
1257                             .ok()?,
1258                         ),
1259                         InlineAsmRegOrRegClass::RegClass(s) => {
1260                             asm::InlineAsmRegOrRegClass::RegClass(
1261                                 asm::InlineAsmRegClass::parse(sess.asm_arch?, s)
1262                                     .map_err(|e| {
1263                                         let msg = format!(
1264                                             "invalid register class `{}`: {}",
1265                                             s.as_str(),
1266                                             e
1267                                         );
1268                                         sess.struct_span_err(*op_sp, &msg).emit();
1269                                     })
1270                                     .ok()?,
1271                             )
1272                         }
1273                     })
1274                 };
1275
1276                 // `lower_reg` is called last so that all sub-expressions are
1277                 // lowered even when an invalid register causes the operand to be skipped.
1278                 let op = match *op {
1279                     InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
1280                         expr: self.lower_expr_mut(expr),
1281                         reg: lower_reg(reg)?,
1282                     },
1283                     InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
1284                         late,
1285                         expr: expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
1286                         reg: lower_reg(reg)?,
1287                     },
1288                     InlineAsmOperand::InOut { reg, late, ref expr } => {
1289                         hir::InlineAsmOperand::InOut {
1290                             late,
1291                             expr: self.lower_expr_mut(expr),
1292                             reg: lower_reg(reg)?,
1293                         }
1294                     }
1295                     InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
1296                         hir::InlineAsmOperand::SplitInOut {
1297                             late,
1298                             in_expr: self.lower_expr_mut(in_expr),
1299                             out_expr: out_expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
1300                             reg: lower_reg(reg)?,
1301                         }
1302                     }
1303                     InlineAsmOperand::Const { ref expr } => {
1304                         hir::InlineAsmOperand::Const { expr: self.lower_expr_mut(expr) }
1305                     }
1306                     InlineAsmOperand::Sym { ref expr } => {
1307                         hir::InlineAsmOperand::Sym { expr: self.lower_expr_mut(expr) }
1308                     }
1309                 };
1310                 Some(op)
1311             })
1312             .collect();
1313
1314         // Stop if there were any errors when lowering the register classes
1315         if operands.len() != asm.operands.len() || sess.asm_arch.is_none() {
1316             return hir::ExprKind::Err;
1317         }
1318
1319         // Validate template modifiers against the register classes for the operands
1320         let asm_arch = sess.asm_arch.unwrap();
1321         for p in &asm.template {
1322             if let InlineAsmTemplatePiece::Placeholder {
1323                 operand_idx,
1324                 modifier: Some(modifier),
1325                 span: placeholder_span,
1326             } = *p
1327             {
1328                 let op_sp = asm.operands[operand_idx].1;
1329                 match &operands[operand_idx] {
1330                     hir::InlineAsmOperand::In { reg, .. }
1331                     | hir::InlineAsmOperand::Out { reg, .. }
1332                     | hir::InlineAsmOperand::InOut { reg, .. }
1333                     | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
1334                         let class = reg.reg_class();
1335                         let valid_modifiers = class.valid_modifiers(asm_arch);
1336                         if !valid_modifiers.contains(&modifier) {
1337                             let mut err = sess.struct_span_err(
1338                                 placeholder_span,
1339                                 "invalid asm template modifier for this register class",
1340                             );
1341                             err.span_label(placeholder_span, "template modifier");
1342                             err.span_label(op_sp, "argument");
1343                             if !valid_modifiers.is_empty() {
1344                                 let mut mods = format!("`{}`", valid_modifiers[0]);
1345                                 for m in &valid_modifiers[1..] {
1346                                     let _ = write!(mods, ", `{}`", m);
1347                                 }
1348                                 err.note(&format!(
1349                                     "the `{}` register class supports \
1350                                      the following template modifiers: {}",
1351                                     class.name(),
1352                                     mods
1353                                 ));
1354                             } else {
1355                                 err.note(&format!(
1356                                     "the `{}` register class does not support template modifiers",
1357                                     class.name()
1358                                 ));
1359                             }
1360                             err.emit();
1361                         }
1362                     }
1363                     hir::InlineAsmOperand::Const { .. } => {
1364                         let mut err = sess.struct_span_err(
1365                             placeholder_span,
1366                             "asm template modifiers are not allowed for `const` arguments",
1367                         );
1368                         err.span_label(placeholder_span, "template modifier");
1369                         err.span_label(op_sp, "argument");
1370                         err.emit();
1371                     }
1372                     hir::InlineAsmOperand::Sym { .. } => {
1373                         let mut err = sess.struct_span_err(
1374                             placeholder_span,
1375                             "asm template modifiers are not allowed for `sym` arguments",
1376                         );
1377                         err.span_label(placeholder_span, "template modifier");
1378                         err.span_label(op_sp, "argument");
1379                         err.emit();
1380                     }
1381                 }
1382             }
1383         }
1384
1385         let mut used_input_regs = FxHashMap::default();
1386         let mut used_output_regs = FxHashMap::default();
1387         let mut required_features: Vec<&str> = vec![];
1388         for (idx, op) in operands.iter().enumerate() {
1389             let op_sp = asm.operands[idx].1;
1390             if let Some(reg) = op.reg() {
1391                 // Make sure we don't accidentally carry features from the
1392                 // previous iteration.
1393                 required_features.clear();
1394
1395                 // Validate register classes against currently enabled target
1396                 // features. We check that at least one type is available for
1397                 // the current target.
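                     // If any supported type needs no target feature, or needs one
                     // that is already enabled, the class is usable and
                     // `required_features` stays empty; otherwise the missing
                     // features are collected and reported below.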
1398                 let reg_class = reg.reg_class();
1399                 for &(_, feature) in reg_class.supported_types(asm_arch) {
1400                     if let Some(feature) = feature {
1401                         if self.sess.target_features.contains(&Symbol::intern(feature)) {
1402                             required_features.clear();
1403                             break;
1404                         } else {
1405                             required_features.push(feature);
1406                         }
1407                     } else {
1408                         required_features.clear();
1409                         break;
1410                     }
1411                 }
1412                 // We are sorting primitive `&str`s here, so an unstable sort is fine.
1413                 required_features.sort_unstable();
1414                 required_features.dedup();
1415                 match &required_features[..] {
1416                     [] => {}
1417                     [feature] => {
1418                         let msg = format!(
1419                             "register class `{}` requires the `{}` target feature",
1420                             reg_class.name(),
1421                             feature
1422                         );
1423                         sess.struct_span_err(op_sp, &msg).emit();
1424                     }
1425                     features => {
1426                         let msg = format!(
1427                             "register class `{}` requires at least one target feature: {}",
1428                             reg_class.name(),
1429                             features.join(", ")
1430                         );
1431                         sess.struct_span_err(op_sp, &msg).emit();
1432                     }
1433                 }
1434
1435                 // Check for conflicts between explicit register operands.
1436                 if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
1437                     let (input, output) = match op {
1438                         hir::InlineAsmOperand::In { .. } => (true, false),
1439                         // Late outputs do not conflict with inputs, but normal outputs do.
1440                         hir::InlineAsmOperand::Out { late, .. } => (!late, true),
1441                         hir::InlineAsmOperand::InOut { .. }
1442                         | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),
1443                         hir::InlineAsmOperand::Const { .. } | hir::InlineAsmOperand::Sym { .. } => {
1444                             unreachable!()
1445                         }
1446                     };
1447
1448                     // Flag so the conflict error is emitted only once per operand.
1449                     let mut skip = false;
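                         // The closure is invoked for `reg` itself and for every
                         // register overlapping it (e.g. sub-registers), so a
                         // conflict is reported even when two operands name
                         // different views of the same underlying register.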
1450                     reg.overlapping_regs(|r| {
1451                         let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
1452                                          input| {
1453                             match used_regs.entry(r) {
1454                                 Entry::Occupied(o) => {
1455                                     if skip {
1456                                         return;
1457                                     }
1458                                     skip = true;
1459
1460                                     let idx2 = *o.get();
1461                                     let op2 = &operands[idx2];
1462                                     let op_sp2 = asm.operands[idx2].1;
1463                                     let reg2 = match op2.reg() {
1464                                         Some(asm::InlineAsmRegOrRegClass::Reg(r)) => r,
1465                                         _ => unreachable!(),
1466                                     };
1467
1468                                     let msg = format!(
1469                                         "register `{}` conflicts with register `{}`",
1470                                         reg.name(),
1471                                         reg2.name()
1472                                     );
1473                                     let mut err = sess.struct_span_err(op_sp, &msg);
1474                                     err.span_label(op_sp, &format!("register `{}`", reg.name()));
1475                                     err.span_label(op_sp2, &format!("register `{}`", reg2.name()));
1476
1477                                     match (op, op2) {
1478                                         (
1479                                             hir::InlineAsmOperand::In { .. },
1480                                             hir::InlineAsmOperand::Out { late, .. },
1481                                         )
1482                                         | (
1483                                             hir::InlineAsmOperand::Out { late, .. },
1484                                             hir::InlineAsmOperand::In { .. },
1485                                         ) => {
1486                                             assert!(!*late);
1487                                             let out_op_sp = if input { op_sp2 } else { op_sp };
1488                                             let msg = "use `lateout` instead of \
1489                                                     `out` to avoid conflict";
1490                                             err.span_help(out_op_sp, msg);
1491                                         }
1492                                         _ => {}
1493                                     }
1494
1495                                     err.emit();
1496                                 }
1497                                 Entry::Vacant(v) => {
1498                                     v.insert(idx);
1499                                 }
1500                             }
1501                         };
1502                         if input {
1503                             check(&mut used_input_regs, true);
1504                         }
1505                         if output {
1506                             check(&mut used_output_regs, false);
1507                         }
1508                     });
1509                 }
1510             }
1511         }
1512
1513         let operands = self.arena.alloc_from_iter(operands);
1514         let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
1515         let line_spans = self.arena.alloc_slice(&asm.line_spans[..]);
1516         let hir_asm = hir::InlineAsm { template, operands, options: asm.options, line_spans };
1517         hir::ExprKind::InlineAsm(self.arena.alloc(hir_asm))
1518     }
1519
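     /// Lowers the legacy `llvm_asm!` form. Unlike `asm!`, this is a mostly
     /// structural translation: constraints, clobbers, and options are copied
     /// into `hir::LlvmInlineAsmInner`, and only the input and output
     /// expressions themselves are lowered.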
1520     fn lower_expr_llvm_asm(&mut self, asm: &LlvmInlineAsm) -> hir::ExprKind<'hir> {
1521         let inner = hir::LlvmInlineAsmInner {
1522             inputs: asm.inputs.iter().map(|&(c, _)| c).collect(),
1523             outputs: asm
1524                 .outputs
1525                 .iter()
1526                 .map(|out| hir::LlvmInlineAsmOutput {
1527                     constraint: out.constraint,
1528                     is_rw: out.is_rw,
1529                     is_indirect: out.is_indirect,
1530                     span: out.expr.span,
1531                 })
1532                 .collect(),
1533             asm: asm.asm,
1534             asm_str_style: asm.asm_str_style,
1535             clobbers: asm.clobbers.clone(),
1536             volatile: asm.volatile,
1537             alignstack: asm.alignstack,
1538             dialect: asm.dialect,
1539         };
1540         let hir_asm = hir::LlvmInlineAsm {
1541             inner,
1542             inputs_exprs: self.arena.alloc_from_iter(
1543                 asm.inputs.iter().map(|&(_, ref input)| self.lower_expr_mut(input)),
1544             ),
1545             outputs_exprs: self
1546                 .arena
1547                 .alloc_from_iter(asm.outputs.iter().map(|out| self.lower_expr_mut(&out.expr))),
1548         };
1549         hir::ExprKind::LlvmInlineAsm(self.arena.alloc(hir_asm))
1550     }
1551
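     /// Lowers a single `field: expr` (or shorthand `field`) of a struct literal.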
1552     fn lower_field(&mut self, f: &Field) -> hir::Field<'hir> {
1553         hir::Field {
1554             hir_id: self.next_id(),
1555             ident: f.ident,
1556             expr: self.lower_expr(&f.expr),
1557             span: f.span,
1558             is_shorthand: f.is_shorthand,
1559         }
1560     }
1561
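     /// Lowers a `yield` expression. Encountering a `yield` is what marks the
     /// enclosing body as a generator; yielding inside an `async` body is
     /// rejected here with E0727.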
1562     fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1563         match self.generator_kind {
1564             Some(hir::GeneratorKind::Gen) => {}
1565             Some(hir::GeneratorKind::Async(_)) => {
1566                 struct_span_err!(
1567                     self.sess,
1568                     span,
1569                     E0727,
1570                     "`async` generators are not yet supported"
1571                 )
1572                 .emit();
1573             }
1574             None => self.generator_kind = Some(hir::GeneratorKind::Gen),
1575         }
1576
1577         let expr =
1578             opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1579
1580         hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1581     }
1582
1583     /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
1584     /// ```rust
1585     /// {
1586     ///     let result = match ::std::iter::IntoIterator::into_iter(<head>) {
1587     ///         mut iter => {
1588     ///             [opt_ident]: loop {
1589     ///                 let mut __next;
1590     ///                 match ::std::iter::Iterator::next(&mut iter) {
1591     ///                     ::std::option::Option::Some(val) => __next = val,
1592     ///                     ::std::option::Option::None => break
1593     ///                 };
1594     ///                 let <pat> = __next;
1595     ///                 StmtKind::Expr(<body>);
1596     ///             }
1597     ///         }
1598     ///     };
1599     ///     result
1600     /// }
1601     /// ```
1602     fn lower_expr_for(
1603         &mut self,
1604         e: &Expr,
1605         pat: &Pat,
1606         head: &Expr,
1607         body: &Block,
1608         opt_label: Option<Label>,
1609     ) -> hir::Expr<'hir> {
1610         let orig_head_span = head.span;
1611         // expand <head>
1612         let mut head = self.lower_expr_mut(head);
1613         let desugared_span = self.mark_span_with_reason(
1614             DesugaringKind::ForLoop(ForLoopLoc::Head),
1615             orig_head_span,
1616             None,
1617         );
1618         head.span = desugared_span;
1619
1620         let iter = Ident::with_dummy_span(sym::iter);
1621
1622         let next_ident = Ident::with_dummy_span(sym::__next);
1623         let (next_pat, next_pat_hid) = self.pat_ident_binding_mode(
1624             desugared_span,
1625             next_ident,
1626             hir::BindingAnnotation::Mutable,
1627         );
1628
1629         // `::std::option::Option::Some(val) => __next = val`
1630         let pat_arm = {
1631             let val_ident = Ident::with_dummy_span(sym::val);
1632             let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident);
1633             let val_expr = self.expr_ident(pat.span, val_ident, val_pat_hid);
1634             let next_expr = self.expr_ident(pat.span, next_ident, next_pat_hid);
1635             let assign = self.arena.alloc(self.expr(
1636                 pat.span,
1637                 hir::ExprKind::Assign(next_expr, val_expr, pat.span),
1638                 ThinVec::new(),
1639             ));
1640             let some_pat = self.pat_some(pat.span, val_pat);
1641             self.arm(some_pat, assign)
1642         };
1643
1644         // `::std::option::Option::None => break`
1645         let break_arm = {
1646             let break_expr =
1647                 self.with_loop_scope(e.id, |this| this.expr_break(e.span, ThinVec::new()));
1648             let pat = self.pat_none(e.span);
1649             self.arm(pat, break_expr)
1650         };
1651
1652         // `mut iter`
1653         let (iter_pat, iter_pat_nid) =
1654             self.pat_ident_binding_mode(desugared_span, iter, hir::BindingAnnotation::Mutable);
1655
1656         // `match ::std::iter::Iterator::next(&mut iter) { ... }`
1657         let match_expr = {
1658             let iter = self.expr_ident(desugared_span, iter, iter_pat_nid);
1659             let ref_mut_iter = self.expr_mut_addr_of(desugared_span, iter);
1660             let next_expr = self.expr_call_lang_item_fn(
1661                 desugared_span,
1662                 hir::LangItem::IteratorNext,
1663                 arena_vec![self; ref_mut_iter],
1664             );
1665             let arms = arena_vec![self; pat_arm, break_arm];
1666
1667             self.expr_match(desugared_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
1668         };
1669         let match_stmt = self.stmt_expr(desugared_span, match_expr);
1670
1671         let next_expr = self.expr_ident(desugared_span, next_ident, next_pat_hid);
1672
1673         // `let mut __next`
1674         let next_let = self.stmt_let_pat(
1675             ThinVec::new(),
1676             desugared_span,
1677             None,
1678             next_pat,
1679             hir::LocalSource::ForLoopDesugar,
1680         );
1681
1682         // `let <pat> = __next`
1683         let pat = self.lower_pat(pat);
1684         let pat_let = self.stmt_let_pat(
1685             ThinVec::new(),
1686             desugared_span,
1687             Some(next_expr),
1688             pat,
1689             hir::LocalSource::ForLoopDesugar,
1690         );
1691
1692         let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
1693         let body_expr = self.expr_block(body_block, ThinVec::new());
1694         let body_stmt = self.stmt_expr(body.span, body_expr);
1695
1696         let loop_block = self.block_all(
1697             e.span,
1698             arena_vec![self; next_let, match_stmt, pat_let, body_stmt],
1699             None,
1700         );
1701
1702         // `[opt_ident]: loop { ... }`
1703         let kind = hir::ExprKind::Loop(loop_block, opt_label, hir::LoopSource::ForLoop);
1704         let loop_expr = self.arena.alloc(hir::Expr {
1705             hir_id: self.lower_node_id(e.id),
1706             kind,
1707             span: e.span,
1708             attrs: ThinVec::new(),
1709         });
1710
1711         // `mut iter => { ... }`
1712         let iter_arm = self.arm(iter_pat, loop_expr);
1713
1714         let into_iter_span = self.mark_span_with_reason(
1715             DesugaringKind::ForLoop(ForLoopLoc::IntoIter),
1716             orig_head_span,
1717             None,
1718         );
1719
1720         // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
1721         let into_iter_expr = {
1722             self.expr_call_lang_item_fn(
1723                 into_iter_span,
1724                 hir::LangItem::IntoIterIntoIter,
1725                 arena_vec![self; head],
1726             )
1727         };
1728
1729         let match_expr = self.arena.alloc(self.expr_match(
1730             desugared_span,
1731             into_iter_expr,
1732             arena_vec![self; iter_arm],
1733             hir::MatchSource::ForLoopDesugar,
1734         ));
1735
1736         let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
1737
1738         // This is effectively `{ let _result = ...; _result }`.
1739         // The construct was introduced in #21984 and is necessary to make sure that
1740         // temporaries in the `head` expression are dropped and do not leak to the
1741         // surrounding scope of the `match` since the `match` is not a terminating scope.
1742         //
1743         // Also, add the attributes to the outer returned expr node.
1744         self.expr_drop_temps_mut(desugared_span, match_expr, attrs.into())
1745     }
1746
1747     /// Desugar `ExprKind::Try` from: `<expr>?` into:
1748     /// ```rust
1749     /// match Try::into_result(<expr>) {
1750     ///     Ok(val) => #[allow(unreachable_code)] val,
1751     ///     Err(err) => #[allow(unreachable_code)]
1752     ///                 // If there is an enclosing `try {...}`:
1753     ///                 break 'catch_target Try::from_error(From::from(err)),
1754     ///                 // Otherwise:
1755     ///                 return Try::from_error(From::from(err)),
1756     /// }
1757     /// ```
1758     fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
1759         let unstable_span = self.mark_span_with_reason(
1760             DesugaringKind::QuestionMark,
1761             span,
1762             self.allow_try_trait.clone(),
1763         );
1764         let try_span = self.sess.source_map().end_point(span);
1765         let try_span = self.mark_span_with_reason(
1766             DesugaringKind::QuestionMark,
1767             try_span,
1768             self.allow_try_trait.clone(),
1769         );
1770
1771         // `Try::into_result(<expr>)`
1772         let scrutinee = {
1773             // expand <expr>
1774             let sub_expr = self.lower_expr_mut(sub_expr);
1775
1776             self.expr_call_lang_item_fn(
1777                 unstable_span,
1778                 hir::LangItem::TryIntoResult,
1779                 arena_vec![self; sub_expr],
1780             )
1781         };
1782
1783         // `#[allow(unreachable_code)]`
1784         let attr = {
1785             // `allow(unreachable_code)`
1786             let allow = {
1787                 let allow_ident = Ident::new(sym::allow, span);
1788                 let uc_ident = Ident::new(sym::unreachable_code, span);
1789                 let uc_nested = attr::mk_nested_word_item(uc_ident);
1790                 attr::mk_list_item(allow_ident, vec![uc_nested])
1791             };
1792             attr::mk_attr_outer(allow)
1793         };
1794         let attrs = vec![attr];
1795
1796         // `Ok(val) => #[allow(unreachable_code)] val,`
1797         let ok_arm = {
1798             let val_ident = Ident::with_dummy_span(sym::val);
1799             let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
1800             let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
1801                 span,
1802                 val_ident,
1803                 val_pat_nid,
1804                 ThinVec::from(attrs.clone()),
1805             ));
1806             let ok_pat = self.pat_ok(span, val_pat);
1807             self.arm(ok_pat, val_expr)
1808         };
1809
1810         // `Err(err) => #[allow(unreachable_code)]
1811         //              return Try::from_error(From::from(err)),`
1812         let err_arm = {
1813             let err_ident = Ident::with_dummy_span(sym::err);
1814             let (err_local, err_local_nid) = self.pat_ident(try_span, err_ident);
1815             let from_expr = {
1816                 let err_expr = self.expr_ident_mut(try_span, err_ident, err_local_nid);
1817                 self.expr_call_lang_item_fn(
1818                     try_span,
1819                     hir::LangItem::FromFrom,
1820                     arena_vec![self; err_expr],
1821                 )
1822             };
1823             let from_err_expr = self.wrap_in_try_constructor(
1824                 hir::LangItem::TryFromError,
1825                 unstable_span,
1826                 from_expr,
1827                 unstable_span,
1828             );
1829             let thin_attrs = ThinVec::from(attrs);
1830             let catch_scope = self.catch_scopes.last().copied();
1831             let ret_expr = if let Some(catch_node) = catch_scope {
1832                 let target_id = Ok(self.lower_node_id(catch_node));
1833                 self.arena.alloc(self.expr(
1834                     try_span,
1835                     hir::ExprKind::Break(
1836                         hir::Destination { label: None, target_id },
1837                         Some(from_err_expr),
1838                     ),
1839                     thin_attrs,
1840                 ))
1841             } else {
1842                 self.arena.alloc(self.expr(
1843                     try_span,
1844                     hir::ExprKind::Ret(Some(from_err_expr)),
1845                     thin_attrs,
1846                 ))
1847             };
1848
1849             let err_pat = self.pat_err(try_span, err_local);
1850             self.arm(err_pat, ret_expr)
1851         };
1852
1853         hir::ExprKind::Match(
1854             scrutinee,
1855             arena_vec![self; err_arm, ok_arm],
1856             hir::MatchSource::TryDesugar,
1857         )
1858     }
1859
1860     // =========================================================================
1861     // Helper methods for building HIR.
1862     // =========================================================================
1863
1864     /// Constructs a `true` or `false` literal expression.
1865     pub(super) fn expr_bool(&mut self, span: Span, val: bool) -> &'hir hir::Expr<'hir> {
1866         let lit = Spanned { span, node: LitKind::Bool(val) };
1867         self.arena.alloc(self.expr(span, hir::ExprKind::Lit(lit), ThinVec::new()))
1868     }
1869
1870     /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1871     ///
1872     /// In terms of drop order, it has the same effect as wrapping `expr` in
1873     /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1874     ///
1875     /// The drop order can be important in e.g. `if expr { .. }`.
1876     pub(super) fn expr_drop_temps(
1877         &mut self,
1878         span: Span,
1879         expr: &'hir hir::Expr<'hir>,
1880         attrs: AttrVec,
1881     ) -> &'hir hir::Expr<'hir> {
1882         self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
1883     }
1884
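     /// Like `expr_drop_temps`, but returns the expression by value instead of
     /// allocating it in the arena.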
1885     pub(super) fn expr_drop_temps_mut(
1886         &mut self,
1887         span: Span,
1888         expr: &'hir hir::Expr<'hir>,
1889         attrs: AttrVec,
1890     ) -> hir::Expr<'hir> {
1891         self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
1892     }
1893
1894     fn expr_match(
1895         &mut self,
1896         span: Span,
1897         arg: &'hir hir::Expr<'hir>,
1898         arms: &'hir [hir::Arm<'hir>],
1899         source: hir::MatchSource,
1900     ) -> hir::Expr<'hir> {
1901         self.expr(span, hir::ExprKind::Match(arg, arms, source), ThinVec::new())
1902     }
1903
1904     fn expr_break(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
1905         let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
1906         self.arena.alloc(self.expr(span, expr_break, attrs))
1907     }
1908
1909     fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1910         self.expr(
1911             span,
1912             hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
1913             ThinVec::new(),
1914         )
1915     }
1916
1917     fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
1918         self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), ThinVec::new()))
1919     }
1920
1921     fn expr_call_mut(
1922         &mut self,
1923         span: Span,
1924         e: &'hir hir::Expr<'hir>,
1925         args: &'hir [hir::Expr<'hir>],
1926     ) -> hir::Expr<'hir> {
1927         self.expr(span, hir::ExprKind::Call(e, args), ThinVec::new())
1928     }
1929
1930     fn expr_call(
1931         &mut self,
1932         span: Span,
1933         e: &'hir hir::Expr<'hir>,
1934         args: &'hir [hir::Expr<'hir>],
1935     ) -> &'hir hir::Expr<'hir> {
1936         self.arena.alloc(self.expr_call_mut(span, e, args))
1937     }
1938
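     /// Builds a call to the given lang item, resolved through
     /// `hir::QPath::LangItem` rather than a user-written path; the `for` loop
     /// and `?` desugarings above use this for items such as `IteratorNext` and
     /// `TryIntoResult`.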
1939     fn expr_call_lang_item_fn_mut(
1940         &mut self,
1941         span: Span,
1942         lang_item: hir::LangItem,
1943         args: &'hir [hir::Expr<'hir>],
1944     ) -> hir::Expr<'hir> {
1945         let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item, ThinVec::new()));
1946         self.expr_call_mut(span, path, args)
1947     }
1948
1949     fn expr_call_lang_item_fn(
1950         &mut self,
1951         span: Span,
1952         lang_item: hir::LangItem,
1953         args: &'hir [hir::Expr<'hir>],
1954     ) -> &'hir hir::Expr<'hir> {
1955         self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args))
1956     }
1957
1958     fn expr_lang_item_path(
1959         &mut self,
1960         span: Span,
1961         lang_item: hir::LangItem,
1962         attrs: AttrVec,
1963     ) -> hir::Expr<'hir> {
1964         self.expr(span, hir::ExprKind::Path(hir::QPath::LangItem(lang_item, span)), attrs)
1965     }
1966
1967     pub(super) fn expr_ident(
1968         &mut self,
1969         sp: Span,
1970         ident: Ident,
1971         binding: hir::HirId,
1972     ) -> &'hir hir::Expr<'hir> {
1973         self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
1974     }
1975
1976     pub(super) fn expr_ident_mut(
1977         &mut self,
1978         sp: Span,
1979         ident: Ident,
1980         binding: hir::HirId,
1981     ) -> hir::Expr<'hir> {
1982         self.expr_ident_with_attrs(sp, ident, binding, ThinVec::new())
1983     }
1984
1985     fn expr_ident_with_attrs(
1986         &mut self,
1987         span: Span,
1988         ident: Ident,
1989         binding: hir::HirId,
1990         attrs: AttrVec,
1991     ) -> hir::Expr<'hir> {
1992         let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
1993             None,
1994             self.arena.alloc(hir::Path {
1995                 span,
1996                 res: Res::Local(binding),
1997                 segments: arena_vec![self; hir::PathSegment::from_ident(ident)],
1998             }),
1999         ));
2000
2001         self.expr(span, expr_path, attrs)
2002     }
2003
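     /// Wraps `expr` in a block whose check mode is
     /// `UnsafeBlock(CompilerGenerated)`, allowing desugared code to perform
     /// unsafe operations without an `unsafe` block appearing in the source.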
2004     fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
2005         let hir_id = self.next_id();
2006         let span = expr.span;
2007         self.expr(
2008             span,
2009             hir::ExprKind::Block(
2010                 self.arena.alloc(hir::Block {
2011                     stmts: &[],
2012                     expr: Some(expr),
2013                     hir_id,
2014                     rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
2015                     span,
2016                     targeted_by_break: false,
2017                 }),
2018                 None,
2019             ),
2020             ThinVec::new(),
2021         )
2022     }
2023
2024     fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
2025         let blk = self.block_all(span, &[], None);
2026         let expr = self.expr_block(blk, ThinVec::new());
2027         self.arena.alloc(expr)
2028     }
2029
2030     pub(super) fn expr_block(
2031         &mut self,
2032         b: &'hir hir::Block<'hir>,
2033         attrs: AttrVec,
2034     ) -> hir::Expr<'hir> {
2035         self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
2036     }
2037
2038     pub(super) fn expr(
2039         &mut self,
2040         span: Span,
2041         kind: hir::ExprKind<'hir>,
2042         attrs: AttrVec,
2043     ) -> hir::Expr<'hir> {
2044         hir::Expr { hir_id: self.next_id(), kind, span, attrs }
2045     }
2046
2047     fn field(&mut self, ident: Ident, expr: &'hir hir::Expr<'hir>, span: Span) -> hir::Field<'hir> {
2048         hir::Field { hir_id: self.next_id(), ident, span, expr, is_shorthand: false }
2049     }
2050
2051     fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
2052         hir::Arm {
2053             hir_id: self.next_id(),
2054             attrs: &[],
2055             pat,
2056             guard: None,
2057             span: expr.span,
2058             body: expr,
2059         }
2060     }
2061 }