1 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
4 use rustc_ast::ptr::P as AstP;
6 use rustc_data_structures::stack::ensure_sufficient_stack;
7 use rustc_data_structures::thin_vec::ThinVec;
8 use rustc_errors::struct_span_err;
10 use rustc_hir::def::Res;
11 use rustc_hir::definitions::DefPathData;
12 use rustc_session::parse::feature_err;
13 use rustc_span::hygiene::ExpnId;
14 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
15 use rustc_span::symbol::{sym, Ident, Symbol};
16 use rustc_span::{hygiene::ForLoopLoc, DUMMY_SP};
18 impl<'hir> LoweringContext<'_, 'hir> {
// Lowers a slice of AST expressions into a single arena-allocated slice of HIR
// expressions, lowering each element with `lower_expr_mut`.
19 fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
20 self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
// Convenience wrapper over `lower_expr_mut` that moves the lowered expression
// into the arena and hands back a `'hir` reference.
23 pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
24 self.arena.alloc(self.lower_expr_mut(e))
// Central AST -> HIR expression lowering. Dispatches on `e.kind`, delegating
// each non-trivial form (`if`, `while`, `await`, closures, ranges, assignment
// destructuring, ...) to a dedicated `lower_expr_*` helper below. Runs inside
// `ensure_sufficient_stack` because expression trees can be deeply nested.
27 pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
28 ensure_sufficient_stack(|| {
29 let kind = match e.kind {
30 ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
31 ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
32 ExprKind::ConstBlock(ref anon_const) => {
33 let anon_const = self.lower_anon_const(anon_const);
34 hir::ExprKind::ConstBlock(anon_const)
36 ExprKind::Repeat(ref expr, ref count) => {
37 let expr = self.lower_expr(expr);
// The repeat count is an anonymous constant (`[x; N]`).
38 let count = self.lower_anon_const(count);
39 hir::ExprKind::Repeat(expr, count)
41 ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
42 ExprKind::Call(ref f, ref args) => {
// Calls to functions with `#[rustc_legacy_const_generics]` take a
// special path that re-shuffles value arguments into const generics.
43 if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f) {
44 self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
46 let f = self.lower_expr(f);
47 hir::ExprKind::Call(f, self.lower_exprs(args))
50 ExprKind::MethodCall(ref seg, ref args, span) => {
51 let hir_seg = self.arena.alloc(self.lower_path_segment(
56 ParenthesizedGenericArgs::Err,
57 ImplTraitContext::disallowed(),
59 let args = self.lower_exprs(args);
60 hir::ExprKind::MethodCall(
62 self.lower_span(seg.ident.span),
64 self.lower_span(span),
67 ExprKind::Binary(binop, ref lhs, ref rhs) => {
68 let binop = self.lower_binop(binop);
69 let lhs = self.lower_expr(lhs);
70 let rhs = self.lower_expr(rhs);
71 hir::ExprKind::Binary(binop, lhs, rhs)
73 ExprKind::Unary(op, ref ohs) => {
74 let op = self.lower_unop(op);
75 let ohs = self.lower_expr(ohs);
76 hir::ExprKind::Unary(op, ohs)
78 ExprKind::Lit(ref l) => {
79 hir::ExprKind::Lit(respan(self.lower_span(l.span), l.kind.clone()))
81 ExprKind::Cast(ref expr, ref ty) => {
82 let expr = self.lower_expr(expr);
83 let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
84 hir::ExprKind::Cast(expr, ty)
86 ExprKind::Type(ref expr, ref ty) => {
87 let expr = self.lower_expr(expr);
88 let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
89 hir::ExprKind::Type(expr, ty)
91 ExprKind::AddrOf(k, m, ref ohs) => {
92 let ohs = self.lower_expr(ohs);
93 hir::ExprKind::AddrOf(k, m, ohs)
95 ExprKind::Let(ref pat, ref scrutinee, span) => hir::ExprKind::Let(
97 self.lower_expr(scrutinee),
98 self.lower_span(span),
100 ExprKind::If(ref cond, ref then, ref else_opt) => {
101 self.lower_expr_if(cond, then, else_opt.as_deref())
103 ExprKind::While(ref cond, ref body, opt_label) => {
// `while` is desugared inside a fresh loop scope; the span is
// marked so diagnostics know it came from a `while` desugaring.
104 self.with_loop_scope(e.id, |this| {
106 this.mark_span_with_reason(DesugaringKind::WhileLoop, e.span, None);
107 this.lower_expr_while_in_loop_scope(span, cond, body, opt_label)
110 ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
112 this.lower_block(body, false),
113 this.lower_label(opt_label),
114 hir::LoopSource::Loop,
118 ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
119 ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
120 self.lower_expr(expr),
121 self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
122 hir::MatchSource::Normal,
124 ExprKind::Async(capture_clause, closure_node_id, ref block) => self
130 hir::AsyncGeneratorKind::Block,
131 |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
133 ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
// Closures split on asyncness: `async` closures take the
// generator-wrapping path, plain closures the direct one.
142 if let Async::Yes { closure_id, .. } = asyncness {
143 self.lower_expr_async_closure(
151 self.lower_expr_closure(
160 ExprKind::Block(ref blk, opt_label) => {
161 let opt_label = self.lower_label(opt_label);
162 hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
164 ExprKind::Assign(ref el, ref er, span) => {
165 self.lower_expr_assign(el, er, span, e.span)
167 ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
168 self.lower_binop(op),
172 ExprKind::Field(ref el, ident) => {
173 hir::ExprKind::Field(self.lower_expr(el), self.lower_ident(ident))
175 ExprKind::Index(ref el, ref er) => {
176 hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
// `a..=b` with both endpoints gets the `RangeInclusive::new` form;
// all other range shapes go through the struct-literal lowering.
178 ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
179 self.lower_expr_range_closed(e.span, e1, e2)
181 ExprKind::Range(ref e1, ref e2, lims) => {
182 self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
// A bare `_` is only legal as a destructuring-assignment LHS, which
// is handled in `lower_expr_assign`; reaching it here is an error.
184 ExprKind::Underscore => {
188 "in expressions, `_` can only be used on the left-hand side of an assignment",
190 .span_label(e.span, "`_` not allowed here")
194 ExprKind::Path(ref qself, ref path) => {
195 let qpath = self.lower_qpath(
200 ImplTraitContext::disallowed(),
202 hir::ExprKind::Path(qpath)
204 ExprKind::Break(opt_label, ref opt_expr) => {
205 let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
206 hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
208 ExprKind::Continue(opt_label) => {
209 hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
211 ExprKind::Ret(ref e) => {
212 let e = e.as_ref().map(|x| self.lower_expr(x));
213 hir::ExprKind::Ret(e)
215 ExprKind::InlineAsm(ref asm) => {
216 hir::ExprKind::InlineAsm(self.lower_inline_asm(e.span, asm))
218 ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_llvm_asm(asm),
219 ExprKind::Struct(ref se) => {
220 let rest = match &se.rest {
221 StructRest::Base(e) => Some(self.lower_expr(e)),
// `S { .. }` with no base expression: report the error but keep
// lowering with an error expression so compilation can continue.
222 StructRest::Rest(sp) => {
224 .struct_span_err(*sp, "base expression required after `..`")
225 .span_label(*sp, "add a base expression here")
227 Some(&*self.arena.alloc(self.expr_err(*sp)))
229 StructRest::None => None,
231 hir::ExprKind::Struct(
232 self.arena.alloc(self.lower_qpath(
237 ImplTraitContext::disallowed(),
240 .alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
244 ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
245 ExprKind::Err => hir::ExprKind::Err,
246 ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
247 ExprKind::Paren(ref ex) => {
248 let mut ex = self.lower_expr_mut(ex);
249 // Include parens in span, but only if it is a super-span.
250 if e.span.contains(ex.span) {
251 ex.span = self.lower_span(e.span);
253 // Merge attributes into the inner expression.
254 if !e.attrs.is_empty() {
255 let old_attrs = self.attrs.get(&ex.hir_id).map(|la| *la).unwrap_or(&[]);
258 &*self.arena.alloc_from_iter(
261 .map(|a| self.lower_attr(a))
262 .chain(old_attrs.iter().cloned()),
269 // Desugar `ExprForLoop`
270 // from: `[opt_ident]: for <pat> in <head> <body>`
271 ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
272 return self.lower_expr_for(e, pat, head, body, opt_label);
274 ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
// Common tail: allocate the HIR id, attach lowered attributes to it,
// and build the final expression with a lowered span.
277 let hir_id = self.lower_node_id(e.id);
278 self.lower_attrs(hir_id, &e.attrs);
279 hir::Expr { hir_id, kind, span: self.lower_span(e.span) }
// One-to-one translation of AST unary operators to their HIR equivalents.
283 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
285 UnOp::Deref => hir::UnOp::Deref,
286 UnOp::Not => hir::UnOp::Not,
287 UnOp::Neg => hir::UnOp::Neg,
// One-to-one translation of AST binary operators to HIR, with the operator's
// span lowered alongside the node kind.
291 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
294 BinOpKind::Add => hir::BinOpKind::Add,
295 BinOpKind::Sub => hir::BinOpKind::Sub,
296 BinOpKind::Mul => hir::BinOpKind::Mul,
297 BinOpKind::Div => hir::BinOpKind::Div,
298 BinOpKind::Rem => hir::BinOpKind::Rem,
299 BinOpKind::And => hir::BinOpKind::And,
300 BinOpKind::Or => hir::BinOpKind::Or,
301 BinOpKind::BitXor => hir::BinOpKind::BitXor,
302 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
303 BinOpKind::BitOr => hir::BinOpKind::BitOr,
304 BinOpKind::Shl => hir::BinOpKind::Shl,
305 BinOpKind::Shr => hir::BinOpKind::Shr,
306 BinOpKind::Eq => hir::BinOpKind::Eq,
307 BinOpKind::Lt => hir::BinOpKind::Lt,
308 BinOpKind::Le => hir::BinOpKind::Le,
309 BinOpKind::Ne => hir::BinOpKind::Ne,
310 BinOpKind::Ge => hir::BinOpKind::Ge,
311 BinOpKind::Gt => hir::BinOpKind::Gt,
313 span: self.lower_span(b.span),
// Rewrites a call to a `#[rustc_legacy_const_generics]` function: the value
// arguments at the indices in `legacy_args_idx` are moved out of the argument
// list and turned into anonymous-const generic arguments on the callee path,
// then the rewritten call is lowered normally.
317 fn lower_legacy_const_generics(
320 args: Vec<AstP<Expr>>,
321 legacy_args_idx: &[usize],
322 ) -> hir::ExprKind<'hir> {
// Only plain (non-qualified) paths can carry legacy const generics.
323 let path = match f.kind {
324 ExprKind::Path(None, ref mut path) => path,
328 // Split the arguments into const generics and normal arguments
329 let mut real_args = vec![];
330 let mut generic_args = vec![];
331 for (idx, arg) in args.into_iter().enumerate() {
332 if legacy_args_idx.contains(&idx) {
333 let parent_def_id = self.current_hir_id_owner;
334 let node_id = self.resolver.next_node_id();
336 // Add a definition for the in-band const def.
337 self.resolver.create_def(
340 DefPathData::AnonConst,
345 let anon_const = AnonConst { id: node_id, value: arg };
346 generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
352 // Add generic args to the last element of the path.
353 let last_segment = path.segments.last_mut().unwrap();
// The attribute target cannot already have explicit generic args here.
354 assert!(last_segment.args.is_none());
355 last_segment.args = Some(AstP(GenericArgs::AngleBracketed(AngleBracketedArgs {
360 // Now lower everything as normal.
361 let f = self.lower_expr(&f);
362 hir::ExprKind::Call(f, self.lower_exprs(&real_args))
// NOTE(review): the `fn lower_expr_if(...)` header and earlier parameters are
// not visible in this view; the visible tail lowers `if cond { then } else?`.
369 else_opt: Option<&Expr>,
370 ) -> hir::ExprKind<'hir> {
// The condition is wrapped by `manage_let_cond` (see below) so that non-`let`
// conditions get their temporaries dropped inside a synthetic block.
371 let lowered_cond = self.lower_expr(cond);
372 let new_cond = self.manage_let_cond(lowered_cond);
373 let then_expr = self.lower_block_expr(then);
374 if let Some(rslt) = else_opt {
375 hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), Some(self.lower_expr(rslt)))
377 hir::ExprKind::If(new_cond, self.arena.alloc(then_expr), None)
381 // If `cond` kind is `let`, returns `let`. Otherwise, wraps and returns `cond`
382 // in a temporary block.
// See the comment above: `let` conditions are passed through untouched; any
// other condition is wrapped in `expr_drop_temps` (a `DropTemps` node) with a
// `CondTemporary`-marked span so its temporaries don't outlive the condition.
383 fn manage_let_cond(&mut self, cond: &'hir hir::Expr<'hir>) -> &'hir hir::Expr<'hir> {
385 hir::ExprKind::Let(..) => cond,
388 self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
389 self.expr_drop_temps(span_block, cond, AttrVec::new())
394 // We desugar: `'label: while $cond $body` into:
398 // if { let _t = $cond; _t } {
407 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
408 // to preserve drop semantics since `while $cond { ... }` does not
409 // let temporaries live outside of `cond`.
410 fn lower_expr_while_in_loop_scope(
415 opt_label: Option<Label>,
416 ) -> hir::ExprKind<'hir> {
// Lower the condition inside a loop-condition scope so `break`/`continue`
// inside it are handled per the desugaring described above.
417 let lowered_cond = self.with_loop_condition_scope(|t| t.lower_expr(cond));
418 let new_cond = self.manage_let_cond(lowered_cond);
419 let then = self.lower_block_expr(body);
// Build the `else { break }` arm of the desugared `if`.
420 let expr_break = self.expr_break(span, ThinVec::new());
421 let stmt_break = self.stmt_expr(span, expr_break);
422 let else_blk = self.block_all(span, arena_vec![self; stmt_break], None);
423 let else_expr = self.arena.alloc(self.expr_block(else_blk, ThinVec::new()));
// `if cond { body } else { break }`, wrapped in a block, wrapped in `loop`.
424 let if_kind = hir::ExprKind::If(new_cond, self.arena.alloc(then), Some(else_expr));
425 let if_expr = self.expr(span, if_kind, ThinVec::new());
426 let block = self.block_expr(self.arena.alloc(if_expr));
// The loop's own span stops at the end of the condition.
427 let span = self.lower_span(span.with_hi(cond.span.hi()));
428 let opt_label = self.lower_label(opt_label);
429 hir::ExprKind::Loop(block, opt_label, hir::LoopSource::While, span)
432 /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_output(<expr>) }`,
433 /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_output(()) }`
434 /// and save the block id to use it as a break target for desugaring of the `?` operator.
435 fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
// `with_catch_scope` records this block's id as the `?`-operator break target.
436 self.with_catch_scope(body.id, |this| {
437 let mut block = this.lower_block_noalloc(body, true);
439 // Final expression of the block (if present) or `()` with span at the end of block
440 let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
442 this.mark_span_with_reason(
443 DesugaringKind::TryBlock,
445 this.allow_try_trait.clone(),
// No tail expression: synthesize `()` spanned at the block's end point.
450 let try_span = this.mark_span_with_reason(
451 DesugaringKind::TryBlock,
452 this.sess.source_map().end_point(body.span),
453 this.allow_try_trait.clone(),
456 (try_span, this.expr_unit(try_span))
459 let ok_wrapped_span =
460 this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);
462 // `::std::ops::Try::from_output($tail_expr)`
463 block.expr = Some(this.wrap_in_try_constructor(
464 hir::LangItem::TryTraitFromOutput,
470 hir::ExprKind::Block(this.arena.alloc(block), None)
// Builds `<lang_item>(expr)`: a path expression for the given lang item,
// called with `expr` as its single argument.
474 fn wrap_in_try_constructor(
476 lang_item: hir::LangItem,
478 expr: &'hir hir::Expr<'hir>,
480 ) -> &'hir hir::Expr<'hir> {
482 self.arena.alloc(self.expr_lang_item_path(method_span, lang_item, ThinVec::new()));
483 self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
// Lowers one match arm: pattern, optional guard (distinguishing `if let`
// guards from plain `if` guards), attributes, and body.
486 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
487 let pat = self.lower_pat(&arm.pat);
488 let guard = arm.guard.as_ref().map(|cond| {
// An `if let` guard becomes `Guard::IfLet`; anything else is `Guard::If`.
489 if let ExprKind::Let(ref pat, ref scrutinee, _) = cond.kind {
490 hir::Guard::IfLet(self.lower_pat(pat), self.lower_expr(scrutinee))
492 hir::Guard::If(self.lower_expr(cond))
495 let hir_id = self.next_id();
496 self.lower_attrs(hir_id, &arm.attrs);
501 body: self.lower_expr(&arm.body),
502 span: self.lower_span(arm.span),
506 /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
511 /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
515 pub(super) fn make_async_expr(
517 capture_clause: CaptureBy,
518 closure_node_id: NodeId,
519 ret_ty: Option<AstP<Ty>>,
521 async_gen_kind: hir::AsyncGeneratorKind,
522 body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
523 ) -> hir::ExprKind<'hir> {
// Return type: explicit `-> T` if given, otherwise the default return span.
524 let output = match ret_ty {
525 Some(ty) => hir::FnRetTy::Return(self.lower_ty(&ty, ImplTraitContext::disallowed())),
526 None => hir::FnRetTy::DefaultReturn(self.lower_span(span)),
529 // Resume argument type. We let the compiler infer this to simplify the lowering. It is
530 // fully constrained by `future::from_generator`.
531 let input_ty = hir::Ty {
532 hir_id: self.next_id(),
533 kind: hir::TyKind::Infer,
534 span: self.lower_span(span),
537 // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
538 let decl = self.arena.alloc(hir::FnDecl {
539 inputs: arena_vec![self; input_ty],
542 implicit_self: hir::ImplicitSelfKind::None,
545 // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
546 let (pat, task_context_hid) = self.pat_ident_binding_mode(
548 Ident::with_dummy_span(sym::_task_context),
549 hir::BindingAnnotation::Mutable,
551 let param = hir::Param {
552 hir_id: self.next_id(),
554 ty_span: self.lower_span(span),
555 span: self.lower_span(span),
557 let params = arena_vec![self; param];
559 let body_id = self.lower_body(move |this| {
560 this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
// Swap in the `_task_context` binding while lowering the body, then restore
// the previous one so nested async constructs don't leak into each other.
562 let old_ctx = this.task_context;
563 this.task_context = Some(task_context_hid);
564 let res = body(this);
565 this.task_context = old_ctx;
569 // `static |_task_context| -> <ret_ty> { body }`:
570 let generator_kind = hir::ExprKind::Closure(
574 self.lower_span(span),
575 Some(hir::Movability::Static),
577 let generator = hir::Expr {
578 hir_id: self.lower_node_id(closure_node_id),
579 kind: generator_kind,
580 span: self.lower_span(span),
583 // `future::from_generator`:
585 self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
587 self.expr_lang_item_path(unstable_span, hir::LangItem::FromGenerator, ThinVec::new());
589 // `future::from_generator(generator)`:
590 hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
593 /// Desugar `<expr>.await` into:
596 /// mut pinned => loop {
597 /// match unsafe { ::std::future::Future::poll(
598 /// <::std::pin::Pin>::new_unchecked(&mut pinned),
599 /// ::std::future::get_context(task_context),
601 /// ::std::task::Poll::Ready(result) => break result,
602 /// ::std::task::Poll::Pending => {}
604 /// task_context = yield ();
608 fn lower_expr_await(&mut self, await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
// `.await` is only valid inside an async generator body; outside of one we
// emit E0728-style diagnostics but still lower so type checking can proceed.
609 match self.generator_kind {
610 Some(hir::GeneratorKind::Async(_)) => {}
611 Some(hir::GeneratorKind::Gen) | None => {
612 let mut err = struct_span_err!(
616 "`await` is only allowed inside `async` functions and blocks"
618 err.span_label(await_span, "only allowed inside `async` functions and blocks");
619 if let Some(item_sp) = self.current_item {
620 err.span_label(item_sp, "this is not `async`");
// All synthesized nodes carry `DesugaringKind::Await`-marked spans.
625 let span = self.mark_span_with_reason(DesugaringKind::Await, await_span, None);
626 let gen_future_span = self.mark_span_with_reason(
627 DesugaringKind::Await,
629 self.allow_gen_future.clone(),
631 let expr = self.lower_expr(expr);
// `mut pinned` binds the future being awaited (see the desugaring above).
633 let pinned_ident = Ident::with_dummy_span(sym::pinned);
634 let (pinned_pat, pinned_pat_hid) =
635 self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);
637 let task_context_ident = Ident::with_dummy_span(sym::_task_context);
640 // ::std::future::Future::poll(
641 // ::std::pin::Pin::new_unchecked(&mut pinned),
642 // ::std::future::get_context(task_context),
646 let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
647 let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
648 let task_context = if let Some(task_context_hid) = self.task_context {
649 self.expr_ident_mut(span, task_context_ident, task_context_hid)
651 // Use of `await` outside of an async context, we cannot use `task_context` here.
654 let new_unchecked = self.expr_call_lang_item_fn_mut(
656 hir::LangItem::PinNewUnchecked,
657 arena_vec![self; ref_mut_pinned],
659 let get_context = self.expr_call_lang_item_fn_mut(
661 hir::LangItem::GetContext,
662 arena_vec![self; task_context],
664 let call = self.expr_call_lang_item_fn(
666 hir::LangItem::FuturePoll,
667 arena_vec![self; new_unchecked, get_context],
669 self.arena.alloc(self.expr_unsafe(call))
672 // `::std::task::Poll::Ready(result) => break result`
673 let loop_node_id = self.resolver.next_node_id();
674 let loop_hir_id = self.lower_node_id(loop_node_id);
676 let x_ident = Ident::with_dummy_span(sym::result);
677 let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
678 let x_expr = self.expr_ident(span, x_ident, x_pat_hid);
679 let ready_field = self.single_pat_field(span, x_pat);
680 let ready_pat = self.pat_lang_item_variant(span, hir::LangItem::PollReady, ready_field);
681 let break_x = self.with_loop_scope(loop_node_id, move |this| {
683 hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
684 this.arena.alloc(this.expr(await_span, expr_break, ThinVec::new()))
686 self.arm(ready_pat, break_x)
689 // `::std::task::Poll::Pending => {}`
691 let pending_pat = self.pat_lang_item_variant(span, hir::LangItem::PollPending, &[]);
692 let empty_block = self.expr_block_empty(span);
693 self.arm(pending_pat, empty_block)
696 let inner_match_stmt = {
697 let match_expr = self.expr_match(
700 arena_vec![self; ready_arm, pending_arm],
701 hir::MatchSource::AwaitDesugar,
703 self.stmt_expr(span, match_expr)
706 // task_context = yield ();
708 let unit = self.expr_unit(span);
709 let yield_expr = self.expr(
711 hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr.hir_id) }),
714 let yield_expr = self.arena.alloc(yield_expr);
716 if let Some(task_context_hid) = self.task_context {
717 let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
718 let assign = self.expr(
720 hir::ExprKind::Assign(lhs, yield_expr, self.lower_span(span)),
723 self.stmt_expr(span, assign)
725 // Use of `await` outside of an async context. Return `yield_expr` so that we can
726 // proceed with type checking.
727 self.stmt(span, hir::StmtKind::Semi(yield_expr))
// `loop { match ... { ... }; task_context = yield (); }`
731 let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);
734 let loop_expr = self.arena.alloc(hir::Expr {
736 kind: hir::ExprKind::Loop(
739 hir::LoopSource::Loop,
740 self.lower_span(span),
742 span: self.lower_span(span),
745 // mut pinned => loop { ... }
746 let pinned_arm = self.arm(pinned_pat, loop_expr);
749 // mut pinned => loop { .. }
751 hir::ExprKind::Match(expr, arena_vec![self; pinned_arm], hir::MatchSource::AwaitDesugar)
// Lowers a plain (non-`async`) closure. If lowering the body sets
// `generator_kind`, the closure is actually a generator and its movability is
// validated by `generator_movability_for_fn`.
754 fn lower_expr_closure(
756 capture_clause: CaptureBy,
757 movability: Movability,
761 ) -> hir::ExprKind<'hir> {
762 let (body_id, generator_option) = self.with_new_scopes(move |this| {
// `current_item` is swapped to this closure's span while lowering the body
// and restored afterwards.
763 let prev = this.current_item;
764 this.current_item = Some(fn_decl_span);
765 let mut generator_kind = None;
766 let body_id = this.lower_fn_body(decl, |this| {
767 let e = this.lower_expr_mut(body);
// Capture whether the body turned out to contain `yield`.
768 generator_kind = this.generator_kind;
771 let generator_option =
772 this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
773 this.current_item = prev;
774 (body_id, generator_option)
777 // Lower outside new scope to preserve `is_in_loop_condition`.
778 let fn_decl = self.lower_fn_decl(decl, None, false, None);
780 hir::ExprKind::Closure(
784 self.lower_span(fn_decl_span),
// Validates a closure that may be a generator and computes its movability:
// generators reject >1 parameter (E-level error visible below) and plain
// closures reject `static` (E0697); an async body here is a lowering bug.
789 fn generator_movability_for_fn(
793 generator_kind: Option<hir::GeneratorKind>,
794 movability: Movability,
795 ) -> Option<hir::Movability> {
796 match generator_kind {
797 Some(hir::GeneratorKind::Gen) => {
798 if decl.inputs.len() > 1 {
803 "too many parameters for a generator (expected 0 or 1 parameters)"
// This function only handles sync closures; async bodies take a
// separate path, so reaching this arm indicates a compiler bug.
809 Some(hir::GeneratorKind::Async(_)) => {
810 panic!("non-`async` closure body turned `async` during lowering");
813 if movability == Movability::Static {
814 struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
822 fn lower_expr_async_closure(
824 capture_clause: CaptureBy,
829 ) -> hir::ExprKind<'hir> {
// The outer closure keeps the user's inputs but has a default return type;
// the declared return type moves onto the inner async body.
831 FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
833 let body_id = self.with_new_scopes(|this| {
834 // FIXME(cramertj): allow `async` non-`move` closures with arguments.
835 if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
840 "`async` non-`move` closures with parameters are not currently supported",
843 "consider using `let` statements to manually capture \
844 variables by reference before entering an `async move` closure",
849 // Transform `async |x: u8| -> X { ... }` into
850 // `|x: u8| future_from_generator(|| -> X { ... })`.
851 let body_id = this.lower_fn_body(&outer_decl, |this| {
853 if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
854 let async_body = this.make_async_expr(
859 hir::AsyncGeneratorKind::Closure,
860 |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
862 this.expr(fn_decl_span, async_body, ThinVec::new())
867 // We need to lower the declaration outside the new scope, because we
868 // have to conserve the state of being inside a loop condition for the
869 // closure argument types.
870 let fn_decl = self.lower_fn_decl(&outer_decl, None, false, None);
872 hir::ExprKind::Closure(
876 self.lower_span(fn_decl_span),
881 /// Destructure the LHS of complex assignments.
882 /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
883 fn lower_expr_assign(
889 ) -> hir::ExprKind<'hir> {
890 // Return early in case of an ordinary assignment.
// `is_ordinary` answers: does this LHS require destructuring? Tuples,
// slices, structs, underscores, tuple-struct calls, and parenthesized
// forms of those are *not* ordinary.
891 fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
894 | ExprKind::Struct(..)
896 | ExprKind::Underscore => false,
897 // Check for tuple struct constructor.
898 ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
899 ExprKind::Paren(e) => {
901 // We special-case `(..)` for consistency with patterns.
902 ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
903 _ => is_ordinary(lower_ctx, e),
909 if is_ordinary(self, lhs) {
910 return hir::ExprKind::Assign(
911 self.lower_expr(lhs),
912 self.lower_expr(rhs),
913 self.lower_span(eq_sign_span),
// Destructuring assignment is feature-gated; emit the gate error but
// continue lowering below.
916 if !self.sess.features_untracked().destructuring_assignment {
918 &self.sess.parse_sess,
919 sym::destructuring_assignment,
921 "destructuring assignments are unstable",
923 .span_label(lhs.span, "cannot assign to this expression")
927 let mut assignments = vec![];
929 // The LHS becomes a pattern: `(lhs1, lhs2)`.
930 let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
931 let rhs = self.lower_expr(rhs);
933 // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
934 let destructure_let = self.stmt_let_pat(
939 hir::LocalSource::AssignDesugar(self.lower_span(eq_sign_span)),
942 // `a = lhs1; b = lhs2;`.
945 .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));
947 // Wrap everything in a block.
948 hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None)
951 /// If the given expression is a path to a tuple struct, returns that path.
952 /// It is not a complete check, but just tries to reject most paths early
953 /// if they are not tuple structs.
954 /// Type checking will take care of the full validation later.
955 fn extract_tuple_struct_path<'a>(
958 ) -> Option<(&'a Option<QSelf>, &'a Path)> {
959 if let ExprKind::Path(qself, path) = &expr.kind {
960 // Does the path resolve to something disallowed in a tuple struct/variant pattern?
961 if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
// Only fully-resolved paths whose base resolution is valid in a
// tuple-struct pattern qualify; otherwise fall through to `None`.
962 if partial_res.unresolved_segments() == 0
963 && !partial_res.base_res().expected_in_tuple_struct_pat()
968 return Some((qself, path));
973 /// Convert the LHS of a destructuring assignment to a pattern.
974 /// Each sub-assignment is recorded in `assignments`.
// Arena-allocating wrapper around `destructure_assign_mut`.
975 fn destructure_assign(
979 assignments: &mut Vec<hir::Stmt<'hir>>,
980 ) -> &'hir hir::Pat<'hir> {
981 self.arena.alloc(self.destructure_assign_mut(lhs, eq_sign_span, assignments))
// Recursively converts a destructuring-assignment LHS into a HIR pattern.
// Structured forms (`_`, arrays, tuple-struct calls, struct literals, tuples,
// parens) become the corresponding pattern kinds; anything else is treated as
// a normal lvalue: it is bound to a fresh `lhs` ident and a `lhs_i = <expr>`
// statement is pushed onto `assignments` to run after the `let`.
984 fn destructure_assign_mut(
988 assignments: &mut Vec<hir::Stmt<'hir>>,
989 ) -> hir::Pat<'hir> {
991 // Underscore pattern.
992 ExprKind::Underscore => {
993 return self.pat_without_dbm(lhs.span, hir::PatKind::Wild);
996 ExprKind::Array(elements) => {
998 self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
// A `..` inside the slice splits it into before/rest/after sub-patterns.
999 let slice_pat = if let Some((i, span)) = rest {
1000 let (before, after) = pats.split_at(i);
1001 hir::PatKind::Slice(
1003 Some(self.arena.alloc(self.pat_without_dbm(span, hir::PatKind::Wild))),
1007 hir::PatKind::Slice(pats, None, &[])
1009 return self.pat_without_dbm(lhs.span, slice_pat);
1012 ExprKind::Call(callee, args) => {
1013 if let Some((qself, path)) = self.extract_tuple_struct_path(callee) {
1014 let (pats, rest) = self.destructure_sequence(
1016 "tuple struct or variant",
1020 let qpath = self.lower_qpath(
1024 ParamMode::Optional,
1025 ImplTraitContext::disallowed(),
1027 // Destructure like a tuple struct.
1028 let tuple_struct_pat =
1029 hir::PatKind::TupleStruct(qpath, pats, rest.map(|r| r.0));
1030 return self.pat_without_dbm(lhs.span, tuple_struct_pat);
1034 ExprKind::Struct(se) => {
1035 let field_pats = self.arena.alloc_from_iter(se.fields.iter().map(|f| {
1036 let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
1038 hir_id: self.next_id(),
1039 ident: self.lower_ident(f.ident),
1041 is_shorthand: f.is_shorthand,
1042 span: self.lower_span(f.span),
1045 let qpath = self.lower_qpath(
1049 ParamMode::Optional,
1050 ImplTraitContext::disallowed(),
// `..base` is rejected on the LHS with a machine-applicable suggestion;
// a bare `..` just marks the struct pattern as having omitted fields.
1052 let fields_omitted = match &se.rest {
1053 StructRest::Base(e) => {
1057 "functional record updates are not allowed in destructuring \
1062 "consider removing the trailing pattern",
1064 rustc_errors::Applicability::MachineApplicable,
1069 StructRest::Rest(_) => true,
1070 StructRest::None => false,
1072 let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
1073 return self.pat_without_dbm(lhs.span, struct_pat);
1076 ExprKind::Tup(elements) => {
1078 self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
1079 let tuple_pat = hir::PatKind::Tuple(pats, rest.map(|r| r.0));
1080 return self.pat_without_dbm(lhs.span, tuple_pat);
1082 ExprKind::Paren(e) => {
1083 // We special-case `(..)` for consistency with patterns.
1084 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1085 let tuple_pat = hir::PatKind::Tuple(&[], Some(0));
1086 return self.pat_without_dbm(lhs.span, tuple_pat);
1088 return self.destructure_assign_mut(e, eq_sign_span, assignments);
1093 // Treat all other cases as normal lvalue.
1094 let ident = Ident::new(sym::lhs, self.lower_span(lhs.span));
1095 let (pat, binding) = self.pat_ident_mut(lhs.span, ident);
1096 let ident = self.expr_ident(lhs.span, ident, binding);
1098 hir::ExprKind::Assign(self.lower_expr(lhs), ident, self.lower_span(eq_sign_span));
1099 let expr = self.expr(lhs.span, assign, ThinVec::new());
1100 assignments.push(self.stmt_expr(lhs.span, expr));
1104 /// Destructure a sequence of expressions occurring on the LHS of an assignment.
1105 /// Such a sequence occurs in a tuple (struct)/slice.
1106 /// Return a sequence of corresponding patterns, and the index and the span of `..` if it
1108 /// Each sub-assignment is recorded in `assignments`.
1109 fn destructure_sequence(
1111 elements: &[AstP<Expr>],
1114 assignments: &mut Vec<hir::Stmt<'hir>>,
1115 ) -> (&'hir [hir::Pat<'hir>], Option<(usize, Span)>) {
1116 let mut rest = None;
1118 self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
1119 // Check for `..` pattern.
1120 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
// At most one `..` is allowed per sequence; extra ones are reported
// (with `ctx` naming the construct) but lowering continues.
1121 if let Some((_, prev_span)) = rest {
1122 self.ban_extra_rest_pat(e.span, prev_span, ctx);
1124 rest = Some((i, e.span));
// Non-`..` elements recurse into `destructure_assign_mut`.
1128 Some(self.destructure_assign_mut(e, eq_sign_span, assignments))
1134 /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
1135 fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
1136 let e1 = self.lower_expr_mut(e1);
1137 let e2 = self.lower_expr_mut(e2);
// Call the `RangeInclusiveNew` lang item with the two lowered endpoints.
1138 let fn_path = hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, self.lower_span(span));
1140 self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), ThinVec::new()));
1141 hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
// Lowers all remaining range forms (`..`, `a..`, `..b`, `a..b`, `..=b`) into
// struct literals of the corresponding `Range*` lang item. `a..=b` is handled
// separately by `lower_expr_range_closed`, hence the `unreachable!` arm.
1144 fn lower_expr_range(
1150 ) -> hir::ExprKind<'hir> {
1151 use rustc_ast::RangeLimits::*;
1153 let lang_item = match (e1, e2, lims) {
1154 (None, None, HalfOpen) => hir::LangItem::RangeFull,
1155 (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
1156 (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
1157 (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
1158 (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
1159 (Some(..), Some(..), Closed) => unreachable!(),
// `a..=` has no meaning; this is a fatal parse-level error.
1160 (_, None, Closed) => self.diagnostic().span_fatal(span, "inclusive range with no end"),
// Present endpoints become `start`/`end` fields of the range struct.
1163 let fields = self.arena.alloc_from_iter(
1164 e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e))).map(|(s, e)| {
1165 let expr = self.lower_expr(&e);
1166 let ident = Ident::new(Symbol::intern(s), self.lower_span(e.span));
1167 self.expr_field(ident, expr, e.span)
1171 hir::ExprKind::Struct(
1172 self.arena.alloc(hir::QPath::LangItem(lang_item, self.lower_span(span))),
// Lowers an optional loop/block label, lowering its identifier's span.
1178 fn lower_label(&self, opt_label: Option<Label>) -> Option<Label> {
1179 let label = opt_label?;
1180 Some(Label { ident: self.lower_ident(label.ident) })
// Resolves a `break`/`continue` target: a labeled jump is looked up through
// the resolver's label table; an unlabeled jump targets the innermost
// `loop_scope`, or errors if there is none.
1183 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
1184 let target_id = match destination {
1186 if let Some(loop_id) = self.resolver.get_label_res(id) {
1187 Ok(self.lower_node_id(loop_id))
1189 Err(hir::LoopIdError::UnresolvedLabel)
1194 .map(|id| Ok(self.lower_node_id(id)))
1195 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
1197 let label = self.lower_label(destination.map(|(_, label)| label));
1198 hir::Destination { label, target_id }
// Lower the destination of a `break`/`continue` expression. An unlabeled
// jump inside a loop *condition* is invalid; record that as a dedicated
// error variant instead of resolving to the enclosing loop.
1201 fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
1202 if self.is_in_loop_condition && opt_label.is_none() {
1205 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
1208 self.lower_loop_destination(opt_label.map(|label| (id, label)))
// Run `f` with `catch_id` installed as the current `try {}` catch scope,
// restoring the previous scope afterwards (save/restore, not a stack).
1212 fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1213 let old_scope = self.catch_scope.replace(catch_id);
1214 let result = f(self);
1215 self.catch_scope = old_scope;
// Run `f` with `loop_id` installed as the innermost loop scope. Also clears
// `is_in_loop_condition` for the duration, since entering a new loop body
// means we are no longer lowering the outer loop's condition. Both flags are
// restored on exit.
1219 fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1220 // We're no longer in the base loop's condition; we're in another loop.
1221 let was_in_loop_condition = self.is_in_loop_condition;
1222 self.is_in_loop_condition = false;
1224 let old_scope = self.loop_scope.replace(loop_id);
1225 let result = f(self);
1226 self.loop_scope = old_scope;
1228 self.is_in_loop_condition = was_in_loop_condition;
// Run `f` with `is_in_loop_condition` set, restoring the previous value
// afterwards. Used while lowering a loop's condition expression so that
// unlabeled `break`/`continue` there can be diagnosed.
1233 fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
1234 let was_in_loop_condition = self.is_in_loop_condition;
1235 self.is_in_loop_condition = true;
1237 let result = f(self);
1239 self.is_in_loop_condition = was_in_loop_condition;
// Lower legacy LLVM-style inline assembly (`llvm_asm!`). The string-level
// pieces (constraints, clobbers, dialect, flags) are copied into an
// `LlvmInlineAsmInner`, while the input/output *expressions* are lowered
// separately and stored alongside it in the HIR node.
1244 fn lower_expr_llvm_asm(&mut self, asm: &LlvmInlineAsm) -> hir::ExprKind<'hir> {
1245 let inner = hir::LlvmInlineAsmInner {
// Keep only each input's constraint string here; the paired expression
// is lowered below into `inputs_exprs`.
1246 inputs: asm.inputs.iter().map(|&(c, _)| c).collect(),
1250 .map(|out| hir::LlvmInlineAsmOutput {
1251 constraint: out.constraint,
1253 is_indirect: out.is_indirect,
1254 span: self.lower_span(out.expr.span),
1258 asm_str_style: asm.asm_str_style,
1259 clobbers: asm.clobbers.clone(),
1260 volatile: asm.volatile,
1261 alignstack: asm.alignstack,
1262 dialect: asm.dialect,
// Lower the operand expressions (inputs, then outputs) into the arena.
1264 let hir_asm = hir::LlvmInlineAsm {
1266 inputs_exprs: self.arena.alloc_from_iter(
1267 asm.inputs.iter().map(|&(_, ref input)| self.lower_expr_mut(input)),
1271 .alloc_from_iter(asm.outputs.iter().map(|out| self.lower_expr_mut(&out.expr))),
1273 hir::ExprKind::LlvmInlineAsm(self.arena.alloc(hir_asm))
// Lower a single struct-literal field (`ident: expr`, or shorthand `ident`),
// assigning it a fresh HIR id and lowering its sub-expression and spans.
1276 fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> {
1278 hir_id: self.next_id(),
1279 ident: self.lower_ident(f.ident),
1280 expr: self.lower_expr(&f.expr),
1281 span: self.lower_span(f.span),
1282 is_shorthand: f.is_shorthand,
// Lower a `yield` expression. Seeing a `yield` is what *makes* the enclosing
// body a generator: if no generator kind is recorded yet, set it to `Gen`.
// A `yield` inside an `async` body is rejected (async generators are
// unsupported); a missing operand defaults to the unit expression.
1286 fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1287 match self.generator_kind {
1288 Some(hir::GeneratorKind::Gen) => {}
1289 Some(hir::GeneratorKind::Async(_)) => {
1294 "`async` generators are not yet supported"
// First `yield` encountered: mark this body as a plain generator.
1298 None => self.generator_kind = Some(hir::GeneratorKind::Gen),
1302 opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1304 hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1307 /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
1310 /// let result = match ::std::iter::IntoIterator::into_iter(<head>) {
1312 /// [opt_ident]: loop {
1314 /// match ::std::iter::Iterator::next(&mut iter) {
1315 /// ::std::option::Option::Some(val) => __next = val,
1316 /// ::std::option::Option::None => break
1318 /// let <pat> = __next;
1319 /// StmtKind::Expr(<body>);
1332 opt_label: Option<Label>,
1333 ) -> hir::Expr<'hir> {
// Remember the pre-desugaring span of `<head>`; the loop's reported span
// is later clipped to end where the head ends (see `with_hi` below).
1334 let orig_head_span = head.span;
1336 let mut head = self.lower_expr_mut(head);
1337 let desugared_span = self.mark_span_with_reason(
1338 DesugaringKind::ForLoop(ForLoopLoc::Head),
1342 head.span = self.lower_span(desugared_span);
// Synthetic bindings used by the desugaring: `iter` and `__next`.
1344 let iter = Ident::with_dummy_span(sym::iter);
1346 let next_ident = Ident::with_dummy_span(sym::__next);
1347 let (next_pat, next_pat_hid) = self.pat_ident_binding_mode(
1350 hir::BindingAnnotation::Mutable,
1353 // `::std::option::Option::Some(val) => __next = val`
1355 let val_ident = Ident::with_dummy_span(sym::val);
1356 let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident);
1357 let val_expr = self.expr_ident(pat.span, val_ident, val_pat_hid);
1358 let next_expr = self.expr_ident(pat.span, next_ident, next_pat_hid);
1359 let assign = self.arena.alloc(self.expr(
1361 hir::ExprKind::Assign(next_expr, val_expr, self.lower_span(pat.span)),
1364 let some_pat = self.pat_some(pat.span, val_pat);
1365 self.arm(some_pat, assign)
1368 // `::std::option::Option::None => break`
// The `break` is created inside a loop scope so it targets this loop.
1371 self.with_loop_scope(e.id, |this| this.expr_break_alloc(e.span, ThinVec::new()));
1372 let pat = self.pat_none(e.span);
1373 self.arm(pat, break_expr)
// `mut iter` — the iterator binding produced by `into_iter(<head>)`.
1377 let (iter_pat, iter_pat_nid) =
1378 self.pat_ident_binding_mode(desugared_span, iter, hir::BindingAnnotation::Mutable);
1380 // `match ::std::iter::Iterator::next(&mut iter) { ... }`
1382 let iter = self.expr_ident(desugared_span, iter, iter_pat_nid);
1383 let ref_mut_iter = self.expr_mut_addr_of(desugared_span, iter);
1384 let next_expr = self.expr_call_lang_item_fn(
1386 hir::LangItem::IteratorNext,
1387 arena_vec![self; ref_mut_iter],
1389 let arms = arena_vec![self; pat_arm, break_arm];
1391 self.expr_match(desugared_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
1393 let match_stmt = self.stmt_expr(desugared_span, match_expr);
1395 let next_expr = self.expr_ident(desugared_span, next_ident, next_pat_hid);
// `let mut __next;` — declared before the match that assigns it.
1398 let next_let = self.stmt_let_pat(
1403 hir::LocalSource::ForLoopDesugar,
1406 // `let <pat> = __next`
1407 let pat = self.lower_pat(pat);
1408 let pat_let = self.stmt_let_pat(
1413 hir::LocalSource::ForLoopDesugar,
// The user's loop body, lowered inside the loop scope so that
// `break`/`continue` within it resolve to this loop.
1416 let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
1417 let body_expr = self.expr_block(body_block, ThinVec::new());
1418 let body_stmt = self.stmt_expr(body.span, body_expr);
// Loop body statement order: declare `__next`, advance the iterator,
// bind the user pattern, then run the user body.
1420 let loop_block = self.block_all(
1422 arena_vec![self; next_let, match_stmt, pat_let, body_stmt],
1426 // `[opt_ident]: loop { ... }`
1427 let kind = hir::ExprKind::Loop(
1429 self.lower_label(opt_label),
1430 hir::LoopSource::ForLoop,
1431 self.lower_span(e.span.with_hi(orig_head_span.hi())),
// The loop expression reuses the original `for` expression's node id.
1433 let loop_expr = self.arena.alloc(hir::Expr {
1434 hir_id: self.lower_node_id(e.id),
1436 span: self.lower_span(e.span),
1439 // `mut iter => { ... }`
1440 let iter_arm = self.arm(iter_pat, loop_expr);
1442 let into_iter_span = self.mark_span_with_reason(
1443 DesugaringKind::ForLoop(ForLoopLoc::IntoIter),
1448 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
1449 let into_iter_expr = {
1450 self.expr_call_lang_item_fn(
1452 hir::LangItem::IntoIterIntoIter,
1453 arena_vec![self; head],
1457 // #82462: to correctly diagnose borrow errors, the block that contains
1458 // the iter expr needs to have a span that covers the loop body.
1459 let desugared_full_span =
1460 self.mark_span_with_reason(DesugaringKind::ForLoop(ForLoopLoc::Head), e.span, None);
1462 let match_expr = self.arena.alloc(self.expr_match(
1463 desugared_full_span,
1465 arena_vec![self; iter_arm],
1466 hir::MatchSource::ForLoopDesugar,
1469 let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
1471 // This is effectively `{ let _result = ...; _result }`.
1472 // The construct was introduced in #21984 and is necessary to make sure that
1473 // temporaries in the `head` expression are dropped and do not leak to the
1474 // surrounding scope of the `match` since the `match` is not a terminating scope.
1476 // Also, add the attributes to the outer returned expr node.
1477 self.expr_drop_temps_mut(desugared_full_span, match_expr, attrs.into())
1480 /// Desugar `ExprKind::Try` from: `<expr>?` into:
1482 /// match Try::branch(<expr>) {
1483 /// ControlFlow::Continue(val) => #[allow(unreachable_code)] val,
1484 /// ControlFlow::Break(residual) =>
1485 /// #[allow(unreachable_code)]
1486 /// // If there is an enclosing `try {...}`:
1487 /// break 'catch_target Try::from_residual(residual),
1489 /// return Try::from_residual(residual),
1492 fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
// Both spans are marked as question-mark desugarings and allow the
// unstable `Try` trait machinery (`allow_try_trait`).
1493 let unstable_span = self.mark_span_with_reason(
1494 DesugaringKind::QuestionMark,
1496 self.allow_try_trait.clone(),
// `try_span` points at the `?` token itself (the end of `span`).
1498 let try_span = self.sess.source_map().end_point(span);
1499 let try_span = self.mark_span_with_reason(
1500 DesugaringKind::QuestionMark,
1502 self.allow_try_trait.clone(),
1505 // `Try::branch(<expr>)`
1508 let sub_expr = self.lower_expr_mut(sub_expr);
1510 self.expr_call_lang_item_fn(
1512 hir::LangItem::TryTraitBranch,
1513 arena_vec![self; sub_expr],
1517 // `#[allow(unreachable_code)]`
1519 // `allow(unreachable_code)`
1521 let allow_ident = Ident::new(sym::allow, self.lower_span(span));
1522 let uc_ident = Ident::new(sym::unreachable_code, self.lower_span(span));
1523 let uc_nested = attr::mk_nested_word_item(uc_ident);
1524 attr::mk_list_item(allow_ident, vec![uc_nested])
1526 attr::mk_attr_outer(allow)
1528 let attrs = vec![attr];
1530 // `ControlFlow::Continue(val) => #[allow(unreachable_code)] val,`
1531 let continue_arm = {
1532 let val_ident = Ident::with_dummy_span(sym::val);
1533 let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
1534 let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
1538 ThinVec::from(attrs.clone()),
1540 let continue_pat = self.pat_cf_continue(unstable_span, val_pat);
1541 self.arm(continue_pat, val_expr)
1544 // `ControlFlow::Break(residual) =>
1545 // #[allow(unreachable_code)]
1546 // return Try::from_residual(residual),`
1548 let residual_ident = Ident::with_dummy_span(sym::residual);
1549 let (residual_local, residual_local_nid) = self.pat_ident(try_span, residual_ident);
1550 let residual_expr = self.expr_ident_mut(try_span, residual_ident, residual_local_nid);
1551 let from_residual_expr = self.wrap_in_try_constructor(
1552 hir::LangItem::TryTraitFromResidual,
1554 self.arena.alloc(residual_expr),
1557 let thin_attrs = ThinVec::from(attrs);
// Inside a `try {}` block the residual `break`s to the enclosing catch
// scope; otherwise it `return`s from the function.
1558 let ret_expr = if let Some(catch_node) = self.catch_scope {
1559 let target_id = Ok(self.lower_node_id(catch_node));
1560 self.arena.alloc(self.expr(
1562 hir::ExprKind::Break(
1563 hir::Destination { label: None, target_id },
1564 Some(from_residual_expr),
1569 self.arena.alloc(self.expr(
1571 hir::ExprKind::Ret(Some(from_residual_expr)),
1576 let break_pat = self.pat_cf_break(try_span, residual_local);
1577 self.arm(break_pat, ret_expr)
1580 hir::ExprKind::Match(
1582 arena_vec![self; break_arm, continue_arm],
1583 hir::MatchSource::TryDesugar,
1587 // =========================================================================
1588 // Helper methods for building HIR.
1589 // =========================================================================
1591 /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1593 /// In terms of drop order, it has the same effect as wrapping `expr` in
1594 /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1596 /// The drop order can be important in e.g. `if expr { .. }`.
1597 pub(super) fn expr_drop_temps(
1600 expr: &'hir hir::Expr<'hir>,
1602 ) -> &'hir hir::Expr<'hir> {
// Arena-allocating convenience wrapper around `expr_drop_temps_mut`.
1603 self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
// Like `expr_drop_temps`, but returns the expression by value instead of
// allocating it in the arena.
1606 pub(super) fn expr_drop_temps_mut(
1609 expr: &'hir hir::Expr<'hir>,
1611 ) -> hir::Expr<'hir> {
1612 self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
// Build a `match` expression over `arg` with the given arms and match
// source, carrying no attributes.
1618 arg: &'hir hir::Expr<'hir>,
1619 arms: &'hir [hir::Arm<'hir>],
1620 source: hir::MatchSource,
1621 ) -> hir::Expr<'hir> {
1622 self.expr(span, hir::ExprKind::Match(arg, arms, source), ThinVec::new())
// Build an unlabeled, valueless `break` targeting the current loop scope.
1625 fn expr_break(&mut self, span: Span, attrs: AttrVec) -> hir::Expr<'hir> {
1626 let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
1627 self.expr(span, expr_break, attrs)
// Arena-allocating variant of `expr_break`.
1630 fn expr_break_alloc(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
1631 let expr_break = self.expr_break(span, attrs);
1632 self.arena.alloc(expr_break)
// Build `&mut <e>` (a mutable borrow of `e`).
1635 fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1638 hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
// Build the unit expression `()` (an empty tuple), arena-allocated.
1643 fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
1644 self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), ThinVec::new()))
// Build a call expression `e(args...)` with no attributes, by value.
1650 e: &'hir hir::Expr<'hir>,
1651 args: &'hir [hir::Expr<'hir>],
1652 ) -> hir::Expr<'hir> {
1653 self.expr(span, hir::ExprKind::Call(e, args), ThinVec::new())
// Arena-allocating variant of `expr_call_mut`.
1659 e: &'hir hir::Expr<'hir>,
1660 args: &'hir [hir::Expr<'hir>],
1661 ) -> &'hir hir::Expr<'hir> {
1662 self.arena.alloc(self.expr_call_mut(span, e, args))
// Build a call to a lang-item function: first a path expression naming the
// lang item, then a call of that path with `args`.
1665 fn expr_call_lang_item_fn_mut(
1668 lang_item: hir::LangItem,
1669 args: &'hir [hir::Expr<'hir>],
1670 ) -> hir::Expr<'hir> {
1671 let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item, ThinVec::new()));
1672 self.expr_call_mut(span, path, args)
// Arena-allocating variant of `expr_call_lang_item_fn_mut`.
1675 fn expr_call_lang_item_fn(
1678 lang_item: hir::LangItem,
1679 args: &'hir [hir::Expr<'hir>],
1680 ) -> &'hir hir::Expr<'hir> {
1681 self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args))
// Build a path expression referring to the given lang item.
1684 fn expr_lang_item_path(
1687 lang_item: hir::LangItem,
1689 ) -> hir::Expr<'hir> {
1692 hir::ExprKind::Path(hir::QPath::LangItem(lang_item, self.lower_span(span))),
// Arena-allocating variant of `expr_ident_mut`: a path expression that
// refers to the local binding `binding` by the name `ident`.
1697 pub(super) fn expr_ident(
1701 binding: hir::HirId,
1702 ) -> &'hir hir::Expr<'hir> {
1703 self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
// Build an identifier expression for a local binding, with no attributes.
1706 pub(super) fn expr_ident_mut(
1710 binding: hir::HirId,
1711 ) -> hir::Expr<'hir> {
1712 self.expr_ident_with_attrs(sp, ident, binding, ThinVec::new())
// Build an identifier expression for a local binding, carrying the given
// attributes. The path is a single-segment resolved path whose resolution
// is `Res::Local(binding)`.
1715 fn expr_ident_with_attrs(
1719 binding: hir::HirId,
1721 ) -> hir::Expr<'hir> {
1722 let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
1724 self.arena.alloc(hir::Path {
1725 span: self.lower_span(span),
1726 res: Res::Local(binding),
1727 segments: arena_vec![self; hir::PathSegment::from_ident(ident)],
1731 self.expr(span, expr_path, attrs)
// Wrap `expr` in a compiler-generated `unsafe { ... }` block. The block is
// marked `UnsafeSource::CompilerGenerated` so lints do not attribute the
// `unsafe` to user code.
1734 fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1735 let hir_id = self.next_id();
1736 let span = expr.span;
1739 hir::ExprKind::Block(
1740 self.arena.alloc(hir::Block {
1744 rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
1745 span: self.lower_span(span),
1746 targeted_by_break: false,
// Build an empty block expression `{}`, arena-allocated.
1754 fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
1755 let blk = self.block_all(span, &[], None);
1756 let expr = self.expr_block(blk, ThinVec::new());
1757 self.arena.alloc(expr)
// Wrap an already-built block `b` in an unlabeled block expression,
// reusing the block's span.
1760 pub(super) fn expr_block(
1762 b: &'hir hir::Block<'hir>,
1764 ) -> hir::Expr<'hir> {
1765 self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
// Core expression constructor: allocate a fresh HIR id, record the
// attributes against it, and build the `hir::Expr` with a lowered span.
1771 kind: hir::ExprKind<'hir>,
1773 ) -> hir::Expr<'hir> {
1774 let hir_id = self.next_id();
1775 self.lower_attrs(hir_id, &attrs);
1776 hir::Expr { hir_id, kind, span: self.lower_span(span) }
// Build a synthetic (non-shorthand) struct-literal field from an ident and
// an already-lowered expression.
1782 expr: &'hir hir::Expr<'hir>,
1784 ) -> hir::ExprField<'hir> {
1786 hir_id: self.next_id(),
1788 span: self.lower_span(span),
1790 is_shorthand: false,
1794 fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
1796 hir_id: self.next_id(),
1799 span: self.lower_span(expr.span),