1 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
4 use rustc_ast::ptr::P as AstP;
6 use rustc_data_structures::fx::FxHashMap;
7 use rustc_data_structures::stack::ensure_sufficient_stack;
8 use rustc_data_structures::thin_vec::ThinVec;
9 use rustc_errors::struct_span_err;
11 use rustc_hir::def::Res;
12 use rustc_session::parse::feature_err;
13 use rustc_span::hygiene::ForLoopLoc;
14 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
15 use rustc_span::symbol::{sym, Ident, Symbol};
16 use rustc_target::asm;
17 use std::collections::hash_map::Entry;
20 impl<'hir> LoweringContext<'_, 'hir> {
// Lower a slice of AST expressions into an arena-allocated slice of HIR
// expressions, preserving order.
21 fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
22 self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
// Lower a single AST expression and move the result into the arena,
// returning a `&'hir` reference (thin convenience wrapper over
// `lower_expr_mut`).
25 pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
26 self.arena.alloc(self.lower_expr_mut(e))
// Central dispatch for expression lowering: maps every `ast::ExprKind`
// variant to its `hir::ExprKind` counterpart, delegating non-trivial
// desugarings (`if`, `while`, `for`, `await`, closures, assignments, ...)
// to the dedicated `lower_expr_*` helpers in this impl.
//
// `ensure_sufficient_stack` guards against stack overflow on deeply
// nested expressions, since this function recurses through sub-expressions.
//
// NOTE(review): this extract has lines elided (gaps in the original
// numbering), so several match arms and closing delimiters are not
// visible here — confirm details against the full source.
29 pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
30 ensure_sufficient_stack(|| {
31 let kind = match e.kind {
32 ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
33 ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
34 ExprKind::ConstBlock(ref anon_const) => {
35 let anon_const = self.lower_anon_const(anon_const);
36 hir::ExprKind::ConstBlock(anon_const)
38 ExprKind::Repeat(ref expr, ref count) => {
39 let expr = self.lower_expr(expr);
40 let count = self.lower_anon_const(count);
41 hir::ExprKind::Repeat(expr, count)
43 ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
44 ExprKind::Call(ref f, ref args) => {
45 let f = self.lower_expr(f);
46 hir::ExprKind::Call(f, self.lower_exprs(args))
// Method calls lower the path segment with parenthesized generic
// args disallowed (`Err`) and `impl Trait` disallowed.
48 ExprKind::MethodCall(ref seg, ref args, span) => {
49 let hir_seg = self.arena.alloc(self.lower_path_segment(
54 ParenthesizedGenericArgs::Err,
55 ImplTraitContext::disallowed(),
58 let args = self.lower_exprs(args);
59 hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args, span)
61 ExprKind::Binary(binop, ref lhs, ref rhs) => {
62 let binop = self.lower_binop(binop);
63 let lhs = self.lower_expr(lhs);
64 let rhs = self.lower_expr(rhs);
65 hir::ExprKind::Binary(binop, lhs, rhs)
67 ExprKind::Unary(op, ref ohs) => {
68 let op = self.lower_unop(op);
69 let ohs = self.lower_expr(ohs);
70 hir::ExprKind::Unary(op, ohs)
72 ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())),
73 ExprKind::Cast(ref expr, ref ty) => {
74 let expr = self.lower_expr(expr);
75 let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
76 hir::ExprKind::Cast(expr, ty)
78 ExprKind::Type(ref expr, ref ty) => {
79 let expr = self.lower_expr(expr);
80 let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
81 hir::ExprKind::Type(expr, ty)
83 ExprKind::AddrOf(k, m, ref ohs) => {
84 let ohs = self.lower_expr(ohs);
85 hir::ExprKind::AddrOf(k, m, ohs)
87 ExprKind::Let(ref pat, ref scrutinee) => {
88 self.lower_expr_let(e.span, pat, scrutinee)
90 ExprKind::If(ref cond, ref then, ref else_opt) => {
91 self.lower_expr_if(e.span, cond, then, else_opt.as_deref())
// `while`/`loop` bodies are lowered inside a fresh loop scope so
// that `break`/`continue` resolve to this loop.
93 ExprKind::While(ref cond, ref body, opt_label) => self
94 .with_loop_scope(e.id, |this| {
95 this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label)
97 ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
99 this.lower_block(body, false),
101 hir::LoopSource::Loop,
104 ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
105 ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
106 self.lower_expr(expr),
107 self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
108 hir::MatchSource::Normal,
// `async { ... }` blocks become generators wrapped for `Future`
// (see `make_async_expr`); elided lines hide the wrapper call here.
110 ExprKind::Async(capture_clause, closure_node_id, ref block) => self
116 hir::AsyncGeneratorKind::Block,
117 |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
119 ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
// Closures split on asyncness: async closures get their own
// desugaring, ordinary ones go through `lower_expr_closure`.
128 if let Async::Yes { closure_id, .. } = asyncness {
129 self.lower_expr_async_closure(
137 self.lower_expr_closure(
146 ExprKind::Block(ref blk, opt_label) => {
147 hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
149 ExprKind::Assign(ref el, ref er, span) => {
150 self.lower_expr_assign(el, er, span, e.span)
152 ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
153 self.lower_binop(op),
157 ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident),
158 ExprKind::Index(ref el, ref er) => {
159 hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
// `a..=b` is special-cased (lowered to `RangeInclusive::new`);
// all other range forms go through `lower_expr_range`.
161 ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
162 self.lower_expr_range_closed(e.span, e1, e2)
164 ExprKind::Range(ref e1, ref e2, lims) => {
165 self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
// Bare `_` in expression position is an error (only valid as the
// LHS of a destructuring assignment, handled elsewhere).
167 ExprKind::Underscore => {
171 "in expressions, `_` can only be used on the left-hand side of an assignment",
173 .span_label(e.span, "`_` not allowed here")
177 ExprKind::Path(ref qself, ref path) => {
178 let qpath = self.lower_qpath(
183 ImplTraitContext::disallowed(),
185 hir::ExprKind::Path(qpath)
187 ExprKind::Break(opt_label, ref opt_expr) => {
188 let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
189 hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
191 ExprKind::Continue(opt_label) => {
192 hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
194 ExprKind::Ret(ref e) => {
195 let e = e.as_ref().map(|x| self.lower_expr(x));
196 hir::ExprKind::Ret(e)
198 ExprKind::InlineAsm(ref asm) => self.lower_expr_asm(e.span, asm),
199 ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_llvm_asm(asm),
// Struct literals: a bare `..` with no base expression is an
// error, recovered by substituting an error expression.
200 ExprKind::Struct(ref path, ref fields, ref rest) => {
201 let rest = match rest {
202 StructRest::Base(e) => Some(self.lower_expr(e)),
203 StructRest::Rest(sp) => {
205 .struct_span_err(*sp, "base expression required after `..`")
206 .span_label(*sp, "add a base expression here")
208 Some(&*self.arena.alloc(self.expr_err(*sp)))
210 StructRest::None => None,
212 hir::ExprKind::Struct(
213 self.arena.alloc(self.lower_qpath(
218 ImplTraitContext::disallowed(),
220 self.arena.alloc_from_iter(fields.iter().map(|x| self.lower_field(x))),
224 ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
225 ExprKind::Err => hir::ExprKind::Err,
226 ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
// Parens are dropped in HIR; the paren span and outer attributes
// are merged onto the inner expression instead.
227 ExprKind::Paren(ref ex) => {
228 let mut ex = self.lower_expr_mut(ex);
229 // Include parens in span, but only if it is a super-span.
230 if e.span.contains(ex.span) {
233 // Merge attributes into the inner expression.
234 let mut attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
235 attrs.extend::<Vec<_>>(ex.attrs.into());
236 ex.attrs = attrs.into();
240 // Desugar `ExprForLoop`
241 // from: `[opt_ident]: for <pat> in <head> <body>`
242 ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
243 return self.lower_expr_for(e, pat, head, body, opt_label);
// Macro calls must already have been expanded by this phase.
245 ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
249 hir_id: self.lower_node_id(e.id),
252 attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
// Translate an AST unary operator into its HIR equivalent (one-to-one
// mapping; no desugaring happens here).
257 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
259 UnOp::Deref => hir::UnOp::UnDeref,
260 UnOp::Not => hir::UnOp::UnNot,
261 UnOp::Neg => hir::UnOp::UnNeg,
// Translate an AST binary operator into its HIR equivalent (one-to-one
// mapping over every `BinOpKind` variant; span handling is on elided
// lines of this extract).
265 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
268 BinOpKind::Add => hir::BinOpKind::Add,
269 BinOpKind::Sub => hir::BinOpKind::Sub,
270 BinOpKind::Mul => hir::BinOpKind::Mul,
271 BinOpKind::Div => hir::BinOpKind::Div,
272 BinOpKind::Rem => hir::BinOpKind::Rem,
273 BinOpKind::And => hir::BinOpKind::And,
274 BinOpKind::Or => hir::BinOpKind::Or,
275 BinOpKind::BitXor => hir::BinOpKind::BitXor,
276 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
277 BinOpKind::BitOr => hir::BinOpKind::BitOr,
278 BinOpKind::Shl => hir::BinOpKind::Shl,
279 BinOpKind::Shr => hir::BinOpKind::Shr,
280 BinOpKind::Eq => hir::BinOpKind::Eq,
281 BinOpKind::Lt => hir::BinOpKind::Lt,
282 BinOpKind::Le => hir::BinOpKind::Le,
283 BinOpKind::Ne => hir::BinOpKind::Ne,
284 BinOpKind::Ge => hir::BinOpKind::Ge,
285 BinOpKind::Gt => hir::BinOpKind::Gt,
291 /// Emit an error and lower `ast::ExprKind::Let(pat, scrutinee)` into:
293 /// match scrutinee { pats => true, _ => false }
// A bare `let` expression reaching this point is in an illegal position
// (legal positions are intercepted by the `if`/`while` lowering), so an
// error is emitted and a `match` producing `bool` is built for recovery.
295 fn lower_expr_let(&mut self, span: Span, pat: &Pat, scrutinee: &Expr) -> hir::ExprKind<'hir> {
296 // If we got here, the `let` expression is not allowed.
// Nightly builds get a feature-oriented message; stable builds get a
// plain "statement, not expression" message (elided lines carry the
// rest of each diagnostic).
298 if self.sess.opts.unstable_features.is_nightly_build() {
300 .struct_span_err(span, "`let` expressions are not supported here")
301 .note("only supported directly in conditions of `if`- and `while`-expressions")
302 .note("as well as when nested within `&&` and parenthesis in those conditions")
306 .struct_span_err(span, "expected expression, found statement (`let`)")
307 .note("variable declaration using `let` is a statement")
311 // For better recovery, we emit:
313 // match scrutinee { pat => true, _ => false }
315 // While this doesn't fully match the user's intent, it has key advantages:
316 // 1. We can avoid using `abort_if_errors`.
317 // 2. We can typeck both `pat` and `scrutinee`.
318 // 3. `pat` is allowed to be refutable.
319 // 4. The return type of the block is `bool` which seems like what the user wanted.
320 let scrutinee = self.lower_expr(scrutinee);
// `pat => true` arm:
322 let pat = self.lower_pat(pat);
323 let expr = self.expr_bool(span, true);
// `_ => false` arm:
327 let pat = self.pat_wild(span);
328 let expr = self.expr_bool(span, false);
331 hir::ExprKind::Match(
333 arena_vec![self; then_arm, else_arm],
334 hir::MatchSource::Normal,
// (Signature start is elided in this extract; this is the tail of
// `lower_expr_if`'s parameter list.) Desugars `if cond { then } else { e }`
// into a two-arm `match` — `if let` matches the pattern directly, while a
// boolean condition is matched against `true` with drop-temps wrapping.
343 else_opt: Option<&Expr>,
344 ) -> hir::ExprKind<'hir> {
345 // FIXME(#53667): handle lowering of && and parens.
347 // `_ => else_block` where `else_block` is `{}` if there's `None`:
348 let else_pat = self.pat_wild(span);
349 let (else_expr, contains_else_clause) = match else_opt {
350 None => (self.expr_block_empty(span), false),
351 Some(els) => (self.lower_expr(els), true),
353 let else_arm = self.arm(else_pat, else_expr);
355 // Handle then + scrutinee:
356 let (then_pat, scrutinee, desugar) = match cond.kind {
357 // `<pat> => <then>`:
358 ExprKind::Let(ref pat, ref scrutinee) => {
359 let scrutinee = self.lower_expr(scrutinee);
360 let pat = self.lower_pat(pat);
361 (pat, scrutinee, hir::MatchSource::IfLetDesugar { contains_else_clause })
// Plain boolean condition: match `drop-temps { cond }` against `true`.
366 let cond = self.lower_expr(cond);
368 self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
369 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
370 // to preserve drop semantics since `if cond { ... }` does not
371 // let temporaries live outside of `cond`.
372 let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
373 let pat = self.pat_bool(span, true);
374 (pat, cond, hir::MatchSource::IfDesugar { contains_else_clause })
377 let then_expr = self.lower_block_expr(then);
378 let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));
380 hir::ExprKind::Match(scrutinee, arena_vec![self; then_arm, else_arm], desugar)
// Desugar a `while`/`while let` into `loop { match ... }`: the match's
// "then" arm runs the body and the wildcard arm `break`s out of the loop.
// Must be called inside the loop scope (see the `While` arm of
// `lower_expr_mut`), because both the condition and the body may `break`.
383 fn lower_expr_while_in_loop_scope(
388 opt_label: Option<Label>,
389 ) -> hir::ExprKind<'hir> {
390 // FIXME(#53667): handle lowering of && and parens.
392 // Note that the block AND the condition are evaluated in the loop scope.
393 // This is done to allow `break` from inside the condition of the loop.
// `_ => break` arm terminating the loop when the condition fails:
397 let else_pat = self.pat_wild(span);
398 let else_expr = self.expr_break(span, ThinVec::new());
399 self.arm(else_pat, else_expr)
402 // Handle then + scrutinee:
403 let (then_pat, scrutinee, desugar, source) = match cond.kind {
404 ExprKind::Let(ref pat, ref scrutinee) => {
407 // [opt_ident]: loop {
408 // match <sub_expr> {
// The scrutinee is lowered in a loop-condition scope so that
// unlabeled `break`/`continue` inside it are rejected.
413 let scrutinee = self.with_loop_condition_scope(|t| t.lower_expr(scrutinee));
414 let pat = self.lower_pat(pat);
415 (pat, scrutinee, hir::MatchSource::WhileLetDesugar, hir::LoopSource::WhileLet)
418 // We desugar: `'label: while $cond $body` into:
422 // match drop-temps { $cond } {
430 let cond = self.with_loop_condition_scope(|this| this.lower_expr(cond));
432 self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
433 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
434 // to preserve drop semantics since `while cond { ... }` does not
435 // let temporaries live outside of `cond`.
436 let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
438 let pat = self.pat_bool(span, true);
439 (pat, cond, hir::MatchSource::WhileDesugar, hir::LoopSource::While)
442 let then_expr = self.lower_block_expr(body);
443 let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));
445 // `match <scrutinee> { ... }`
447 self.expr_match(span, scrutinee, arena_vec![self; then_arm, else_arm], desugar);
449 // `[opt_ident]: loop { ... }`
450 hir::ExprKind::Loop(self.block_expr(self.arena.alloc(match_expr)), opt_label, source)
453 /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_ok(<expr>) }`,
454 /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_ok(()) }`
455 /// and save the block id to use it as a break target for desugaring of the `?` operator.
456 fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
// The catch scope registered here is what `?` inside the block breaks to.
457 self.with_catch_scope(body.id, |this| {
458 let mut block = this.lower_block_noalloc(body, true);
460 // Final expression of the block (if present) or `()` with span at the end of block
// Both branches produce a span marked with `DesugaringKind::TryBlock`
// so diagnostics can recognize compiler-generated code.
461 let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
463 this.mark_span_with_reason(
464 DesugaringKind::TryBlock,
466 this.allow_try_trait.clone(),
471 let try_span = this.mark_span_with_reason(
472 DesugaringKind::TryBlock,
473 this.sess.source_map().end_point(body.span),
474 this.allow_try_trait.clone(),
477 (try_span, this.expr_unit(try_span))
480 let ok_wrapped_span =
481 this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);
483 // `::std::ops::Try::from_ok($tail_expr)`
484 block.expr = Some(this.wrap_in_try_constructor(
485 hir::LangItem::TryFromOk,
491 hir::ExprKind::Block(this.arena.alloc(block), None)
// Build `<lang_item>(expr)` — a call to a `Try`-related lang-item path
// (e.g. `Try::from_ok`) applied to a single argument. Used by the `try`
// block and `?` desugarings.
495 fn wrap_in_try_constructor(
497 lang_item: hir::LangItem,
499 expr: &'hir hir::Expr<'hir>,
501 ) -> &'hir hir::Expr<'hir> {
503 self.arena.alloc(self.expr_lang_item_path(method_span, lang_item, ThinVec::new()));
504 self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
// Lower a single `match` arm: attributes, pattern, optional `if` guard,
// and body, assigning it a fresh HIR id.
507 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
509 hir_id: self.next_id(),
510 attrs: self.lower_attrs(&arm.attrs),
511 pat: self.lower_pat(&arm.pat),
512 guard: match arm.guard {
513 Some(ref x) => Some(hir::Guard::If(self.lower_expr(x))),
516 body: self.lower_expr(&arm.body),
521 /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
526 /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
// Shared by `async` blocks, `async fn` bodies, and `async` closures: the
// provided `body` closure produces the inner expression, which is wrapped
// in a static generator taking a single inferred `_task_context` resume
// argument, then passed to the `from_generator` lang item.
530 pub(super) fn make_async_expr(
532 capture_clause: CaptureBy,
533 closure_node_id: NodeId,
534 ret_ty: Option<AstP<Ty>>,
536 async_gen_kind: hir::AsyncGeneratorKind,
537 body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
538 ) -> hir::ExprKind<'hir> {
539 let output = match ret_ty {
540 Some(ty) => hir::FnRetTy::Return(self.lower_ty(&ty, ImplTraitContext::disallowed())),
541 None => hir::FnRetTy::DefaultReturn(span),
544 // Resume argument type. We let the compiler infer this to simplify the lowering. It is
545 // fully constrained by `future::from_generator`.
546 let input_ty = hir::Ty { hir_id: self.next_id(), kind: hir::TyKind::Infer, span };
548 // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
549 let decl = self.arena.alloc(hir::FnDecl {
550 inputs: arena_vec![self; input_ty],
553 implicit_self: hir::ImplicitSelfKind::None,
556 // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
557 let (pat, task_context_hid) = self.pat_ident_binding_mode(
559 Ident::with_dummy_span(sym::_task_context),
560 hir::BindingAnnotation::Mutable,
562 let param = hir::Param { attrs: &[], hir_id: self.next_id(), pat, ty_span: span, span };
563 let params = arena_vec![self; param];
565 let body_id = self.lower_body(move |this| {
566 this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
// Save/restore `task_context` around the body so nested async
// constructs each see their own resume-argument binding.
568 let old_ctx = this.task_context;
569 this.task_context = Some(task_context_hid);
570 let res = body(this);
571 this.task_context = old_ctx;
575 // `static |_task_context| -> <ret_ty> { body }`:
576 let generator_kind = hir::ExprKind::Closure(
581 Some(hir::Movability::Static),
583 let generator = hir::Expr {
584 hir_id: self.lower_node_id(closure_node_id),
585 kind: generator_kind,
587 attrs: ThinVec::new(),
590 // `future::from_generator`:
592 self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
594 self.expr_lang_item_path(unstable_span, hir::LangItem::FromGenerator, ThinVec::new());
596 // `future::from_generator(generator)`:
597 hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
600 /// Desugar `<expr>.await` into:
603 /// mut pinned => loop {
604 /// match unsafe { ::std::future::Future::poll(
605 /// <::std::pin::Pin>::new_unchecked(&mut pinned),
606 /// ::std::future::get_context(task_context),
608 /// ::std::task::Poll::Ready(result) => break result,
609 /// ::std::task::Poll::Pending => {}
611 /// task_context = yield ();
// Rejects `.await` outside an async generator, then builds the poll loop
// above entirely from lang-item paths so it is independent of what the
// user has in scope.
615 fn lower_expr_await(&mut self, await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
616 match self.generator_kind {
617 Some(hir::GeneratorKind::Async(_)) => {}
// Not inside `async`: emit the error but continue lowering for recovery.
618 Some(hir::GeneratorKind::Gen) | None => {
619 let mut err = struct_span_err!(
623 "`await` is only allowed inside `async` functions and blocks"
625 err.span_label(await_span, "only allowed inside `async` functions and blocks");
626 if let Some(item_sp) = self.current_item {
627 err.span_label(item_sp, "this is not `async`");
632 let span = self.mark_span_with_reason(DesugaringKind::Await, await_span, None);
633 let gen_future_span = self.mark_span_with_reason(
634 DesugaringKind::Await,
636 self.allow_gen_future.clone(),
638 let expr = self.lower_expr(expr);
640 let pinned_ident = Ident::with_dummy_span(sym::pinned);
641 let (pinned_pat, pinned_pat_hid) =
642 self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);
644 let task_context_ident = Ident::with_dummy_span(sym::_task_context);
647 // ::std::future::Future::poll(
648 // ::std::pin::Pin::new_unchecked(&mut pinned),
649 // ::std::future::get_context(task_context),
653 let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
654 let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
// `task_context` was bound by `make_async_expr`; absent means `.await`
// outside `async`, already reported above.
655 let task_context = if let Some(task_context_hid) = self.task_context {
656 self.expr_ident_mut(span, task_context_ident, task_context_hid)
658 // Use of `await` outside of an async context, we cannot use `task_context` here.
661 let new_unchecked = self.expr_call_lang_item_fn_mut(
663 hir::LangItem::PinNewUnchecked,
664 arena_vec![self; ref_mut_pinned],
666 let get_context = self.expr_call_lang_item_fn_mut(
668 hir::LangItem::GetContext,
669 arena_vec![self; task_context],
671 let call = self.expr_call_lang_item_fn(
673 hir::LangItem::FuturePoll,
674 arena_vec![self; new_unchecked, get_context],
// The whole poll call is wrapped in `unsafe` (for `Pin::new_unchecked`).
676 self.arena.alloc(self.expr_unsafe(call))
679 // `::std::task::Poll::Ready(result) => break result`
680 let loop_node_id = self.resolver.next_node_id();
681 let loop_hir_id = self.lower_node_id(loop_node_id);
683 let x_ident = Ident::with_dummy_span(sym::result);
684 let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
685 let x_expr = self.expr_ident(span, x_ident, x_pat_hid);
686 let ready_field = self.single_pat_field(span, x_pat);
687 let ready_pat = self.pat_lang_item_variant(span, hir::LangItem::PollReady, ready_field);
688 let break_x = self.with_loop_scope(loop_node_id, move |this| {
690 hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
691 this.arena.alloc(this.expr(await_span, expr_break, ThinVec::new()))
693 self.arm(ready_pat, break_x)
696 // `::std::task::Poll::Pending => {}`
698 let pending_pat = self.pat_lang_item_variant(span, hir::LangItem::PollPending, &[]);
699 let empty_block = self.expr_block_empty(span);
700 self.arm(pending_pat, empty_block)
703 let inner_match_stmt = {
704 let match_expr = self.expr_match(
707 arena_vec![self; ready_arm, pending_arm],
708 hir::MatchSource::AwaitDesugar,
710 self.stmt_expr(span, match_expr)
713 // task_context = yield ();
715 let unit = self.expr_unit(span);
716 let yield_expr = self.expr(
718 hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr.hir_id) }),
721 let yield_expr = self.arena.alloc(yield_expr);
723 if let Some(task_context_hid) = self.task_context {
724 let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
726 self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, span), AttrVec::new());
727 self.stmt_expr(span, assign)
729 // Use of `await` outside of an async context. Return `yield_expr` so that we can
730 // proceed with type checking.
731 self.stmt(span, hir::StmtKind::Semi(yield_expr))
735 let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);
738 let loop_expr = self.arena.alloc(hir::Expr {
740 kind: hir::ExprKind::Loop(loop_block, None, hir::LoopSource::Loop),
742 attrs: ThinVec::new(),
745 // mut pinned => loop { ... }
746 let pinned_arm = self.arm(pinned_pat, loop_expr);
749 // mut pinned => loop { .. }
751 hir::ExprKind::Match(expr, arena_vec![self; pinned_arm], hir::MatchSource::AwaitDesugar)
// Lower a non-async closure. If the body turns out to contain `yield`
// (making it a generator), the movability is validated via
// `generator_movability_for_fn`.
754 fn lower_expr_closure(
756 capture_clause: CaptureBy,
757 movability: Movability,
761 ) -> hir::ExprKind<'hir> {
762 // Lower outside new scope to preserve `is_in_loop_condition`.
763 let fn_decl = self.lower_fn_decl(decl, None, false, None);
765 self.with_new_scopes(move |this| {
766 let prev = this.current_item;
767 this.current_item = Some(fn_decl_span);
// `generator_kind` is captured after lowering the body: it is set as a
// side effect if the body contains `yield`.
768 let mut generator_kind = None;
769 let body_id = this.lower_fn_body(decl, |this| {
770 let e = this.lower_expr_mut(body);
771 generator_kind = this.generator_kind;
774 let generator_option =
775 this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
776 this.current_item = prev;
777 hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, generator_option)
// Validate a closure that may have become a generator: generators take at
// most one parameter, cannot already be async at this point, and plain
// closures may not be declared `static`. Returns the movability to record
// on the HIR closure (elided lines carry the Ok-paths).
781 fn generator_movability_for_fn(
785 generator_kind: Option<hir::GeneratorKind>,
786 movability: Movability,
787 ) -> Option<hir::Movability> {
788 match generator_kind {
789 Some(hir::GeneratorKind::Gen) => {
790 if decl.inputs.len() > 1 {
795 "too many parameters for a generator (expected 0 or 1 parameters)"
// Async bodies are produced only by the async-closure path, which
// never routes through here — hence the hard panic.
801 Some(hir::GeneratorKind::Async(_)) => {
802 panic!("non-`async` closure body turned `async` during lowering");
805 if movability == Movability::Static {
806 struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
// Lower `async |args| body` into an ordinary closure whose body is the
// async desugaring: `|args| future_from_generator(|| body)`. The outer
// closure's return type is left defaulted; the user's return type is
// threaded into the inner generator via `make_async_expr`.
814 fn lower_expr_async_closure(
816 capture_clause: CaptureBy,
821 ) -> hir::ExprKind<'hir> {
823 FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
824 // We need to lower the declaration outside the new scope, because we
825 // have to conserve the state of being inside a loop condition for the
826 // closure argument types.
827 let fn_decl = self.lower_fn_decl(&outer_decl, None, false, None);
829 self.with_new_scopes(move |this| {
830 // FIXME(cramertj): allow `async` non-`move` closures with arguments.
831 if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
836 "`async` non-`move` closures with parameters are not currently supported",
839 "consider using `let` statements to manually capture \
840 variables by reference before entering an `async move` closure",
845 // Transform `async |x: u8| -> X { ... }` into
846 // `|x: u8| future_from_generator(|| -> X { ... })`.
847 let body_id = this.lower_fn_body(&outer_decl, |this| {
849 if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
850 let async_body = this.make_async_expr(
855 hir::AsyncGeneratorKind::Closure,
856 |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
858 this.expr(fn_decl_span, async_body, ThinVec::new())
// `None` movability: the outer closure is a plain closure, never a
// generator itself.
860 hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, None)
864 /// Destructure the LHS of complex assignments.
865 /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
866 fn lower_expr_assign(
872 ) -> hir::ExprKind<'hir> {
873 // Return early in case of an ordinary assignment.
// An "ordinary" LHS (not a tuple/slice/struct/underscore pattern, nor a
// tuple-struct constructor call) lowers to a plain `Assign` node.
874 fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
877 | ExprKind::Struct(..)
879 | ExprKind::Underscore => false,
880 // Check for tuple struct constructor.
881 ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
882 ExprKind::Paren(e) => {
884 // We special-case `(..)` for consistency with patterns.
885 ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
886 _ => is_ordinary(lower_ctx, e),
892 if is_ordinary(self, lhs) {
893 return hir::ExprKind::Assign(self.lower_expr(lhs), self.lower_expr(rhs), eq_sign_span);
// Destructuring assignment is feature-gated; emit the feature error but
// continue lowering for recovery.
895 if !self.sess.features_untracked().destructuring_assignment {
897 &self.sess.parse_sess,
898 sym::destructuring_assignment,
900 "destructuring assignments are unstable",
902 .span_label(lhs.span, "cannot assign to this expression")
906 let mut assignments = vec![];
908 // The LHS becomes a pattern: `(lhs1, lhs2)`.
909 let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
910 let rhs = self.lower_expr(rhs);
912 // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
913 let destructure_let = self.stmt_let_pat(
918 hir::LocalSource::AssignDesugar(eq_sign_span),
921 // `a = lhs1; b = lhs2;`.
924 .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));
926 // Wrap everything in a block.
927 hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None)
930 /// If the given expression is a path to a tuple struct, returns that path.
931 /// It is not a complete check, but just tries to reject most paths early
932 /// if they are not tuple structs.
933 /// Type checking will take care of the full validation later.
934 fn extract_tuple_struct_path<'a>(&mut self, expr: &'a Expr) -> Option<&'a Path> {
935 // For tuple struct destructuring, it must be a non-qualified path (like in patterns).
936 if let ExprKind::Path(None, path) = &expr.kind {
937 // Does the path resolves to something disallowed in a tuple struct/variant pattern?
// Only fully-resolved paths whose base resolution is valid in a
// tuple-struct pattern qualify (elided lines return the path / None).
938 if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
939 if partial_res.unresolved_segments() == 0
940 && !partial_res.base_res().expected_in_tuple_struct_pat()
950 /// Convert the LHS of a destructuring assignment to a pattern.
951 /// Each sub-assignment is recorded in `assignments`.
// Recursively mirrors pattern syntax over expressions: `_`, arrays,
// tuple-struct calls, struct literals, tuples, and parens each become the
// corresponding `hir::PatKind`; anything else is a leaf lvalue that gets a
// fresh `lhs` binding plus a recorded `lvalue = lhs` assignment.
952 fn destructure_assign(
956 assignments: &mut Vec<hir::Stmt<'hir>>,
957 ) -> &'hir hir::Pat<'hir> {
959 // Underscore pattern.
960 ExprKind::Underscore => {
961 return self.pat_without_dbm(lhs.span, hir::PatKind::Wild);
// Slice pattern, with an optional `..` rest splitting before/after.
964 ExprKind::Array(elements) => {
966 self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
967 let slice_pat = if let Some((i, span)) = rest {
968 let (before, after) = pats.split_at(i);
971 Some(self.pat_without_dbm(span, hir::PatKind::Wild)),
975 hir::PatKind::Slice(pats, None, &[])
977 return self.pat_without_dbm(lhs.span, slice_pat);
// Tuple-struct/variant pattern, e.g. `Some(x) = ...`.
980 ExprKind::Call(callee, args) => {
981 if let Some(path) = self.extract_tuple_struct_path(callee) {
982 let (pats, rest) = self.destructure_sequence(
984 "tuple struct or variant",
988 let qpath = self.lower_qpath(
993 ImplTraitContext::disallowed(),
995 // Destructure like a tuple struct.
996 let tuple_struct_pat =
997 hir::PatKind::TupleStruct(qpath, pats, rest.map(|r| r.0));
998 return self.pat_without_dbm(lhs.span, tuple_struct_pat);
// Struct pattern: each field value is destructured recursively; a
// functional-update base (`..base`) is rejected, bare `..` allowed.
1002 ExprKind::Struct(path, fields, rest) => {
1003 let field_pats = self.arena.alloc_from_iter(fields.iter().map(|f| {
1004 let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
1006 hir_id: self.next_id(),
1009 is_shorthand: f.is_shorthand,
1013 let qpath = self.lower_qpath(
1017 ParamMode::Optional,
1018 ImplTraitContext::disallowed(),
1020 let fields_omitted = match rest {
1021 StructRest::Base(e) => {
1025 "functional record updates are not allowed in destructuring \
1030 "consider removing the trailing pattern",
1032 rustc_errors::Applicability::MachineApplicable,
1037 StructRest::Rest(_) => true,
1038 StructRest::None => false,
1040 let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
1041 return self.pat_without_dbm(lhs.span, struct_pat);
1044 ExprKind::Tup(elements) => {
1046 self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
1047 let tuple_pat = hir::PatKind::Tuple(pats, rest.map(|r| r.0));
1048 return self.pat_without_dbm(lhs.span, tuple_pat);
1050 ExprKind::Paren(e) => {
1051 // We special-case `(..)` for consistency with patterns.
1052 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1053 let tuple_pat = hir::PatKind::Tuple(&[], Some(0));
1054 return self.pat_without_dbm(lhs.span, tuple_pat);
1056 return self.destructure_assign(e, eq_sign_span, assignments);
1061 // Treat all other cases as normal lvalue.
1062 let ident = Ident::new(sym::lhs, lhs.span);
1063 let (pat, binding) = self.pat_ident(lhs.span, ident);
1064 let ident = self.expr_ident(lhs.span, ident, binding);
1065 let assign = hir::ExprKind::Assign(self.lower_expr(lhs), ident, eq_sign_span);
1066 let expr = self.expr(lhs.span, assign, ThinVec::new());
1067 assignments.push(self.stmt_expr(lhs.span, expr));
1071 /// Destructure a sequence of expressions occurring on the LHS of an assignment.
1072 /// Such a sequence occurs in a tuple (struct)/slice.
1073 /// Return a sequence of corresponding patterns, and the index and the span of `..` if it
1075 /// Each sub-assignment is recorded in `assignments`.
1076 fn destructure_sequence(
1078 elements: &[AstP<Expr>],
1081 assignments: &mut Vec<hir::Stmt<'hir>>,
1082 ) -> (&'hir [&'hir hir::Pat<'hir>], Option<(usize, Span)>) {
1083 let mut rest = None;
// Each element becomes a sub-pattern; a bare `..` is filtered out of the
// pattern list and recorded (at most once) as the rest position.
1085 self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
1086 // Check for `..` pattern.
1087 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1088 if let Some((_, prev_span)) = rest {
1089 self.ban_extra_rest_pat(e.span, prev_span, ctx);
1091 rest = Some((i, e.span));
1095 Some(self.destructure_assign(e, eq_sign_span, assignments))
1101 /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
// Unlike the other range forms (which are struct literals, see
// `lower_expr_range`), the inclusive range must go through its `new`
// lang-item constructor.
1102 fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
1103 let e1 = self.lower_expr_mut(e1);
1104 let e2 = self.lower_expr_mut(e2);
1105 let fn_path = hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, span);
1107 self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), ThinVec::new()));
1108 hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
// Lower the non-`..=`-with-both-ends range forms into struct literals of
// the corresponding range lang item, with `start`/`end` fields as present.
// `start..=end` is unreachable here (handled by `lower_expr_range_closed`),
// and `expr..=` with no end is a fatal error.
1111 fn lower_expr_range(
1117 ) -> hir::ExprKind<'hir> {
1118 use rustc_ast::RangeLimits::*;
1120 let lang_item = match (e1, e2, lims) {
1121 (None, None, HalfOpen) => hir::LangItem::RangeFull,
1122 (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
1123 (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
1124 (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
1125 (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
1126 (Some(..), Some(..), Closed) => unreachable!(),
1127 (_, None, Closed) => {
1128 self.diagnostic().span_fatal(span, "inclusive range with no end").raise()
// Present bounds become `start`/`end` field initializers.
1132 let fields = self.arena.alloc_from_iter(
1133 e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e))).map(|(s, e)| {
1134 let expr = self.lower_expr(&e);
1135 let ident = Ident::new(Symbol::intern(s), e.span);
1136 self.field(ident, expr, e.span)
1140 hir::ExprKind::Struct(self.arena.alloc(hir::QPath::LangItem(lang_item, span)), fields, None)
// Resolve a `break`/`continue` target. Labeled jumps resolve via the
// label resolver; unlabeled jumps target the innermost enclosing loop
// scope, or produce `OutsideLoopScope` when there is none.
1143 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
1144 let target_id = match destination {
1146 if let Some(loop_id) = self.resolver.get_label_res(id) {
1147 Ok(self.lower_node_id(loop_id))
1149 Err(hir::LoopIdError::UnresolvedLabel)
1156 .map(|id| Ok(self.lower_node_id(id)))
1157 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
1159 hir::Destination { label: destination.map(|(_, label)| label), target_id }
// Resolves the destination of a `break`/`continue`. An unlabeled jump inside a
// `while` condition is an error (`UnlabeledCfInWhileCondition`); everything
// else defers to `lower_loop_destination`.
1162 fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
1163 if self.is_in_loop_condition && opt_label.is_none() {
1166 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
1169 self.lower_loop_destination(opt_label.map(|label| (id, label)))
// Runs `f` with `catch_id` pushed as the innermost `try`-block (catch) scope,
// so that `?` desugaring inside `f` can target it; pops it afterwards and
// asserts the scope stack was used in strict LIFO order.
1173 fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1174 let len = self.catch_scopes.len();
1175 self.catch_scopes.push(catch_id);
1177 let result = f(self);
// (assertion arguments elided in this excerpt)
1180 self.catch_scopes.len(),
1181 "catch scopes should be added and removed in stack order"
1184 self.catch_scopes.pop().unwrap();
// Runs `f` with `loop_id` pushed as the innermost loop scope (the target of
// unlabeled `break`/`continue`), restoring both the scope stack and the saved
// `is_in_loop_condition` flag afterwards.
1189 fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1190 // We're no longer in the base loop's condition; we're in another loop.
1191 let was_in_loop_condition = self.is_in_loop_condition;
1192 self.is_in_loop_condition = false;
1194 let len = self.loop_scopes.len();
1195 self.loop_scopes.push(loop_id);
1197 let result = f(self);
// (assertion arguments elided in this excerpt)
1200 self.loop_scopes.len(),
1201 "loop scopes should be added and removed in stack order"
1204 self.loop_scopes.pop().unwrap();
1206 self.is_in_loop_condition = was_in_loop_condition;
// Runs `f` with `is_in_loop_condition` set, so that unlabeled control flow
// inside a loop condition can be rejected (see `lower_jump_destination`);
// restores the previous flag value afterwards.
1211 fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
1212 let was_in_loop_condition = self.is_in_loop_condition;
1213 self.is_in_loop_condition = true;
1215 let result = f(self);
1217 self.is_in_loop_condition = was_in_loop_condition;
// Lowers an `asm!` invocation to HIR, performing the bulk of inline-asm
// semantic validation along the way:
//   1. target support checks (arch available, `att_syntax` only on x86);
//   2. operand lowering, parsing register / register-class names;
//   3. template-modifier validation against each operand's register class;
//   4. target-feature requirements per register class;
//   5. conflict detection between explicit registers (input vs output).
// Any error returns `hir::ExprKind::Err` after emitting diagnostics.
// NOTE(review): many interior lines are elided in this excerpt; the comments
// below describe only what the visible code shows.
1222 fn lower_expr_asm(&mut self, sp: Span, asm: &InlineAsm) -> hir::ExprKind<'hir> {
1223 if self.sess.asm_arch.is_none() {
1224 struct_span_err!(self.sess, sp, E0472, "asm! is unsupported on this target").emit();
// `att_syntax` is an x86-only dialect option.
1226 if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
1229 Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64)
1233 .struct_span_err(sp, "the `att_syntax` option is only supported on x86")
1237 // Lower operands to HIR, filter_map skips any operands with invalid
1238 // register classes.
1239 let sess = self.sess;
1240 let operands: Vec<_> = asm
1243 .filter_map(|(op, op_sp)| {
// Parses an explicit register name or a register-class name; emits a
// diagnostic and yields `None` (skipping the operand) on failure.
1244 let lower_reg = |reg| {
1246 InlineAsmRegOrRegClass::Reg(s) => asm::InlineAsmRegOrRegClass::Reg(
1247 asm::InlineAsmReg::parse(
1249 |feature| sess.target_features.contains(&Symbol::intern(feature)),
1254 let msg = format!("invalid register `{}`: {}", s.as_str(), e);
1255 sess.struct_span_err(*op_sp, &msg).emit();
1259 InlineAsmRegOrRegClass::RegClass(s) => {
1260 asm::InlineAsmRegOrRegClass::RegClass(
1261 asm::InlineAsmRegClass::parse(sess.asm_arch?, s)
1264 "invalid register class `{}`: {}",
1268 sess.struct_span_err(*op_sp, &msg).emit();
1276 // lower_reg is executed last because we need to lower all
1277 // sub-expressions even if we throw them away later.
1278 let op = match *op {
1279 InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
1280 expr: self.lower_expr_mut(expr),
1281 reg: lower_reg(reg)?,
1283 InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
1285 expr: expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
1286 reg: lower_reg(reg)?,
1288 InlineAsmOperand::InOut { reg, late, ref expr } => {
1289 hir::InlineAsmOperand::InOut {
1291 expr: self.lower_expr_mut(expr),
1292 reg: lower_reg(reg)?,
1295 InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
1296 hir::InlineAsmOperand::SplitInOut {
1298 in_expr: self.lower_expr_mut(in_expr),
1299 out_expr: out_expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
1300 reg: lower_reg(reg)?,
1303 InlineAsmOperand::Const { ref expr } => {
1304 hir::InlineAsmOperand::Const { expr: self.lower_expr_mut(expr) }
1306 InlineAsmOperand::Sym { ref expr } => {
1307 hir::InlineAsmOperand::Sym { expr: self.lower_expr_mut(expr) }
1314 // Stop if there were any errors when lowering the register classes
// (a shorter `operands` vec means `filter_map` dropped a bad operand).
1315 if operands.len() != asm.operands.len() || sess.asm_arch.is_none() {
1316 return hir::ExprKind::Err;
1319 // Validate template modifiers against the register classes for the operands
1320 let asm_arch = sess.asm_arch.unwrap();
1321 for p in &asm.template {
1322 if let InlineAsmTemplatePiece::Placeholder {
1324 modifier: Some(modifier),
1325 span: placeholder_span,
1328 let op_sp = asm.operands[operand_idx].1;
1329 match &operands[operand_idx] {
// Register-backed operands: the modifier must be in the class's valid set.
1330 hir::InlineAsmOperand::In { reg, .. }
1331 | hir::InlineAsmOperand::Out { reg, .. }
1332 | hir::InlineAsmOperand::InOut { reg, .. }
1333 | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
1334 let class = reg.reg_class();
1335 let valid_modifiers = class.valid_modifiers(asm_arch);
1336 if !valid_modifiers.contains(&modifier) {
1337 let mut err = sess.struct_span_err(
1339 "invalid asm template modifier for this register class",
1341 err.span_label(placeholder_span, "template modifier");
1342 err.span_label(op_sp, "argument");
// Suggest the full list of modifiers the class does support, if any.
1343 if !valid_modifiers.is_empty() {
1344 let mut mods = format!("`{}`", valid_modifiers[0]);
1345 for m in &valid_modifiers[1..] {
1346 let _ = write!(mods, ", `{}`", m);
1349 "the `{}` register class supports \
1350 the following template modifiers: {}",
1356 "the `{}` register class does not support template modifiers",
// `const`/`sym` operands never take modifiers.
1363 hir::InlineAsmOperand::Const { .. } => {
1364 let mut err = sess.struct_span_err(
1366 "asm template modifiers are not allowed for `const` arguments",
1368 err.span_label(placeholder_span, "template modifier");
1369 err.span_label(op_sp, "argument");
1372 hir::InlineAsmOperand::Sym { .. } => {
1373 let mut err = sess.struct_span_err(
1375 "asm template modifiers are not allowed for `sym` arguments",
1377 err.span_label(placeholder_span, "template modifier");
1378 err.span_label(op_sp, "argument");
// Per-operand validation: target-feature availability and explicit-register
// conflicts. `used_input_regs`/`used_output_regs` map a concrete register to
// the index of the first operand that claimed it.
1385 let mut used_input_regs = FxHashMap::default();
1386 let mut used_output_regs = FxHashMap::default();
1387 let mut required_features: Vec<&str> = vec![];
1388 for (idx, op) in operands.iter().enumerate() {
1389 let op_sp = asm.operands[idx].1;
1390 if let Some(reg) = op.reg() {
1391 // Make sure we don't accidentally carry features from the
1392 // previous iteration.
1393 required_features.clear();
1395 // Validate register classes against currently enabled target
1396 // features. We check that at least one type is available for
1397 // the current target.
1398 let reg_class = reg.reg_class();
1399 for &(_, feature) in reg_class.supported_types(asm_arch) {
1400 if let Some(feature) = feature {
// An enabled feature satisfies the class: discard collected requirements.
1401 if self.sess.target_features.contains(&Symbol::intern(feature)) {
1402 required_features.clear();
1405 required_features.push(feature);
// A type with no feature requirement also satisfies the class.
1408 required_features.clear();
1412 // We are sorting primitive strs here and can use unstable sort here
1413 required_features.sort_unstable();
1414 required_features.dedup();
1415 match &required_features[..] {
1419 "register class `{}` requires the `{}` target feature",
1423 sess.struct_span_err(op_sp, &msg).emit();
1427 "register class `{}` requires at least one target feature: {}",
1431 sess.struct_span_err(op_sp, &msg).emit();
1435 // Check for conflicts between explicit register operands.
1436 if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
// Classify the operand as input and/or output for conflict purposes.
1437 let (input, output) = match op {
1438 hir::InlineAsmOperand::In { .. } => (true, false),
1439 // Late output do not conflict with inputs, but normal outputs do
1440 hir::InlineAsmOperand::Out { late, .. } => (!late, true),
1441 hir::InlineAsmOperand::InOut { .. }
1442 | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),
1443 hir::InlineAsmOperand::Const { .. } | hir::InlineAsmOperand::Sym { .. } => {
1448 // Flag to output the error only once per operand
1449 let mut skip = false;
// Check every architectural register this one overlaps with.
1450 reg.overlapping_regs(|r| {
1451 let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
1453 match used_regs.entry(r) {
1454 Entry::Occupied(o) => {
1460 let idx2 = *o.get();
1461 let op2 = &operands[idx2];
1462 let op_sp2 = asm.operands[idx2].1;
1463 let reg2 = match op2.reg() {
1464 Some(asm::InlineAsmRegOrRegClass::Reg(r)) => r,
1465 _ => unreachable!(),
1469 "register `{}` conflicts with register `{}`",
1473 let mut err = sess.struct_span_err(op_sp, &msg);
1474 err.span_label(op_sp, &format!("register `{}`", reg.name()));
1475 err.span_label(op_sp2, &format!("register `{}`", reg2.name()));
// For an in/out clash, suggest `lateout` on whichever side is the output.
1479 hir::InlineAsmOperand::In { .. },
1480 hir::InlineAsmOperand::Out { late, .. },
1483 hir::InlineAsmOperand::Out { late, .. },
1484 hir::InlineAsmOperand::In { .. },
1487 let out_op_sp = if input { op_sp2 } else { op_sp };
1488 let msg = "use `lateout` instead of \
1489 `out` to avoid conflict";
1490 err.span_help(out_op_sp, msg);
1497 Entry::Vacant(v) => {
1503 check(&mut used_input_regs, true);
1506 check(&mut used_output_regs, false);
// Everything validated: allocate the final HIR inline-asm node in the arena.
1513 let operands = self.arena.alloc_from_iter(operands);
1514 let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
1515 let line_spans = self.arena.alloc_slice(&asm.line_spans[..]);
1516 let hir_asm = hir::InlineAsm { template, operands, options: asm.options, line_spans };
1517 hir::ExprKind::InlineAsm(self.arena.alloc(hir_asm))
// Lowers legacy `llvm_asm!` to HIR. The constraint strings, clobbers, and
// dialect flags are copied verbatim into `LlvmInlineAsmInner` (no validation
// here — LLVM interprets them); the input/output expressions are lowered
// separately into arena-allocated slices.
1520 fn lower_expr_llvm_asm(&mut self, asm: &LlvmInlineAsm) -> hir::ExprKind<'hir> {
1521 let inner = hir::LlvmInlineAsmInner {
1522 inputs: asm.inputs.iter().map(|&(c, _)| c).collect(),
1526 .map(|out| hir::LlvmInlineAsmOutput {
1527 constraint: out.constraint,
1529 is_indirect: out.is_indirect,
1530 span: out.expr.span,
1534 asm_str_style: asm.asm_str_style,
1535 clobbers: asm.clobbers.clone(),
1536 volatile: asm.volatile,
1537 alignstack: asm.alignstack,
1538 dialect: asm.dialect,
1540 let hir_asm = hir::LlvmInlineAsm {
// Inputs keep only their exprs here; the constraints live in `inner` above.
1542 inputs_exprs: self.arena.alloc_from_iter(
1543 asm.inputs.iter().map(|&(_, ref input)| self.lower_expr_mut(input)),
1547 .alloc_from_iter(asm.outputs.iter().map(|out| self.lower_expr_mut(&out.expr))),
1549 hir::ExprKind::LlvmInlineAsm(self.arena.alloc(hir_asm))
// Lowers a single struct-literal field (`name: expr` or shorthand `name`),
// assigning it a fresh HIR id and lowering its value expression.
1552 fn lower_field(&mut self, f: &Field) -> hir::Field<'hir> {
1554 hir_id: self.next_id(),
1556 expr: self.lower_expr(&f.expr),
1558 is_shorthand: f.is_shorthand,
// Lowers a `yield` expression. Seeing a `yield` is what marks the enclosing
// body as a generator (`generator_kind` is set here if unset); yielding inside
// an `async` body is rejected. A bare `yield` yields the unit value.
1562 fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1563 match self.generator_kind {
1564 Some(hir::GeneratorKind::Gen) => {}
1565 Some(hir::GeneratorKind::Async(_)) => {
1570 "`async` generators are not yet supported"
1574 None => self.generator_kind = Some(hir::GeneratorKind::Gen),
// Default the yielded value to `()` when no operand was written.
1578 opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1580 hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1583 /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
// (scheme continues below; the fenced code block lines are partially elided
// in this excerpt, as is the `fn` header itself — NOTE(review): confirm the
// signature against the full file.)
1586 /// let result = match ::std::iter::IntoIterator::into_iter(<head>) {
1588 /// [opt_ident]: loop {
1590 /// match ::std::iter::Iterator::next(&mut iter) {
1591 /// ::std::option::Option::Some(val) => __next = val,
1592 /// ::std::option::Option::None => break
1594 /// let <pat> = __next;
1595 /// StmtKind::Expr(<body>);
1608 opt_label: Option<Label>,
1609 ) -> hir::Expr<'hir> {
// Mark the lowered head with a for-loop desugaring span so diagnostics can
// point at the desugaring rather than user code.
1610 let orig_head_span = head.span;
1612 let mut head = self.lower_expr_mut(head);
1613 let desugared_span = self.mark_span_with_reason(
1614 DesugaringKind::ForLoop(ForLoopLoc::Head),
1618 head.span = desugared_span;
1620 let iter = Ident::with_dummy_span(sym::iter);
// `__next` is the mutable slot the Some-arm writes into each iteration.
1622 let next_ident = Ident::with_dummy_span(sym::__next);
1623 let (next_pat, next_pat_hid) = self.pat_ident_binding_mode(
1626 hir::BindingAnnotation::Mutable,
1629 // `::std::option::Option::Some(val) => __next = val`
1631 let val_ident = Ident::with_dummy_span(sym::val);
1632 let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident);
1633 let val_expr = self.expr_ident(pat.span, val_ident, val_pat_hid);
1634 let next_expr = self.expr_ident(pat.span, next_ident, next_pat_hid);
1635 let assign = self.arena.alloc(self.expr(
1637 hir::ExprKind::Assign(next_expr, val_expr, pat.span),
1640 let some_pat = self.pat_some(pat.span, val_pat);
1641 self.arm(some_pat, assign)
1644 // `::std::option::Option::None => break`
// The break targets the desugared loop, hence the surrounding loop scope.
1647 self.with_loop_scope(e.id, |this| this.expr_break(e.span, ThinVec::new()));
1648 let pat = self.pat_none(e.span);
1649 self.arm(pat, break_expr)
1653 let (iter_pat, iter_pat_nid) =
1654 self.pat_ident_binding_mode(desugared_span, iter, hir::BindingAnnotation::Mutable);
1656 // `match ::std::iter::Iterator::next(&mut iter) { ... }`
1658 let iter = self.expr_ident(desugared_span, iter, iter_pat_nid);
1659 let ref_mut_iter = self.expr_mut_addr_of(desugared_span, iter);
1660 let next_expr = self.expr_call_lang_item_fn(
1662 hir::LangItem::IteratorNext,
1663 arena_vec![self; ref_mut_iter],
1665 let arms = arena_vec![self; pat_arm, break_arm];
1667 self.expr_match(desugared_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
1669 let match_stmt = self.stmt_expr(desugared_span, match_expr);
1671 let next_expr = self.expr_ident(desugared_span, next_ident, next_pat_hid);
// `let mut __next;` — declared before the match so the Some-arm can assign it.
1674 let next_let = self.stmt_let_pat(
1679 hir::LocalSource::ForLoopDesugar,
1682 // `let <pat> = __next`
1683 let pat = self.lower_pat(pat);
1684 let pat_let = self.stmt_let_pat(
1689 hir::LocalSource::ForLoopDesugar,
// The user's loop body runs inside the desugared loop's scope so that
// `break`/`continue` inside it resolve to this loop.
1692 let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
1693 let body_expr = self.expr_block(body_block, ThinVec::new());
1694 let body_stmt = self.stmt_expr(body.span, body_expr);
1696 let loop_block = self.block_all(
1698 arena_vec![self; next_let, match_stmt, pat_let, body_stmt],
1702 // `[opt_ident]: loop { ... }`
1703 let kind = hir::ExprKind::Loop(loop_block, opt_label, hir::LoopSource::ForLoop);
1704 let loop_expr = self.arena.alloc(hir::Expr {
// The loop node reuses the original for-expression's id.
1705 hir_id: self.lower_node_id(e.id),
1708 attrs: ThinVec::new(),
1711 // `mut iter => { ... }`
1712 let iter_arm = self.arm(iter_pat, loop_expr);
1714 let into_iter_span = self.mark_span_with_reason(
1715 DesugaringKind::ForLoop(ForLoopLoc::IntoIter),
1720 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
1721 let into_iter_expr = {
1722 self.expr_call_lang_item_fn(
1724 hir::LangItem::IntoIterIntoIter,
1725 arena_vec![self; head],
1729 let match_expr = self.arena.alloc(self.expr_match(
1732 arena_vec![self; iter_arm],
1733 hir::MatchSource::ForLoopDesugar,
1736 let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
1738 // This is effectively `{ let _result = ...; _result }`.
1739 // The construct was introduced in #21984 and is necessary to make sure that
1740 // temporaries in the `head` expression are dropped and do not leak to the
1741 // surrounding scope of the `match` since the `match` is not a terminating scope.
1743 // Also, add the attributes to the outer returned expr node.
1744 self.expr_drop_temps_mut(desugared_span, match_expr, attrs.into())
1747 /// Desugar `ExprKind::Try` from: `<expr>?` into:
// (scheme continues below)
1749 /// match Try::into_result(<expr>) {
1750 /// Ok(val) => #[allow(unreachable_code)] val,
1751 /// Err(err) => #[allow(unreachable_code)]
1752 /// // If there is an enclosing `try {...}`:
1753 /// break 'catch_target Try::from_error(From::from(err)),
1755 /// return Try::from_error(From::from(err)),
1758 fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
// Two marked spans: one for the `?` desugaring as a whole and one narrowed to
// the `?` token itself (`end_point` of the expression's span).
1759 let unstable_span = self.mark_span_with_reason(
1760 DesugaringKind::QuestionMark,
1762 self.allow_try_trait.clone(),
1764 let try_span = self.sess.source_map().end_point(span);
1765 let try_span = self.mark_span_with_reason(
1766 DesugaringKind::QuestionMark,
1768 self.allow_try_trait.clone(),
1771 // `Try::into_result(<expr>)`
1774 let sub_expr = self.lower_expr_mut(sub_expr);
1776 self.expr_call_lang_item_fn(
1778 hir::LangItem::TryIntoResult,
1779 arena_vec![self; sub_expr],
1783 // `#[allow(unreachable_code)]`
// Built by hand so the lint is suppressed on both desugared arms without the
// user having written the attribute.
1785 // `allow(unreachable_code)`
1787 let allow_ident = Ident::new(sym::allow, span);
1788 let uc_ident = Ident::new(sym::unreachable_code, span);
1789 let uc_nested = attr::mk_nested_word_item(uc_ident);
1790 attr::mk_list_item(allow_ident, vec![uc_nested])
1792 attr::mk_attr_outer(allow)
1794 let attrs = vec![attr];
1796 // `Ok(val) => #[allow(unreachable_code)] val,`
1798 let val_ident = Ident::with_dummy_span(sym::val);
1799 let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
1800 let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
1804 ThinVec::from(attrs.clone()),
1806 let ok_pat = self.pat_ok(span, val_pat);
1807 self.arm(ok_pat, val_expr)
1810 // `Err(err) => #[allow(unreachable_code)]
1811 // return Try::from_error(From::from(err)),`
1813 let err_ident = Ident::with_dummy_span(sym::err);
1814 let (err_local, err_local_nid) = self.pat_ident(try_span, err_ident);
// `From::from(err)` then wrapped via `Try::from_error(..)`.
1816 let err_expr = self.expr_ident_mut(try_span, err_ident, err_local_nid);
1817 self.expr_call_lang_item_fn(
1819 hir::LangItem::FromFrom,
1820 arena_vec![self; err_expr],
1823 let from_err_expr = self.wrap_in_try_constructor(
1824 hir::LangItem::TryFromError,
1829 let thin_attrs = ThinVec::from(attrs);
// Inside a `try { .. }` block the error arm `break`s to the enclosing catch
// scope; otherwise it `return`s from the function.
1830 let catch_scope = self.catch_scopes.last().copied();
1831 let ret_expr = if let Some(catch_node) = catch_scope {
1832 let target_id = Ok(self.lower_node_id(catch_node));
1833 self.arena.alloc(self.expr(
1835 hir::ExprKind::Break(
1836 hir::Destination { label: None, target_id },
1837 Some(from_err_expr),
1842 self.arena.alloc(self.expr(
1844 hir::ExprKind::Ret(Some(from_err_expr)),
1849 let err_pat = self.pat_err(try_span, err_local);
1850 self.arm(err_pat, ret_expr)
1853 hir::ExprKind::Match(
1855 arena_vec![self; err_arm, ok_arm],
1856 hir::MatchSource::TryDesugar,
1860 // =========================================================================
1861 // Helper methods for building HIR.
1862 // =========================================================================
1864 /// Constructs a `true` or `false` literal expression.
1865 pub(super) fn expr_bool(&mut self, span: Span, val: bool) -> &'hir hir::Expr<'hir> {
1866 let lit = Spanned { span, node: LitKind::Bool(val) };
1867 self.arena.alloc(self.expr(span, hir::ExprKind::Lit(lit), ThinVec::new()))
1870 /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1872 /// In terms of drop order, it has the same effect as wrapping `expr` in
1873 /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1875 /// The drop order can be important in e.g. `if expr { .. }`.
// Arena-allocating wrapper over `expr_drop_temps_mut`.
1876 pub(super) fn expr_drop_temps(
1879 expr: &'hir hir::Expr<'hir>,
1881 ) -> &'hir hir::Expr<'hir> {
1882 self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
// By-value variant of `expr_drop_temps`: builds the `DropTemps` node without
// arena-allocating it, letting the caller decide where it lives.
1885 pub(super) fn expr_drop_temps_mut(
1888 expr: &'hir hir::Expr<'hir>,
1890 ) -> hir::Expr<'hir> {
1891 self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
// Builds a `match` expression over `arg` with the given arms and match source
// (the source records which desugaring, if any, produced the match).
// NOTE(review): the `fn expr_match(` header line appears elided in this
// excerpt; only the parameter list onward is visible.
1897 arg: &'hir hir::Expr<'hir>,
1898 arms: &'hir [hir::Arm<'hir>],
1899 source: hir::MatchSource,
1900 ) -> hir::Expr<'hir> {
1901 self.expr(span, hir::ExprKind::Match(arg, arms, source), ThinVec::new())
// Builds an unlabeled `break` targeting the innermost loop scope
// (destination resolved via `lower_loop_destination(None)`).
1904 fn expr_break(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
1905 let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
1906 self.arena.alloc(self.expr(span, expr_break, attrs))
// Builds `&mut <e>` (a mutable reference-style borrow of `e`).
1909 fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1912 hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
// Builds the unit expression `()` (an empty tuple).
1917 fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
1918 self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), ThinVec::new()))
// Builds a call expression `e(args...)` by value (not arena-allocated).
// NOTE(review): the `fn expr_call_mut(` header line appears elided in this
// excerpt; only the parameter list onward is visible.
1924 e: &'hir hir::Expr<'hir>,
1925 args: &'hir [hir::Expr<'hir>],
1926 ) -> hir::Expr<'hir> {
1927 self.expr(span, hir::ExprKind::Call(e, args), ThinVec::new())
// Arena-allocating wrapper over `expr_call_mut`.
// NOTE(review): the `fn expr_call(` header line appears elided in this excerpt.
1933 e: &'hir hir::Expr<'hir>,
1934 args: &'hir [hir::Expr<'hir>],
1935 ) -> &'hir hir::Expr<'hir> {
1936 self.arena.alloc(self.expr_call_mut(span, e, args))
// Builds a call to a lang-item function, e.g. `Iterator::next(args...)`,
// by first constructing a lang-item path expression for the callee.
1939 fn expr_call_lang_item_fn_mut(
1942 lang_item: hir::LangItem,
1943 args: &'hir [hir::Expr<'hir>],
1944 ) -> hir::Expr<'hir> {
1945 let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item, ThinVec::new()));
1946 self.expr_call_mut(span, path, args)
// Arena-allocating wrapper over `expr_call_lang_item_fn_mut`.
1949 fn expr_call_lang_item_fn(
1952 lang_item: hir::LangItem,
1953 args: &'hir [hir::Expr<'hir>],
1954 ) -> &'hir hir::Expr<'hir> {
1955 self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args))
// Builds a path expression referring to a lang item (resolved late via
// `QPath::LangItem` rather than a textual `::std::...` path).
1958 fn expr_lang_item_path(
1961 lang_item: hir::LangItem,
1963 ) -> hir::Expr<'hir> {
1964 self.expr(span, hir::ExprKind::Path(hir::QPath::LangItem(lang_item, span)), attrs)
// Arena-allocating wrapper over `expr_ident_mut`: a path expression that
// refers to the local binding `binding` by the name `ident`.
1967 pub(super) fn expr_ident(
1971 binding: hir::HirId,
1972 ) -> &'hir hir::Expr<'hir> {
1973 self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
// By-value variant of `expr_ident`, with no attributes attached.
1976 pub(super) fn expr_ident_mut(
1980 binding: hir::HirId,
1981 ) -> hir::Expr<'hir> {
1982 self.expr_ident_with_attrs(sp, ident, binding, ThinVec::new())
// Builds a single-segment resolved path expression (`Res::Local(binding)`)
// naming a local variable, carrying the given attributes (used e.g. for the
// `#[allow(unreachable_code)]` arms of the `?` desugaring).
1985 fn expr_ident_with_attrs(
1989 binding: hir::HirId,
1991 ) -> hir::Expr<'hir> {
1992 let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
1994 self.arena.alloc(hir::Path {
1996 res: Res::Local(binding),
1997 segments: arena_vec![self; hir::PathSegment::from_ident(ident)],
2001 self.expr(span, expr_path, attrs)
// Wraps `expr` in a compiler-generated `unsafe { ... }` block expression
// (check mode `UnsafeBlock(CompilerGenerated)`), reusing `expr`'s span.
2004 fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
2005 let hir_id = self.next_id();
2006 let span = expr.span;
2009 hir::ExprKind::Block(
2010 self.arena.alloc(hir::Block {
2014 rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
2016 targeted_by_break: false,
// Builds an empty block expression `{}` at `span`.
2024 fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
2025 let blk = self.block_all(span, &[], None);
2026 let expr = self.expr_block(blk, ThinVec::new());
2027 self.arena.alloc(expr)
// Wraps an already-built block `b` in a (label-less) block expression,
// inheriting the block's span.
2030 pub(super) fn expr_block(
2032 b: &'hir hir::Block<'hir>,
2034 ) -> hir::Expr<'hir> {
2035 self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
// The fundamental expression constructor: assembles a `hir::Expr` from a
// kind/span/attrs, minting a fresh HIR id for it.
// NOTE(review): the `fn expr(` header line appears elided in this excerpt.
2041 kind: hir::ExprKind<'hir>,
2043 ) -> hir::Expr<'hir> {
2044 hir::Expr { hir_id: self.next_id(), kind, span, attrs }
// Builds a synthetic (non-shorthand) struct-literal field with a fresh HIR id;
// used by desugarings such as the range lowering above.
2047 fn field(&mut self, ident: Ident, expr: &'hir hir::Expr<'hir>, span: Span) -> hir::Field<'hir> {
2048 hir::Field { hir_id: self.next_id(), ident, span, expr, is_shorthand: false }
2051 fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
2053 hir_id: self.next_id(),