1 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
5 use rustc_ast::ptr::P as AstP;
6 use rustc_data_structures::fx::FxHashMap;
7 use rustc_data_structures::stack::ensure_sufficient_stack;
8 use rustc_data_structures::thin_vec::ThinVec;
9 use rustc_errors::struct_span_err;
11 use rustc_hir::def::Res;
12 use rustc_span::source_map::{respan, DesugaringKind, ForLoopLoc, Span, Spanned};
13 use rustc_span::symbol::{sym, Ident, Symbol};
14 use rustc_target::asm;
15 use std::collections::hash_map::Entry;
18 impl<'hir> LoweringContext<'_, 'hir> {
/// Lowers a slice of AST expressions into an arena-allocated slice of HIR
/// expressions, preserving order. Thin plural wrapper over `lower_expr_mut`.
19 fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
20 self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
/// Lowers one AST expression and arena-allocates the resulting HIR
/// expression, returning a shared reference (wrapper over `lower_expr_mut`).
23 pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
24 self.arena.alloc(self.lower_expr_mut(e))
/// Central dispatch for expression lowering: maps each `ast::ExprKind`
/// variant to the corresponding `hir::ExprKind`, delegating complex
/// desugarings (`if`/`while`/`for`, `async`, `.await`, `?`, ranges, …) to
/// dedicated `lower_expr_*` helpers.
///
/// Runs inside `ensure_sufficient_stack` because lowering recurses
/// structurally over deeply nested expressions and could otherwise overflow
/// the stack.
///
/// NOTE(review): this listing is gap-sampled — several interior lines of the
/// match (argument lists, closing delimiters, some arms) are not visible
/// here; confirm against the original file before relying on omitted detail.
27 pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
28 ensure_sufficient_stack(|| {
29 let kind = match e.kind {
30 ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
31 ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
32 ExprKind::Repeat(ref expr, ref count) => {
// The repeat count is an anonymous const, lowered separately from
// the element expression.
33 let expr = self.lower_expr(expr);
34 let count = self.lower_anon_const(count);
35 hir::ExprKind::Repeat(expr, count)
37 ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
38 ExprKind::Call(ref f, ref args) => {
39 let f = self.lower_expr(f);
40 hir::ExprKind::Call(f, self.lower_exprs(args))
42 ExprKind::MethodCall(ref seg, ref args, span) => {
// Method-call path segments may not carry parenthesized generic
// args (`x.f::(A) -> B()` is an error) and disallow impl-trait.
43 let hir_seg = self.arena.alloc(self.lower_path_segment(
48 ParenthesizedGenericArgs::Err,
49 ImplTraitContext::disallowed(),
52 let args = self.lower_exprs(args);
53 hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args, span)
55 ExprKind::Binary(binop, ref lhs, ref rhs) => {
56 let binop = self.lower_binop(binop);
57 let lhs = self.lower_expr(lhs);
58 let rhs = self.lower_expr(rhs);
59 hir::ExprKind::Binary(binop, lhs, rhs)
61 ExprKind::Unary(op, ref ohs) => {
62 let op = self.lower_unop(op);
63 let ohs = self.lower_expr(ohs);
64 hir::ExprKind::Unary(op, ohs)
66 ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())),
67 ExprKind::Cast(ref expr, ref ty) => {
68 let expr = self.lower_expr(expr);
69 let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
70 hir::ExprKind::Cast(expr, ty)
72 ExprKind::Type(ref expr, ref ty) => {
73 let expr = self.lower_expr(expr);
74 let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
75 hir::ExprKind::Type(expr, ty)
77 ExprKind::AddrOf(k, m, ref ohs) => {
78 let ohs = self.lower_expr(ohs);
79 hir::ExprKind::AddrOf(k, m, ohs)
81 ExprKind::Let(ref pat, ref scrutinee) => {
// `let` in expression position is rejected (error) and desugared
// to a boolean `match` for recovery; see `lower_expr_let`.
82 self.lower_expr_let(e.span, pat, scrutinee)
84 ExprKind::If(ref cond, ref then, ref else_opt) => {
85 self.lower_expr_if(e.span, cond, then, else_opt.as_deref())
87 ExprKind::While(ref cond, ref body, opt_label) => self
// `while` is lowered inside a loop scope so `break`/`continue`
// inside the condition resolve to this loop.
88 .with_loop_scope(e.id, |this| {
89 this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label)
91 ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
93 this.lower_block(body, false),
95 hir::LoopSource::Loop,
98 ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
99 ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
100 self.lower_expr(expr),
101 self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
102 hir::MatchSource::Normal,
104 ExprKind::Async(capture_clause, closure_node_id, ref block) => self
110 hir::AsyncGeneratorKind::Block,
111 |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
113 ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
// Closures: async closures take a distinct lowering path from
// plain ones. (Preceding arm head not visible in this listing.)
122 if let Async::Yes { closure_id, .. } = asyncness {
123 self.lower_expr_async_closure(
131 self.lower_expr_closure(
140 ExprKind::Block(ref blk, opt_label) => {
141 hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
143 ExprKind::Assign(ref el, ref er, span) => {
144 hir::ExprKind::Assign(self.lower_expr(el), self.lower_expr(er), span)
146 ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
147 self.lower_binop(op),
151 ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident),
152 ExprKind::Index(ref el, ref er) => {
153 hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
// `a..=b` has a dedicated desugaring (`RangeInclusive::new`); all
// other range forms go through the struct-literal desugaring.
155 ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
156 self.lower_expr_range_closed(e.span, e1, e2)
158 ExprKind::Range(ref e1, ref e2, lims) => {
159 self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
161 ExprKind::Path(ref qself, ref path) => {
162 let qpath = self.lower_qpath(
167 ImplTraitContext::disallowed(),
169 hir::ExprKind::Path(qpath)
171 ExprKind::Break(opt_label, ref opt_expr) => {
172 let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
173 hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
175 ExprKind::Continue(opt_label) => {
176 hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
178 ExprKind::Ret(ref e) => {
179 let e = e.as_ref().map(|x| self.lower_expr(x));
180 hir::ExprKind::Ret(e)
182 ExprKind::InlineAsm(ref asm) => self.lower_expr_asm(e.span, asm),
183 ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_llvm_asm(asm),
184 ExprKind::Struct(ref path, ref fields, ref maybe_expr) => {
// `maybe_expr` is the functional-update base (`..base`).
185 let maybe_expr = maybe_expr.as_ref().map(|x| self.lower_expr(x));
186 hir::ExprKind::Struct(
187 self.arena.alloc(self.lower_qpath(
192 ImplTraitContext::disallowed(),
194 self.arena.alloc_from_iter(fields.iter().map(|x| self.lower_field(x))),
198 ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
199 ExprKind::Err => hir::ExprKind::Err,
200 ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
201 ExprKind::Paren(ref ex) => {
// Parens have no HIR node: lower the inner expression and (below)
// widen its span / merge attributes so diagnostics still point at
// the parenthesized source.
202 let mut ex = self.lower_expr_mut(ex);
203 // Include parens in span, but only if it is a super-span.
204 if e.span.contains(ex.span) {
207 // Merge attributes into the inner expression.
208 let mut attrs = e.attrs.clone();
209 attrs.extend::<Vec<_>>(ex.attrs.into());
214 // Desugar `ExprForLoop`
215 // from: `[opt_ident]: for <pat> in <head> <body>`
216 ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
// Early return: `lower_expr_for` builds the complete hir::Expr
// itself rather than just a kind.
217 return self.lower_expr_for(e, pat, head, body, opt_label);
219 ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
// Assemble the final hir::Expr with a freshly lowered HirId and attrs.
223 hir_id: self.lower_node_id(e.id),
226 attrs: e.attrs.iter().map(|a| self.lower_attr(a)).collect::<Vec<_>>().into(),
/// Maps an AST unary operator to its HIR counterpart (1:1, no desugaring).
231 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
233 UnOp::Deref => hir::UnOp::UnDeref,
234 UnOp::Not => hir::UnOp::UnNot,
235 UnOp::Neg => hir::UnOp::UnNeg,
/// Maps an AST binary operator to its HIR counterpart (1:1, no desugaring;
/// the surrounding span/respan handling is not visible in this listing).
239 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
242 BinOpKind::Add => hir::BinOpKind::Add,
243 BinOpKind::Sub => hir::BinOpKind::Sub,
244 BinOpKind::Mul => hir::BinOpKind::Mul,
245 BinOpKind::Div => hir::BinOpKind::Div,
246 BinOpKind::Rem => hir::BinOpKind::Rem,
247 BinOpKind::And => hir::BinOpKind::And,
248 BinOpKind::Or => hir::BinOpKind::Or,
249 BinOpKind::BitXor => hir::BinOpKind::BitXor,
250 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
251 BinOpKind::BitOr => hir::BinOpKind::BitOr,
252 BinOpKind::Shl => hir::BinOpKind::Shl,
253 BinOpKind::Shr => hir::BinOpKind::Shr,
254 BinOpKind::Eq => hir::BinOpKind::Eq,
255 BinOpKind::Lt => hir::BinOpKind::Lt,
256 BinOpKind::Le => hir::BinOpKind::Le,
257 BinOpKind::Ne => hir::BinOpKind::Ne,
258 BinOpKind::Ge => hir::BinOpKind::Ge,
259 BinOpKind::Gt => hir::BinOpKind::Gt,
265 /// Emit an error and lower `ast::ExprKind::Let(pat, scrutinee)` into:
267 /// match scrutinee { pats => true, _ => false }
///
/// `let` is not a general expression; this always reports an error (with a
/// nightly-specific message) and then produces a well-typed recovery node so
/// compilation can continue past it.
269 fn lower_expr_let(&mut self, span: Span, pat: &Pat, scrutinee: &Expr) -> hir::ExprKind<'hir> {
270 // If we got here, the `let` expression is not allowed.
// On nightly, explain where `let`-expressions *are* supported; on
// stable, give the plainer "statement, not expression" message.
272 if self.sess.opts.unstable_features.is_nightly_build() {
274 .struct_span_err(span, "`let` expressions are not supported here")
275 .note("only supported directly in conditions of `if`- and `while`-expressions")
276 .note("as well as when nested within `&&` and parenthesis in those conditions")
280 .struct_span_err(span, "expected expression, found statement (`let`)")
281 .note("variable declaration using `let` is a statement")
285 // For better recovery, we emit:
287 // match scrutinee { pat => true, _ => false }
289 // While this doesn't fully match the user's intent, it has key advantages:
290 // 1. We can avoid using `abort_if_errors`.
291 // 2. We can typeck both `pat` and `scrutinee`.
292 // 3. `pat` is allowed to be refutable.
293 // 4. The return type of the block is `bool` which seems like what the user wanted.
294 let scrutinee = self.lower_expr(scrutinee);
// Arm 1: `pat => true`.
296 let pat = self.lower_pat(pat);
297 let expr = self.expr_bool(span, true);
// Arm 2: `_ => false`.
301 let pat = self.pat_wild(span);
302 let expr = self.expr_bool(span, false);
305 hir::ExprKind::Match(
307 arena_vec![self; then_arm, else_arm],
308 hir::MatchSource::Normal,
// Tail of `lower_expr_if`'s signature (the `fn` line and earlier parameters
// are not visible in this listing). Desugars `if cond { then } else { els }`
// into a two-arm `match`: `if let` becomes a match on the scrutinee, a plain
// `if` becomes a match on `true` against a drop-temps-wrapped condition.
317 else_opt: Option<&Expr>,
318 ) -> hir::ExprKind<'hir> {
319 // FIXME(#53667): handle lowering of && and parens.
321 // `_ => else_block` where `else_block` is `{}` if there's `None`:
322 let else_pat = self.pat_wild(span);
323 let (else_expr, contains_else_clause) = match else_opt {
324 None => (self.expr_block_empty(span), false),
325 Some(els) => (self.lower_expr(els), true),
327 let else_arm = self.arm(else_pat, else_expr);
329 // Handle then + scrutinee:
330 let then_expr = self.lower_block_expr(then);
331 let (then_pat, scrutinee, desugar) = match cond.kind {
332 // `<pat> => <then>`:
333 ExprKind::Let(ref pat, ref scrutinee) => {
// `if let`: match directly on the lowered scrutinee/pattern.
334 let scrutinee = self.lower_expr(scrutinee);
335 let pat = self.lower_pat(pat);
336 (pat, scrutinee, hir::MatchSource::IfLetDesugar { contains_else_clause })
// Plain `if cond`: (arm head not visible in this listing).
341 let cond = self.lower_expr(cond);
343 self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
344 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
345 // to preserve drop semantics since `if cond { ... }` does not
346 // let temporaries live outside of `cond`.
347 let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
348 let pat = self.pat_bool(span, true);
349 (pat, cond, hir::MatchSource::IfDesugar { contains_else_clause })
352 let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));
354 hir::ExprKind::Match(scrutinee, arena_vec![self; then_arm, else_arm], desugar)
/// Desugars `'label: while cond { body }` into
/// `'label: loop { match <cond> { <pat> => body, _ => break } }`,
/// where `while let` matches the user pattern and a plain `while` matches
/// `true` against a drop-temps-wrapped condition. Must be called inside the
/// loop's `with_loop_scope` (see the `While` arm of `lower_expr_mut`).
/// NOTE(review): some parameter and interior lines are not visible in this
/// gap-sampled listing.
357 fn lower_expr_while_in_loop_scope(
362 opt_label: Option<Label>,
363 ) -> hir::ExprKind<'hir> {
364 // FIXME(#53667): handle lowering of && and parens.
366 // Note that the block AND the condition are evaluated in the loop scope.
367 // This is done to allow `break` from inside the condition of the loop.
// `_ => break` arm: exits the desugared loop when the condition fails.
371 let else_pat = self.pat_wild(span);
372 let else_expr = self.expr_break(span, ThinVec::new());
373 self.arm(else_pat, else_expr)
376 // Handle then + scrutinee:
377 let then_expr = self.lower_block_expr(body);
378 let (then_pat, scrutinee, desugar, source) = match cond.kind {
379 ExprKind::Let(ref pat, ref scrutinee) => {
382 // [opt_ident]: loop {
383 // match <sub_expr> {
// The scrutinee is lowered in loop-*condition* scope so that
// unlabeled control flow inside it is diagnosed correctly.
388 let scrutinee = self.with_loop_condition_scope(|t| t.lower_expr(scrutinee));
389 let pat = self.lower_pat(pat);
390 (pat, scrutinee, hir::MatchSource::WhileLetDesugar, hir::LoopSource::WhileLet)
393 // We desugar: `'label: while $cond $body` into:
397 // match drop-temps { $cond } {
405 let cond = self.with_loop_condition_scope(|this| this.lower_expr(cond));
407 self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
408 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
409 // to preserve drop semantics since `while cond { ... }` does not
410 // let temporaries live outside of `cond`.
411 let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
413 let pat = self.pat_bool(span, true);
414 (pat, cond, hir::MatchSource::WhileDesugar, hir::LoopSource::While)
417 let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));
419 // `match <scrutinee> { ... }`
421 self.expr_match(span, scrutinee, arena_vec![self; then_arm, else_arm], desugar);
423 // `[opt_ident]: loop { ... }`
424 hir::ExprKind::Loop(self.block_expr(self.arena.alloc(match_expr)), opt_label, source)
427 /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_ok(<expr>) }`,
428 /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_ok(()) }`
429 /// and save the block id to use it as a break target for desugaring of the `?` operator.
430 fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
// `with_catch_scope` registers the block id so `?` inside the block
// can `break` to it instead of returning from the function.
431 self.with_catch_scope(body.id, |this| {
432 let mut block = this.lower_block_noalloc(body, true);
// Spans get a TryBlock desugaring mark so diagnostics can tell
// generated code from user code.
434 let try_span = this.mark_span_with_reason(
435 DesugaringKind::TryBlock,
437 this.allow_try_trait.clone(),
440 // Final expression of the block (if present) or `()` with span at the end of block
441 let tail_expr = block
444 .unwrap_or_else(|| this.expr_unit(this.sess.source_map().end_point(try_span)));
446 let ok_wrapped_span =
447 this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);
449 // `::std::ops::Try::from_ok($tail_expr)`
450 block.expr = Some(this.wrap_in_try_constructor(
457 hir::ExprKind::Block(this.arena.alloc(block), None)
/// Wraps `expr` in a call to a `std::ops::Try` associated constructor
/// (e.g. `from_ok`/`from_error`), producing `::std::ops::Try::<method>(expr)`.
/// Shared by the `try`-block and `?`-operator desugarings.
461 fn wrap_in_try_constructor(
465 expr: &'hir hir::Expr<'hir>,
467 ) -> &'hir hir::Expr<'hir> {
468 let path = &[sym::ops, sym::Try, method];
470 self.arena.alloc(self.expr_std_path(method_span, path, None, ThinVec::new()));
471 self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
/// Lowers a single `match` arm: attributes, pattern, optional `if` guard,
/// and body, under a fresh `HirId`.
474 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
476 hir_id: self.next_id(),
477 attrs: self.lower_attrs(&arm.attrs),
478 pat: self.lower_pat(&arm.pat),
479 guard: match arm.guard {
480 Some(ref x) => Some(hir::Guard::If(self.lower_expr(x))),
483 body: self.lower_expr(&arm.body),
488 /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
493 /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
497 pub(super) fn make_async_expr(
499 capture_clause: CaptureBy,
500 closure_node_id: NodeId,
501 ret_ty: Option<AstP<Ty>>,
503 async_gen_kind: hir::AsyncGeneratorKind,
504 body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
505 ) -> hir::ExprKind<'hir> {
// Return type: explicit `-> T` if the async fn/closure had one,
// otherwise default (`()` / inferred) at `span`.
506 let output = match ret_ty {
507 Some(ty) => hir::FnRetTy::Return(self.lower_ty(&ty, ImplTraitContext::disallowed())),
508 None => hir::FnRetTy::DefaultReturn(span),
511 // Resume argument type. We let the compiler infer this to simplify the lowering. It is
512 // fully constrained by `future::from_generator`.
513 let input_ty = hir::Ty { hir_id: self.next_id(), kind: hir::TyKind::Infer, span };
515 // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
516 let decl = self.arena.alloc(hir::FnDecl {
517 inputs: arena_vec![self; input_ty],
520 implicit_self: hir::ImplicitSelfKind::None,
523 // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
524 let (pat, task_context_hid) = self.pat_ident_binding_mode(
526 Ident::with_dummy_span(sym::_task_context),
527 hir::BindingAnnotation::Mutable,
529 let param = hir::Param { attrs: &[], hir_id: self.next_id(), pat, span };
530 let params = arena_vec![self; param];
532 let body_id = self.lower_body(move |this| {
533 this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
// Save/restore `task_context` around the body so nested async
// constructs each see their own resume-argument binding.
535 let old_ctx = this.task_context;
536 this.task_context = Some(task_context_hid);
537 let res = body(this);
538 this.task_context = old_ctx;
542 // `static |_task_context| -> <ret_ty> { body }`:
543 let generator_kind = hir::ExprKind::Closure(
// `Movability::Static`: the generator is self-referential across
// await points and must not be moved once polled.
548 Some(hir::Movability::Static),
550 let generator = hir::Expr {
551 hir_id: self.lower_node_id(closure_node_id),
552 kind: generator_kind,
554 attrs: ThinVec::new(),
557 // `future::from_generator`:
559 self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
560 let gen_future = self.expr_std_path(
562 &[sym::future, sym::from_generator],
567 // `future::from_generator(generator)`:
568 hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
571 /// Desugar `<expr>.await` into:
574 /// mut pinned => loop {
575 /// match unsafe { ::std::future::Future::poll(
576 /// <::std::pin::Pin>::new_unchecked(&mut pinned),
577 /// ::std::future::get_context(task_context),
579 /// ::std::task::Poll::Ready(result) => break result,
580 /// ::std::task::Poll::Pending => {}
582 /// task_context = yield ();
586 fn lower_expr_await(&mut self, await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
// `.await` is only legal inside an async generator body; outside one we
// report E0728-style errors but continue lowering for recovery.
587 match self.generator_kind {
588 Some(hir::GeneratorKind::Async(_)) => {}
589 Some(hir::GeneratorKind::Gen) | None => {
590 let mut err = struct_span_err!(
594 "`await` is only allowed inside `async` functions and blocks"
596 err.span_label(await_span, "only allowed inside `async` functions and blocks");
597 if let Some(item_sp) = self.current_item {
598 err.span_label(item_sp, "this is not `async`");
// All generated spans carry an `Await` desugaring mark.
603 let span = self.mark_span_with_reason(DesugaringKind::Await, await_span, None);
604 let gen_future_span = self.mark_span_with_reason(
605 DesugaringKind::Await,
607 self.allow_gen_future.clone(),
609 let expr = self.lower_expr(expr);
// `mut pinned` binds the lowered future so it can be polled repeatedly.
611 let pinned_ident = Ident::with_dummy_span(sym::pinned);
612 let (pinned_pat, pinned_pat_hid) =
613 self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);
615 let task_context_ident = Ident::with_dummy_span(sym::_task_context);
618 // ::std::future::Future::poll(
619 // ::std::pin::Pin::new_unchecked(&mut pinned),
620 // ::std::future::get_context(task_context),
624 let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
625 let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
626 let task_context = if let Some(task_context_hid) = self.task_context {
627 self.expr_ident_mut(span, task_context_ident, task_context_hid)
629 // Use of `await` outside of an async context, we cannot use `task_context` here.
632 let pin_ty_id = self.next_id();
633 let new_unchecked_expr_kind = self.expr_call_std_assoc_fn(
636 &[sym::pin, sym::Pin],
638 arena_vec![self; ref_mut_pinned],
640 let new_unchecked = self.expr(span, new_unchecked_expr_kind, ThinVec::new());
641 let get_context = self.expr_call_std_path_mut(
643 &[sym::future, sym::get_context],
644 arena_vec![self; task_context],
646 let call = self.expr_call_std_path(
648 &[sym::future, sym::Future, sym::poll],
649 arena_vec![self; new_unchecked, get_context],
// The whole poll call is wrapped in `unsafe` for `Pin::new_unchecked`.
651 self.arena.alloc(self.expr_unsafe(call))
654 // `::std::task::Poll::Ready(result) => break result`
655 let loop_node_id = self.resolver.next_node_id();
656 let loop_hir_id = self.lower_node_id(loop_node_id);
658 let x_ident = Ident::with_dummy_span(sym::result);
659 let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
660 let x_expr = self.expr_ident(span, x_ident, x_pat_hid);
661 let ready_pat = self.pat_std_enum(
663 &[sym::task, sym::Poll, sym::Ready],
664 arena_vec![self; x_pat],
// `break result` targets the generated loop, hence the loop scope here.
666 let break_x = self.with_loop_scope(loop_node_id, move |this| {
668 hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
669 this.arena.alloc(this.expr(await_span, expr_break, ThinVec::new()))
671 self.arm(ready_pat, break_x)
674 // `::std::task::Poll::Pending => {}`
676 let pending_pat = self.pat_std_enum(span, &[sym::task, sym::Poll, sym::Pending], &[]);
677 let empty_block = self.expr_block_empty(span);
678 self.arm(pending_pat, empty_block)
681 let inner_match_stmt = {
682 let match_expr = self.expr_match(
685 arena_vec![self; ready_arm, pending_arm],
686 hir::MatchSource::AwaitDesugar,
688 self.stmt_expr(span, match_expr)
691 // task_context = yield ();
// Yield suspends the generator; on resume the new context is written
// back into `_task_context`.
693 let unit = self.expr_unit(span);
694 let yield_expr = self.expr(
696 hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr.hir_id) }),
699 let yield_expr = self.arena.alloc(yield_expr);
701 if let Some(task_context_hid) = self.task_context {
702 let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
704 self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, span), AttrVec::new());
705 self.stmt_expr(span, assign)
707 // Use of `await` outside of an async context. Return `yield_expr` so that we can
708 // proceed with type checking.
709 self.stmt(span, hir::StmtKind::Semi(yield_expr))
713 let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);
716 let loop_expr = self.arena.alloc(hir::Expr {
718 kind: hir::ExprKind::Loop(loop_block, None, hir::LoopSource::Loop),
720 attrs: ThinVec::new(),
723 // mut pinned => loop { ... }
724 let pinned_arm = self.arm(pinned_pat, loop_expr);
727 // mut pinned => loop { .. }
729 hir::ExprKind::Match(expr, arena_vec![self; pinned_arm], hir::MatchSource::AwaitDesugar)
/// Lowers a (non-async) closure, detecting after body lowering whether it
/// turned out to be a generator (contains `yield`) and validating movability
/// accordingly.
732 fn lower_expr_closure(
734 capture_clause: CaptureBy,
735 movability: Movability,
739 ) -> hir::ExprKind<'hir> {
740 // Lower outside new scope to preserve `is_in_loop_condition`.
741 let fn_decl = self.lower_fn_decl(decl, None, false, None);
743 self.with_new_scopes(move |this| {
// `current_item` is used by diagnostics (e.g. the `.await` error label).
744 let prev = this.current_item;
745 this.current_item = Some(fn_decl_span);
746 let mut generator_kind = None;
747 let body_id = this.lower_fn_body(decl, |this| {
748 let e = this.lower_expr_mut(body);
// Capture whether lowering the body set a generator kind (yield).
749 generator_kind = this.generator_kind;
752 let generator_option =
753 this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
754 this.current_item = prev;
755 hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, generator_option)
/// Given the generator-kind discovered while lowering a closure body,
/// validates it and returns the movability to record on the HIR closure:
/// `Some(movability)` for a real generator, errors for invalid cases
/// (generator with >1 parameter, `static` on a plain closure).
759 fn generator_movability_for_fn(
763 generator_kind: Option<hir::GeneratorKind>,
764 movability: Movability,
765 ) -> Option<hir::Movability> {
766 match generator_kind {
767 Some(hir::GeneratorKind::Gen) => {
// Generators may take at most the implicit resume argument.
768 if decl.inputs.len() > 1 {
773 "too many parameters for a generator (expected 0 or 1 parameters)"
// Async bodies are lowered via `make_async_expr`, never through this
// path — reaching here with Async is a compiler bug.
779 Some(hir::GeneratorKind::Async(_)) => {
780 panic!("non-`async` closure body turned `async` during lowering");
// Plain closure: `static` is only meaningful on generators.
783 if movability == Movability::Static {
784 struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
/// Lowers `async |args| body` into an ordinary closure whose body is an
/// async block: `|args| future::from_generator(static |_task_context| body)`.
792 fn lower_expr_async_closure(
794 capture_clause: CaptureBy,
799 ) -> hir::ExprKind<'hir> {
// The outer closure's decl keeps the parameters but drops the return
// type — the `-> T` belongs to the inner async body, not the closure.
801 FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
802 // We need to lower the declaration outside the new scope, because we
803 // have to conserve the state of being inside a loop condition for the
804 // closure argument types.
805 let fn_decl = self.lower_fn_decl(&outer_decl, None, false, None);
807 self.with_new_scopes(move |this| {
808 // FIXME(cramertj): allow `async` non-`move` closures with arguments.
809 if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
814 "`async` non-`move` closures with parameters are not currently supported",
817 "consider using `let` statements to manually capture \
818 variables by reference before entering an `async move` closure",
823 // Transform `async |x: u8| -> X { ... }` into
824 // `|x: u8| future_from_generator(|| -> X { ... })`.
825 let body_id = this.lower_fn_body(&outer_decl, |this| {
827 if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
828 let async_body = this.make_async_expr(
833 hir::AsyncGeneratorKind::Closure,
834 |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
836 this.expr(fn_decl_span, async_body, ThinVec::new())
// `None` movability: the *outer* closure is not a generator.
838 hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, None)
842 /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
/// (All other range forms go through `lower_expr_range`'s struct-literal
/// desugaring; `..=` needs the constructor because its struct is opaque.)
843 fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
844 let id = self.next_id();
845 let e1 = self.lower_expr_mut(e1);
846 let e2 = self.lower_expr_mut(e2);
847 self.expr_call_std_assoc_fn(
850 &[sym::ops, sym::RangeInclusive],
852 arena_vec![self; e1, e2],
// Tail of `lower_expr_range`'s signature (the `fn` line and earlier
// parameters are not visible in this listing). Desugars every non-`..=`
// range expression into a literal of the matching `std::ops::Range*` struct
// (`RangeFull` is unit-like, so it lowers to a plain path).
862 ) -> hir::ExprKind<'hir> {
863 use rustc_ast::ast::RangeLimits::*;
// Pick the `std::ops` struct from which bounds are present + limits.
865 let path = match (e1, e2, lims) {
866 (None, None, HalfOpen) => sym::RangeFull,
867 (Some(..), None, HalfOpen) => sym::RangeFrom,
868 (None, Some(..), HalfOpen) => sym::RangeTo,
869 (Some(..), Some(..), HalfOpen) => sym::Range,
870 (None, Some(..), Closed) => sym::RangeToInclusive,
// `a..=b` is handled earlier by `lower_expr_range_closed`.
871 (Some(..), Some(..), Closed) => unreachable!(),
872 (_, None, Closed) => {
873 self.diagnostic().span_fatal(span, "inclusive range with no end").raise()
// Build `start: <e1>` / `end: <e2>` fields for whichever bounds exist.
877 let fields = self.arena.alloc_from_iter(
878 e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e))).map(|(s, e)| {
879 let expr = self.lower_expr(&e);
880 let ident = Ident::new(Symbol::intern(s), e.span);
881 self.field(ident, expr, e.span)
885 let is_unit = fields.is_empty();
886 let struct_path = [sym::ops, path];
887 let struct_path = self.std_path(span, &struct_path, None, is_unit);
888 let struct_path = hir::QPath::Resolved(None, struct_path);
// `RangeFull` has no fields → a bare path; otherwise a struct literal.
891 hir::ExprKind::Path(struct_path)
893 hir::ExprKind::Struct(self.arena.alloc(struct_path), fields, None)
/// Resolves a `break`/`continue` target: a labeled jump is looked up via the
/// resolver's label table; an unlabeled one targets the innermost loop scope.
/// Failures are encoded as `Err(LoopIdError)` inside the `Destination` rather
/// than reported here.
897 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
898 let target_id = match destination {
900 if let Some(loop_id) = self.resolver.get_label_res(id) {
901 Ok(self.lower_node_id(loop_id))
903 Err(hir::LoopIdError::UnresolvedLabel)
// Unlabeled: innermost entry of `loop_scopes`, if any.
910 .map(|id| Ok(self.lower_node_id(id)))
911 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
913 hir::Destination { label: destination.map(|(_, label)| label), target_id }
/// Like `lower_loop_destination`, but flags an unlabeled `break`/`continue`
/// appearing inside a `while` condition (where the jump target would be the
/// desugared loop itself, which is never what the user meant).
916 fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
917 if self.is_in_loop_condition && opt_label.is_none() {
920 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
923 self.lower_loop_destination(opt_label.map(|label| (id, label)))
/// Runs `f` with `catch_id` pushed on the catch-scope stack (used as the
/// `break` target for `?` inside `try` blocks), asserting balanced
/// push/pop on exit.
927 fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
928 let len = self.catch_scopes.len();
929 self.catch_scopes.push(catch_id);
931 let result = f(self);
934 self.catch_scopes.len(),
935 "catch scopes should be added and removed in stack order"
938 self.catch_scopes.pop().unwrap();
/// Runs `f` with `loop_id` pushed on the loop-scope stack (the target for
/// unlabeled `break`/`continue`), clearing and restoring
/// `is_in_loop_condition` since entering a new loop leaves any enclosing
/// loop's condition.
943 fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
944 // We're no longer in the base loop's condition; we're in another loop.
945 let was_in_loop_condition = self.is_in_loop_condition;
946 self.is_in_loop_condition = false;
948 let len = self.loop_scopes.len();
949 self.loop_scopes.push(loop_id);
951 let result = f(self);
954 self.loop_scopes.len(),
955 "loop scopes should be added and removed in stack order"
958 self.loop_scopes.pop().unwrap();
960 self.is_in_loop_condition = was_in_loop_condition;
/// Runs `f` with `is_in_loop_condition` set, so unlabeled control flow
/// lowered inside a `while` condition can be diagnosed (see
/// `lower_jump_destination`); restores the previous flag afterwards.
965 fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
966 let was_in_loop_condition = self.is_in_loop_condition;
967 self.is_in_loop_condition = true;
969 let result = f(self);
971 self.is_in_loop_condition = was_in_loop_condition;
/// Lowers an `asm!` expression to HIR, performing the semantic validation
/// that needs resolved registers: target support, operand register parsing,
/// template-modifier checks, target-feature requirements, and explicit
/// register conflict detection. Returns `hir::ExprKind::Err` if any operand
/// failed to lower.
///
/// NOTE(review): this listing is gap-sampled — several interior lines
/// (error construction bodies, some match arms, closing delimiters) are not
/// visible here; confirm against the original file.
976 fn lower_expr_asm(&mut self, sp: Span, asm: &InlineAsm) -> hir::ExprKind<'hir> {
977 if self.sess.asm_arch.is_none() {
978 struct_span_err!(self.sess, sp, E0472, "asm! is unsupported on this target").emit();
// `att_syntax` only makes sense for x86 flavors.
980 if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
983 Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64)
987 .struct_span_err(sp, "the `att_syntax` option is only supported on x86")
991 // Lower operands to HIR, filter_map skips any operands with invalid
993 let sess = self.sess;
994 let operands: Vec<_> = asm
997 .filter_map(|(op, op_sp)| {
// Parse an explicit register name or a register class; emits an
// error and yields `None` (dropping the operand) on failure.
998 let lower_reg = |reg| {
1000 InlineAsmRegOrRegClass::Reg(s) => asm::InlineAsmRegOrRegClass::Reg(
1001 asm::InlineAsmReg::parse(
1003 |feature| sess.target_features.contains(&Symbol::intern(feature)),
1004 &sess.target.target,
1008 let msg = format!("invalid register `{}`: {}", s.as_str(), e);
1009 sess.struct_span_err(*op_sp, &msg).emit();
1013 InlineAsmRegOrRegClass::RegClass(s) => {
1014 asm::InlineAsmRegOrRegClass::RegClass(
1015 asm::InlineAsmRegClass::parse(sess.asm_arch?, s)
1018 "invalid register class `{}`: {}",
1022 sess.struct_span_err(*op_sp, &msg).emit();
1030 // lower_reg is executed last because we need to lower all
1031 // sub-expressions even if we throw them away later.
1032 let op = match *op {
1033 InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
1034 expr: self.lower_expr_mut(expr),
1035 reg: lower_reg(reg)?,
1037 InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
1039 expr: expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
1040 reg: lower_reg(reg)?,
1042 InlineAsmOperand::InOut { reg, late, ref expr } => {
1043 hir::InlineAsmOperand::InOut {
1045 expr: self.lower_expr_mut(expr),
1046 reg: lower_reg(reg)?,
1049 InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
1050 hir::InlineAsmOperand::SplitInOut {
1052 in_expr: self.lower_expr_mut(in_expr),
1053 out_expr: out_expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
1054 reg: lower_reg(reg)?,
1057 InlineAsmOperand::Const { ref expr } => {
1058 hir::InlineAsmOperand::Const { expr: self.lower_expr_mut(expr) }
1060 InlineAsmOperand::Sym { ref expr } => {
1061 hir::InlineAsmOperand::Sym { expr: self.lower_expr_mut(expr) }
1068 // Stop if there were any errors when lowering the register classes
1069 if operands.len() != asm.operands.len() {
1070 return hir::ExprKind::Err;
1073 // Validate template modifiers against the register classes for the operands
// `unwrap` is safe relative to the earlier `asm_arch.is_none()` check
// having emitted an error — presumably lowering bails before this point
// on that path; confirm against the omitted lines.
1074 let asm_arch = sess.asm_arch.unwrap();
1075 for p in &asm.template {
1076 if let InlineAsmTemplatePiece::Placeholder {
1078 modifier: Some(modifier),
1079 span: placeholder_span,
1082 let op_sp = asm.operands[operand_idx].1;
1083 match &operands[operand_idx] {
1084 hir::InlineAsmOperand::In { reg, .. }
1085 | hir::InlineAsmOperand::Out { reg, .. }
1086 | hir::InlineAsmOperand::InOut { reg, .. }
1087 | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
1088 let class = reg.reg_class();
1089 let valid_modifiers = class.valid_modifiers(asm_arch);
1090 if !valid_modifiers.contains(&modifier) {
1091 let mut err = sess.struct_span_err(
1093 "invalid asm template modifier for this register class",
1095 err.span_label(placeholder_span, "template modifier");
1096 err.span_label(op_sp, "argument");
// Suggest the full list of modifiers this class accepts.
1097 if !valid_modifiers.is_empty() {
1098 let mut mods = format!("`{}`", valid_modifiers[0]);
1099 for m in &valid_modifiers[1..] {
1100 let _ = write!(mods, ", `{}`", m);
1103 "the `{}` register class supports \
1104 the following template modifiers: {}",
1110 "the `{}` register class does not support template modifiers",
// Modifiers are meaningless on `const`/`sym` operands.
1117 hir::InlineAsmOperand::Const { .. } => {
1118 let mut err = sess.struct_span_err(
1120 "asm template modifiers are not allowed for `const` arguments",
1122 err.span_label(placeholder_span, "template modifier");
1123 err.span_label(op_sp, "argument");
1126 hir::InlineAsmOperand::Sym { .. } => {
1127 let mut err = sess.struct_span_err(
1129 "asm template modifiers are not allowed for `sym` arguments",
1131 err.span_label(placeholder_span, "template modifier");
1132 err.span_label(op_sp, "argument");
// Track which hard registers are claimed as inputs vs outputs to detect
// conflicting uses of overlapping registers.
1139 let mut used_input_regs = FxHashMap::default();
1140 let mut used_output_regs = FxHashMap::default();
1141 for (idx, op) in operands.iter().enumerate() {
1142 let op_sp = asm.operands[idx].1;
1143 if let Some(reg) = op.reg() {
1144 // Validate register classes against currently enabled target
1145 // features. We check that at least one type is available for
1146 // the current target.
1147 let reg_class = reg.reg_class();
1148 let mut required_features = vec![];
1149 for &(_, feature) in reg_class.supported_types(asm_arch) {
1150 if let Some(feature) = feature {
// One enabled feature supporting the class suffices.
1151 if self.sess.target_features.contains(&Symbol::intern(feature)) {
1152 required_features.clear();
1155 required_features.push(feature);
// A feature-less supported type means the class always works.
1158 required_features.clear();
// Dedup so the diagnostic lists each missing feature once.
1162 required_features.sort();
1163 required_features.dedup();
1164 match &required_features[..] {
1168 "register class `{}` requires the `{}` target feature",
1172 sess.struct_span_err(op_sp, &msg).emit();
1176 "register class `{}` requires at least one target feature: {}",
1180 sess.struct_span_err(op_sp, &msg).emit();
1184 // Check for conflicts between explicit register operands.
1185 if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
1186 let (input, output) = match op {
1187 hir::InlineAsmOperand::In { .. } => (true, false),
1188 // Late output do not conflict with inputs, but normal outputs do
1189 hir::InlineAsmOperand::Out { late, .. } => (!late, true),
1190 hir::InlineAsmOperand::InOut { .. }
1191 | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),
1192 hir::InlineAsmOperand::Const { .. } | hir::InlineAsmOperand::Sym { .. } => {
1197 // Flag to output the error only once per operand
1198 let mut skip = false;
// Registers may alias (e.g. sub-registers); check every overlap.
1199 reg.overlapping_regs(|r| {
1200 let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
1202 match used_regs.entry(r) {
1203 Entry::Occupied(o) => {
1207 let idx2 = *o.get();
1208 let op2 = &operands[idx2];
1209 let op_sp2 = asm.operands[idx2].1;
1210 let reg2 = match op2.reg() {
1211 Some(asm::InlineAsmRegOrRegClass::Reg(r)) => r,
1212 _ => unreachable!(),
1216 "register `{}` conflicts with register `{}`",
1220 let mut err = sess.struct_span_err(op_sp, &msg);
1223 &format!("register `{}`", reg.name()),
1227 &format!("register `{}`", reg2.name()),
// If the conflict is an in/out pair, suggest `lateout`.
1232 hir::InlineAsmOperand::In { .. },
1233 hir::InlineAsmOperand::Out { late, .. },
1236 hir::InlineAsmOperand::Out { late, .. },
1237 hir::InlineAsmOperand::In { .. },
1240 let out_op_sp = if input { op_sp2 } else { op_sp };
1241 let msg = "use `lateout` instead of \
1242 `out` to avoid conflict";
1243 err.span_help(out_op_sp, msg);
1251 Entry::Vacant(v) => {
1257 check(&mut used_input_regs, true);
1260 check(&mut used_output_regs, false);
// Arena-allocate the validated pieces and build the HIR node.
1267 let operands = self.arena.alloc_from_iter(operands);
1268 let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
1269 let line_spans = self.arena.alloc_slice(&asm.line_spans[..]);
1270 let hir_asm = hir::InlineAsm { template, operands, options: asm.options, line_spans };
1271 hir::ExprKind::InlineAsm(self.arena.alloc(hir_asm))
// Lowers the legacy `llvm_asm!` form to HIR. The constraint strings, clobbers,
// and flags are copied into `LlvmInlineAsmInner`; the operand *expressions* are
// lowered separately below and stored alongside it in `hir::LlvmInlineAsm`.
1274 fn lower_expr_llvm_asm(&mut self, asm: &LlvmInlineAsm) -> hir::ExprKind<'hir> {
1275 let inner = hir::LlvmInlineAsmInner {
// Keep only each input's constraint string here; the paired expression
// is lowered into `inputs_exprs` further down.
1276 inputs: asm.inputs.iter().map(|&(c, _)| c).collect(),
1280 .map(|out| hir::LlvmInlineAsmOutput {
1281 constraint: out.constraint,
1283 is_indirect: out.is_indirect,
1284 span: out.expr.span,
1288 asm_str_style: asm.asm_str_style,
1289 clobbers: asm.clobbers.clone(),
1290 volatile: asm.volatile,
1291 alignstack: asm.alignstack,
1292 dialect: asm.dialect,
1294 let hir_asm = hir::LlvmInlineAsm {
// Lower the input/output expressions into the HIR arena.
1296 inputs_exprs: self.arena.alloc_from_iter(
1297 asm.inputs.iter().map(|&(_, ref input)| self.lower_expr_mut(input)),
1301 .alloc_from_iter(asm.outputs.iter().map(|out| self.lower_expr_mut(&out.expr))),
1303 hir::ExprKind::LlvmInlineAsm(self.arena.alloc(hir_asm))
// Lowers a single struct-literal field: fresh HIR id, lowered value
// expression, and the shorthand flag (`Foo { x }` vs `Foo { x: x }`) preserved.
1306 fn lower_field(&mut self, f: &Field) -> hir::Field<'hir> {
1308 hir_id: self.next_id(),
1310 expr: self.lower_expr(&f.expr),
1312 is_shorthand: f.is_shorthand,
// Lowers a `yield` expression. Seeing a `yield` is what classifies the
// enclosing body as a generator: if no kind is recorded yet, record `Gen`;
// yielding inside an `async` body is rejected with an error.
1316 fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1317 match self.generator_kind {
1318 Some(hir::GeneratorKind::Gen) => {}
1319 Some(hir::GeneratorKind::Async(_)) => {
1324 "`async` generators are not yet supported"
// First `yield` encountered: mark this body as a generator.
1328 None => self.generator_kind = Some(hir::GeneratorKind::Gen),
// `yield` with no operand yields the unit value.
1332 opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1334 hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1337 /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
1340 /// let result = match ::std::iter::IntoIterator::into_iter(<head>) {
1342 /// [opt_ident]: loop {
1344 /// match ::std::iter::Iterator::next(&mut iter) {
1345 /// ::std::option::Option::Some(val) => __next = val,
1346 /// ::std::option::Option::None => break
1348 /// let <pat> = __next;
1349 /// StmtKind::Expr(<body>);
1362 opt_label: Option<Label>,
1363 ) -> hir::Expr<'hir> {
1364 let orig_head_span = head.span;
// Lower the head expression first, then mark its span as the
// for-loop-head desugaring so diagnostics can point back at the source.
1366 let mut head = self.lower_expr_mut(head);
1367 let desugared_span = self.mark_span_with_reason(
1368 DesugaringKind::ForLoop(ForLoopLoc::Head),
1372 head.span = desugared_span;
// `iter` and `__next` are the synthetic bindings used by the desugaring
// (dummy spans: they have no source-text counterpart).
1374 let iter = Ident::with_dummy_span(sym::iter);
1376 let next_ident = Ident::with_dummy_span(sym::__next);
1377 let (next_pat, next_pat_hid) = self.pat_ident_binding_mode(
1380 hir::BindingAnnotation::Mutable,
1383 // `::std::option::Option::Some(val) => __next = val`
1385 let val_ident = Ident::with_dummy_span(sym::val);
1386 let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident);
1387 let val_expr = self.expr_ident(pat.span, val_ident, val_pat_hid);
1388 let next_expr = self.expr_ident(pat.span, next_ident, next_pat_hid);
1389 let assign = self.arena.alloc(self.expr(
1391 hir::ExprKind::Assign(next_expr, val_expr, pat.span),
1394 let some_pat = self.pat_some(pat.span, val_pat);
1395 self.arm(some_pat, assign)
1398 // `::std::option::Option::None => break`
// The `break` targets the desugared loop, hence the loop scope here.
1401 self.with_loop_scope(e.id, |this| this.expr_break(e.span, ThinVec::new()));
1402 let pat = self.pat_none(e.span);
1403 self.arm(pat, break_expr)
1407 let (iter_pat, iter_pat_nid) =
1408 self.pat_ident_binding_mode(desugared_span, iter, hir::BindingAnnotation::Mutable);
1410 // `match ::std::iter::Iterator::next(&mut iter) { ... }`
1412 let iter = self.expr_ident(desugared_span, iter, iter_pat_nid);
1413 let ref_mut_iter = self.expr_mut_addr_of(desugared_span, iter);
1414 let next_path = &[sym::iter, sym::Iterator, sym::next];
1416 self.expr_call_std_path(desugared_span, next_path, arena_vec![self; ref_mut_iter]);
1417 let arms = arena_vec![self; pat_arm, break_arm];
1419 self.expr_match(desugared_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
1421 let match_stmt = self.stmt_expr(desugared_span, match_expr);
// `let mut __next;` — written by the `Some` arm above, read by `let <pat>` below.
1423 let next_expr = self.expr_ident(desugared_span, next_ident, next_pat_hid);
1426 let next_let = self.stmt_let_pat(
1431 hir::LocalSource::ForLoopDesugar,
1434 // `let <pat> = __next`
1435 let pat = self.lower_pat(pat);
1436 let pat_let = self.stmt_let_pat(
1441 hir::LocalSource::ForLoopDesugar,
// The user's loop body, lowered inside the same loop scope so `break`/
// `continue` in it resolve to this loop.
1444 let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
1445 let body_expr = self.expr_block(body_block, ThinVec::new());
1446 let body_stmt = self.stmt_expr(body.span, body_expr);
1448 let loop_block = self.block_all(
1450 arena_vec![self; next_let, match_stmt, pat_let, body_stmt],
1454 // `[opt_ident]: loop { ... }`
1455 let kind = hir::ExprKind::Loop(loop_block, opt_label, hir::LoopSource::ForLoop);
1456 let loop_expr = self.arena.alloc(hir::Expr {
// The loop reuses the original for-expression's node id so labels and
// break targets resolved against the AST still line up.
1457 hir_id: self.lower_node_id(e.id),
1460 attrs: ThinVec::new(),
1463 // `mut iter => { ... }`
1464 let iter_arm = self.arm(iter_pat, loop_expr);
1466 let into_iter_span = self.mark_span_with_reason(
1467 DesugaringKind::ForLoop(ForLoopLoc::IntoIter),
1472 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
1473 let into_iter_expr = {
1474 let into_iter_path = &[sym::iter, sym::IntoIterator, sym::into_iter];
1475 self.expr_call_std_path(into_iter_span, into_iter_path, arena_vec![self; head])
1478 let match_expr = self.arena.alloc(self.expr_match(
1481 arena_vec![self; iter_arm],
1482 hir::MatchSource::ForLoopDesugar,
1485 // This is effectively `{ let _result = ...; _result }`.
1486 // The construct was introduced in #21984 and is necessary to make sure that
1487 // temporaries in the `head` expression are dropped and do not leak to the
1488 // surrounding scope of the `match` since the `match` is not a terminating scope.
1490 // Also, add the attributes to the outer returned expr node.
1491 self.expr_drop_temps_mut(desugared_span, match_expr, e.attrs.clone())
1494 /// Desugar `ExprKind::Try` from: `<expr>?` into:
1496 /// match Try::into_result(<expr>) {
1497 /// Ok(val) => #[allow(unreachable_code)] val,
1498 /// Err(err) => #[allow(unreachable_code)]
1499 /// // If there is an enclosing `try {...}`:
1500 /// break 'catch_target Try::from_error(From::from(err)),
1502 /// return Try::from_error(From::from(err)),
1505 fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
// Spans for the desugared code are marked as question-mark desugaring and
// carry `allow_try_trait` so the unstable `Try` items may be referenced.
1506 let unstable_span = self.mark_span_with_reason(
1507 DesugaringKind::QuestionMark,
1509 self.allow_try_trait.clone(),
// `end_point(span)` narrows to the trailing `?` token itself.
1511 let try_span = self.sess.source_map().end_point(span);
1512 let try_span = self.mark_span_with_reason(
1513 DesugaringKind::QuestionMark,
1515 self.allow_try_trait.clone(),
1518 // `Try::into_result(<expr>)`
1521 let sub_expr = self.lower_expr_mut(sub_expr);
1523 let path = &[sym::ops, sym::Try, sym::into_result];
1524 self.expr_call_std_path(unstable_span, path, arena_vec![self; sub_expr])
1527 // `#[allow(unreachable_code)]`
1529 // `allow(unreachable_code)`
1531 let allow_ident = Ident::new(sym::allow, span);
1532 let uc_ident = Ident::new(sym::unreachable_code, span);
1533 let uc_nested = attr::mk_nested_word_item(uc_ident);
1534 attr::mk_list_item(allow_ident, vec![uc_nested])
1536 attr::mk_attr_outer(allow)
1538 let attrs = vec![attr];
1540 // `Ok(val) => #[allow(unreachable_code)] val,`
1542 let val_ident = Ident::with_dummy_span(sym::val);
1543 let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
1544 let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
1548 ThinVec::from(attrs.clone()),
1550 let ok_pat = self.pat_ok(span, val_pat);
1551 self.arm(ok_pat, val_expr)
1554 // `Err(err) => #[allow(unreachable_code)]
1555 // return Try::from_error(From::from(err)),`
1557 let err_ident = Ident::with_dummy_span(sym::err);
1558 let (err_local, err_local_nid) = self.pat_ident(try_span, err_ident);
// `From::from(err)` — the error-conversion step of `?`.
1560 let from_path = &[sym::convert, sym::From, sym::from];
1561 let err_expr = self.expr_ident_mut(try_span, err_ident, err_local_nid);
1562 self.expr_call_std_path(try_span, from_path, arena_vec![self; err_expr])
1565 self.wrap_in_try_constructor(sym::from_error, unstable_span, from_expr, try_span);
1566 let thin_attrs = ThinVec::from(attrs);
// Inside a `try { ... }` block the error arm breaks to the enclosing catch
// scope; otherwise it returns from the function.
1567 let catch_scope = self.catch_scopes.last().copied();
1568 let ret_expr = if let Some(catch_node) = catch_scope {
1569 let target_id = Ok(self.lower_node_id(catch_node));
1570 self.arena.alloc(self.expr(
1572 hir::ExprKind::Break(
1573 hir::Destination { label: None, target_id },
1574 Some(from_err_expr),
1579 self.arena.alloc(self.expr(
1581 hir::ExprKind::Ret(Some(from_err_expr)),
1586 let err_pat = self.pat_err(try_span, err_local);
1587 self.arm(err_pat, ret_expr)
1590 hir::ExprKind::Match(
1592 arena_vec![self; err_arm, ok_arm],
1593 hir::MatchSource::TryDesugar,
1597 // =========================================================================
1598 // Helper methods for building HIR.
1599 // =========================================================================
1601 /// Constructs a `true` or `false` literal expression.
1602 pub(super) fn expr_bool(&mut self, span: Span, val: bool) -> &'hir hir::Expr<'hir> {
1603 let lit = Spanned { span, node: LitKind::Bool(val) };
1604 self.arena.alloc(self.expr(span, hir::ExprKind::Lit(lit), ThinVec::new()))
1607 /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1609 /// In terms of drop order, it has the same effect as wrapping `expr` in
1610 /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1612 /// The drop order can be important in e.g. `if expr { .. }`.
1613 pub(super) fn expr_drop_temps(
1616 expr: &'hir hir::Expr<'hir>,
1618 ) -> &'hir hir::Expr<'hir> {
// Arena-allocating wrapper around `expr_drop_temps_mut`.
1619 self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
// By-value variant of `expr_drop_temps`: builds the `DropTemps` node
// without arena-allocating the result.
1622 pub(super) fn expr_drop_temps_mut(
1625 expr: &'hir hir::Expr<'hir>,
1627 ) -> hir::Expr<'hir> {
1628 self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
// (tail of `fn expr_match` — signature start not visible in this extract)
// Builds a `match` expression over `arg` with the given `arms`; `source`
// records which desugaring (if any) produced it, for diagnostics.
1634 arg: &'hir hir::Expr<'hir>,
1635 arms: &'hir [hir::Arm<'hir>],
1636 source: hir::MatchSource,
1637 ) -> hir::Expr<'hir> {
1638 self.expr(span, hir::ExprKind::Match(arg, arms, source), ThinVec::new())
1641 fn expr_break(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
1642 let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
1643 self.arena.alloc(self.expr(span, expr_break, attrs))
// Builds `&mut e` (a mutable borrow expression) around the given expression.
1646 fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1649 hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
1654 fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
1655 self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), ThinVec::new()))
// (tail of `fn expr_call_mut` — signature start not visible in this extract)
// Builds a call expression `e(args...)` by value (not arena-allocated).
1661 e: &'hir hir::Expr<'hir>,
1662 args: &'hir [hir::Expr<'hir>],
1663 ) -> hir::Expr<'hir> {
1664 self.expr(span, hir::ExprKind::Call(e, args), ThinVec::new())
// (tail of `fn expr_call` — signature start not visible in this extract)
// Arena-allocating wrapper around `expr_call_mut`.
1670 e: &'hir hir::Expr<'hir>,
1671 args: &'hir [hir::Expr<'hir>],
1672 ) -> &'hir hir::Expr<'hir> {
1673 self.arena.alloc(self.expr_call_mut(span, e, args))
1676 // Note: associated functions must use `expr_call_std_path`.
// Builds a call to a free function named by an `std` path (e.g.
// `::std::iter::Iterator::next` given its path components), by value.
1677 fn expr_call_std_path_mut(
1680 path_components: &[Symbol],
1681 args: &'hir [hir::Expr<'hir>],
1682 ) -> hir::Expr<'hir> {
1684 self.arena.alloc(self.expr_std_path(span, path_components, None, ThinVec::new()));
1685 self.expr_call_mut(span, path, args)
// Arena-allocating wrapper around `expr_call_std_path_mut`.
1688 fn expr_call_std_path(
1691 path_components: &[Symbol],
1692 args: &'hir [hir::Expr<'hir>],
1693 ) -> &'hir hir::Expr<'hir> {
1694 self.arena.alloc(self.expr_call_std_path_mut(span, path_components, args))
1697 // Create an expression calling an associated function of an std type.
1699 // Associated functions cannot be resolved through the normal `std_path` function,
1700 // as they are resolved differently and so cannot use `expr_call_std_path`.
1702 // This function accepts the path component (`ty_path_components`) separately from
1703 // the name of the associated function (`assoc_fn_name`) in order to facilitate
1704 // separate resolution of the type and creation of a path referring to its associated
1706 fn expr_call_std_assoc_fn(
1708 ty_path_id: hir::HirId,
1710 ty_path_components: &[Symbol],
1711 assoc_fn_name: &str,
1712 args: &'hir [hir::Expr<'hir>],
1713 ) -> hir::ExprKind<'hir> {
// Resolve the std type itself, then hang the associated-function segment
// off it as a type-relative path (`<Ty>::assoc_fn`).
1714 let ty_path = self.std_path(span, ty_path_components, None, false);
1716 self.arena.alloc(self.ty_path(ty_path_id, span, hir::QPath::Resolved(None, ty_path)));
1717 let fn_seg = self.arena.alloc(hir::PathSegment::from_ident(Ident::from_str(assoc_fn_name)));
1718 let fn_path = hir::QPath::TypeRelative(ty, fn_seg);
1720 self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), ThinVec::new()));
1721 hir::ExprKind::Call(fn_expr, args)
// (tail of `fn expr_std_path` — signature start not visible in this extract)
// Builds a path expression referring to an `std` item, resolving the
// path components via `std_path` (optionally with generic `params`).
1727 components: &[Symbol],
1728 params: Option<&'hir hir::GenericArgs<'hir>>,
1730 ) -> hir::Expr<'hir> {
1731 let path = self.std_path(span, components, params, true);
1732 self.expr(span, hir::ExprKind::Path(hir::QPath::Resolved(None, path)), attrs)
// Arena-allocating wrapper around `expr_ident_mut`: a path expression
// referring to the local binding `binding`.
1735 pub(super) fn expr_ident(
1739 binding: hir::HirId,
1740 ) -> &'hir hir::Expr<'hir> {
1741 self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
// By-value variant of `expr_ident`; delegates with no attributes.
1744 pub(super) fn expr_ident_mut(
1748 binding: hir::HirId,
1749 ) -> hir::Expr<'hir> {
1750 self.expr_ident_with_attrs(sp, ident, binding, ThinVec::new())
// Builds a path expression for a local binding, resolved directly to
// `Res::Local(binding)` (no name resolution needed), carrying `attrs`.
1753 fn expr_ident_with_attrs(
1757 binding: hir::HirId,
1759 ) -> hir::Expr<'hir> {
1760 let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
1762 self.arena.alloc(hir::Path {
1764 res: Res::Local(binding),
// A single-segment path: just the identifier itself.
1765 segments: arena_vec![self; hir::PathSegment::from_ident(ident)],
1769 self.expr(span, expr_path, attrs)
// Wraps `expr` in a compiler-generated `unsafe { ... }` block expression.
1772 fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
1773 let hir_id = self.next_id();
1774 let span = expr.span;
1777 hir::ExprKind::Block(
1778 self.arena.alloc(hir::Block {
// Marked as compiler-generated so lints treat it differently from
// user-written `unsafe` blocks.
1782 rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
1784 targeted_by_break: false,
1792 fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
1793 let blk = self.block_all(span, &[], None);
1794 let expr = self.expr_block(blk, ThinVec::new());
1795 self.arena.alloc(expr)
// Wraps an already-lowered block `b` in an unlabeled block expression,
// reusing the block's own span.
1798 pub(super) fn expr_block(
1800 b: &'hir hir::Block<'hir>,
1802 ) -> hir::Expr<'hir> {
1803 self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
// (tail of `fn expr` — signature start not visible in this extract)
// The fundamental expression constructor: assembles a `hir::Expr` with a
// freshly assigned HIR id. All other `expr_*` helpers bottom out here.
1809 kind: hir::ExprKind<'hir>,
1811 ) -> hir::Expr<'hir> {
1812 hir::Expr { hir_id: self.next_id(), kind, span, attrs }
1815 fn field(&mut self, ident: Ident, expr: &'hir hir::Expr<'hir>, span: Span) -> hir::Field<'hir> {
1816 hir::Field { hir_id: self.next_id(), ident, span, expr, is_shorthand: false }
1819 fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
1821 hir_id: self.next_id(),