1 use super::{ImplTraitContext, LoweringContext, ParamMode, ParenthesizedGenericArgs};
4 use rustc_ast::ptr::P as AstP;
6 use rustc_data_structures::fx::FxHashMap;
7 use rustc_data_structures::stack::ensure_sufficient_stack;
8 use rustc_data_structures::thin_vec::ThinVec;
9 use rustc_errors::struct_span_err;
11 use rustc_hir::def::Res;
12 use rustc_hir::definitions::DefPathData;
13 use rustc_session::parse::feature_err;
14 use rustc_span::hygiene::ExpnId;
15 use rustc_span::source_map::{respan, DesugaringKind, Span, Spanned};
16 use rustc_span::symbol::{sym, Ident, Symbol};
17 use rustc_span::{hygiene::ForLoopLoc, DUMMY_SP};
18 use rustc_target::asm;
19 use std::collections::hash_map::Entry;
22 impl<'hir> LoweringContext<'_, 'hir> {
// Lower a slice of AST expressions into an arena-allocated slice of HIR
// expressions, lowering each element by value via `lower_expr_mut`.
23 fn lower_exprs(&mut self, exprs: &[AstP<Expr>]) -> &'hir [hir::Expr<'hir>] {
24 self.arena.alloc_from_iter(exprs.iter().map(|x| self.lower_expr_mut(x)))
// Lower a single AST expression and allocate the result in the HIR arena,
// returning a reference with the `'hir` lifetime.
27 pub(super) fn lower_expr(&mut self, e: &Expr) -> &'hir hir::Expr<'hir> {
28 self.arena.alloc(self.lower_expr_mut(e))
// Core expression-lowering dispatch: maps each `ast::ExprKind` to its
// `hir::ExprKind`, desugaring surface forms (if-let, while, for, try,
// `.await`, async blocks/closures, destructuring assignment, ranges)
// through the dedicated helpers below.
// NOTE(review): this extraction is missing interior lines (the embedded
// original line numbers are non-contiguous) — do not treat it as compilable.
31 pub(super) fn lower_expr_mut(&mut self, e: &Expr) -> hir::Expr<'hir> {
// Lowering recurses on deeply nested expressions; grow the stack if needed.
32 ensure_sufficient_stack(|| {
33 let kind = match e.kind {
34 ExprKind::Box(ref inner) => hir::ExprKind::Box(self.lower_expr(inner)),
35 ExprKind::Array(ref exprs) => hir::ExprKind::Array(self.lower_exprs(exprs)),
36 ExprKind::ConstBlock(ref anon_const) => {
37 let anon_const = self.lower_anon_const(anon_const);
38 hir::ExprKind::ConstBlock(anon_const)
40 ExprKind::Repeat(ref expr, ref count) => {
41 let expr = self.lower_expr(expr);
42 let count = self.lower_anon_const(count);
43 hir::ExprKind::Repeat(expr, count)
45 ExprKind::Tup(ref elts) => hir::ExprKind::Tup(self.lower_exprs(elts)),
// Calls to functions with "legacy const generics" (positional const
// arguments) take a special path that moves those arguments into the
// path's generic args; ordinary calls lower callee and args directly.
46 ExprKind::Call(ref f, ref args) => {
47 if let Some(legacy_args) = self.resolver.legacy_const_generic_args(f) {
48 self.lower_legacy_const_generics((**f).clone(), args.clone(), &legacy_args)
50 let f = self.lower_expr(f);
51 hir::ExprKind::Call(f, self.lower_exprs(args))
54 ExprKind::MethodCall(ref seg, ref args, span) => {
55 let hir_seg = self.arena.alloc(self.lower_path_segment(
60 ParenthesizedGenericArgs::Err,
61 ImplTraitContext::disallowed(),
64 let args = self.lower_exprs(args);
65 hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args, span)
67 ExprKind::Binary(binop, ref lhs, ref rhs) => {
68 let binop = self.lower_binop(binop);
69 let lhs = self.lower_expr(lhs);
70 let rhs = self.lower_expr(rhs);
71 hir::ExprKind::Binary(binop, lhs, rhs)
73 ExprKind::Unary(op, ref ohs) => {
74 let op = self.lower_unop(op);
75 let ohs = self.lower_expr(ohs);
76 hir::ExprKind::Unary(op, ohs)
78 ExprKind::Lit(ref l) => hir::ExprKind::Lit(respan(l.span, l.kind.clone())),
79 ExprKind::Cast(ref expr, ref ty) => {
80 let expr = self.lower_expr(expr);
81 let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
82 hir::ExprKind::Cast(expr, ty)
84 ExprKind::Type(ref expr, ref ty) => {
85 let expr = self.lower_expr(expr);
86 let ty = self.lower_ty(ty, ImplTraitContext::disallowed());
87 hir::ExprKind::Type(expr, ty)
89 ExprKind::AddrOf(k, m, ref ohs) => {
90 let ohs = self.lower_expr(ohs);
91 hir::ExprKind::AddrOf(k, m, ohs)
// A bare `let` expression outside a supported position is an error;
// `lower_expr_let` reports it and lowers to a recovery `match`.
93 ExprKind::Let(ref pat, ref scrutinee) => {
94 self.lower_expr_let(e.span, pat, scrutinee)
// `if` dispatches on the condition: `if let` desugars to a `match`;
// a parenthesized `let` gets a dedicated diagnostic first.
96 ExprKind::If(ref cond, ref then, ref else_opt) => match cond.kind {
97 ExprKind::Let(ref pat, ref scrutinee) => {
98 self.lower_expr_if_let(e.span, pat, scrutinee, then, else_opt.as_deref())
100 ExprKind::Paren(ref paren) => match paren.peel_parens().kind {
101 ExprKind::Let(ref pat, ref scrutinee) => {
102 // A user has written `if (let Some(x) = foo) {`, we want to avoid
103 // confusing them with mentions of nightly features.
104 // If this logic is changed, you will also likely need to touch
105 // `unused::UnusedParens::check_expr`.
106 self.if_let_expr_with_parens(cond, &paren.peel_parens());
107 self.lower_expr_if_let(
115 _ => self.lower_expr_if(cond, then, else_opt.as_deref()),
117 _ => self.lower_expr_if(cond, then, else_opt.as_deref()),
// `while` bodies AND conditions are lowered inside the loop scope so
// `break`/`continue` in the condition resolve to this loop.
119 ExprKind::While(ref cond, ref body, opt_label) => self
120 .with_loop_scope(e.id, |this| {
121 this.lower_expr_while_in_loop_scope(e.span, cond, body, opt_label)
123 ExprKind::Loop(ref body, opt_label) => self.with_loop_scope(e.id, |this| {
125 this.lower_block(body, false),
127 hir::LoopSource::Loop,
131 ExprKind::TryBlock(ref body) => self.lower_expr_try_block(body),
132 ExprKind::Match(ref expr, ref arms) => hir::ExprKind::Match(
133 self.lower_expr(expr),
134 self.arena.alloc_from_iter(arms.iter().map(|x| self.lower_arm(x))),
135 hir::MatchSource::Normal,
// `async { .. }` lowers to a generator wrapped for `Future` (see
// `make_async_expr`).
137 ExprKind::Async(capture_clause, closure_node_id, ref block) => self
143 hir::AsyncGeneratorKind::Block,
144 |this| this.with_new_scopes(|this| this.lower_block_expr(block)),
146 ExprKind::Await(ref expr) => self.lower_expr_await(e.span, expr),
// Closures: async closures take a separate lowering path.
155 if let Async::Yes { closure_id, .. } = asyncness {
156 self.lower_expr_async_closure(
164 self.lower_expr_closure(
173 ExprKind::Block(ref blk, opt_label) => {
174 hir::ExprKind::Block(self.lower_block(blk, opt_label.is_some()), opt_label)
176 ExprKind::Assign(ref el, ref er, span) => {
177 self.lower_expr_assign(el, er, span, e.span)
179 ExprKind::AssignOp(op, ref el, ref er) => hir::ExprKind::AssignOp(
180 self.lower_binop(op),
184 ExprKind::Field(ref el, ident) => hir::ExprKind::Field(self.lower_expr(el), ident),
185 ExprKind::Index(ref el, ref er) => {
186 hir::ExprKind::Index(self.lower_expr(el), self.lower_expr(er))
// Closed ranges with both endpoints get the `RangeInclusive::new`
// desugaring; all other range forms go through `lower_expr_range`.
188 ExprKind::Range(Some(ref e1), Some(ref e2), RangeLimits::Closed) => {
189 self.lower_expr_range_closed(e.span, e1, e2)
191 ExprKind::Range(ref e1, ref e2, lims) => {
192 self.lower_expr_range(e.span, e1.as_deref(), e2.as_deref(), lims)
// `_` is only valid as an assignment LHS; elsewhere it is an error.
194 ExprKind::Underscore => {
198 "in expressions, `_` can only be used on the left-hand side of an assignment",
200 .span_label(e.span, "`_` not allowed here")
204 ExprKind::Path(ref qself, ref path) => {
205 let qpath = self.lower_qpath(
210 ImplTraitContext::disallowed(),
212 hir::ExprKind::Path(qpath)
214 ExprKind::Break(opt_label, ref opt_expr) => {
215 let opt_expr = opt_expr.as_ref().map(|x| self.lower_expr(x));
216 hir::ExprKind::Break(self.lower_jump_destination(e.id, opt_label), opt_expr)
218 ExprKind::Continue(opt_label) => {
219 hir::ExprKind::Continue(self.lower_jump_destination(e.id, opt_label))
221 ExprKind::Ret(ref e) => {
222 let e = e.as_ref().map(|x| self.lower_expr(x))
223 hir::ExprKind::Ret(e)
225 ExprKind::InlineAsm(ref asm) => self.lower_expr_asm(e.span, asm),
226 ExprKind::LlvmInlineAsm(ref asm) => self.lower_expr_llvm_asm(asm),
// Struct literals: `..base` lowers the base expr; a bare `..` with no
// base is an error recovered with an error expression.
227 ExprKind::Struct(ref se) => {
228 let rest = match &se.rest {
229 StructRest::Base(e) => Some(self.lower_expr(e)),
230 StructRest::Rest(sp) => {
232 .struct_span_err(*sp, "base expression required after `..`")
233 .span_label(*sp, "add a base expression here")
235 Some(&*self.arena.alloc(self.expr_err(*sp)))
237 StructRest::None => None,
239 hir::ExprKind::Struct(
240 self.arena.alloc(self.lower_qpath(
245 ImplTraitContext::disallowed(),
248 .alloc_from_iter(se.fields.iter().map(|x| self.lower_expr_field(x))),
252 ExprKind::Yield(ref opt_expr) => self.lower_expr_yield(e.span, opt_expr.as_deref()),
253 ExprKind::Err => hir::ExprKind::Err,
254 ExprKind::Try(ref sub_expr) => self.lower_expr_try(e.span, sub_expr),
// Parens vanish in HIR: lower the inner expr, widen its span to the
// paren span when appropriate, and merge the outer attributes in.
255 ExprKind::Paren(ref ex) => {
256 let mut ex = self.lower_expr_mut(ex);
257 // Include parens in span, but only if it is a super-span.
258 if e.span.contains(ex.span) {
261 // Merge attributes into the inner expression.
262 if !e.attrs.is_empty() {
263 let old_attrs = self.attrs.get(&ex.hir_id).map(|la| *la).unwrap_or(&[]);
266 &*self.arena.alloc_from_iter(
269 .map(|a| self.lower_attr(a))
270 .chain(old_attrs.iter().cloned()),
277 // Desugar `ExprForLoop`
278 // from: `[opt_ident]: for <pat> in <head> <body>`
// `for` loops return early: `lower_expr_for` builds the whole
// `hir::Expr`, not just a `kind` for the common tail below.
279 ExprKind::ForLoop(ref pat, ref head, ref body, opt_label) => {
280 return self.lower_expr_for(e, pat, head, body, opt_label);
// Macros must have been expanded away before lowering.
282 ExprKind::MacCall(_) => panic!("{:?} shouldn't exist here", e.span),
// Common tail: allocate the HIR id, attach lowered attributes, build.
285 let hir_id = self.lower_node_id(e.id);
286 self.lower_attrs(hir_id, &e.attrs);
287 hir::Expr { hir_id, kind, span: e.span }
// Map an AST unary operator to its HIR counterpart (one-to-one).
291 fn lower_unop(&mut self, u: UnOp) -> hir::UnOp {
293 UnOp::Deref => hir::UnOp::Deref,
294 UnOp::Not => hir::UnOp::Not,
295 UnOp::Neg => hir::UnOp::Neg,
// Map an AST binary operator to its HIR counterpart (one-to-one on the
// operator kind; the result is a `hir::BinOp`, which also carries a span).
299 fn lower_binop(&mut self, b: BinOp) -> hir::BinOp {
302 BinOpKind::Add => hir::BinOpKind::Add,
303 BinOpKind::Sub => hir::BinOpKind::Sub,
304 BinOpKind::Mul => hir::BinOpKind::Mul,
305 BinOpKind::Div => hir::BinOpKind::Div,
306 BinOpKind::Rem => hir::BinOpKind::Rem,
307 BinOpKind::And => hir::BinOpKind::And,
308 BinOpKind::Or => hir::BinOpKind::Or,
309 BinOpKind::BitXor => hir::BinOpKind::BitXor,
310 BinOpKind::BitAnd => hir::BinOpKind::BitAnd,
311 BinOpKind::BitOr => hir::BinOpKind::BitOr,
312 BinOpKind::Shl => hir::BinOpKind::Shl,
313 BinOpKind::Shr => hir::BinOpKind::Shr,
314 BinOpKind::Eq => hir::BinOpKind::Eq,
315 BinOpKind::Lt => hir::BinOpKind::Lt,
316 BinOpKind::Le => hir::BinOpKind::Le,
317 BinOpKind::Ne => hir::BinOpKind::Ne,
318 BinOpKind::Ge => hir::BinOpKind::Ge,
319 BinOpKind::Gt => hir::BinOpKind::Gt,
// Lower a call to a function whose const parameters were historically
// passed positionally ("legacy const generics", e.g. some stdarch
// intrinsics): the arguments at `legacy_args_idx` are moved out of the
// argument list and attached to the callee path as anonymous-const
// generic arguments, then the rewritten call is lowered normally.
325 fn lower_legacy_const_generics(
328 args: Vec<AstP<Expr>>,
329 legacy_args_idx: &[usize],
330 ) -> hir::ExprKind<'hir> {
// Only a plain (unqualified) path callee can carry the generic args.
331 let path = match f.kind {
332 ExprKind::Path(None, ref mut path) => path,
336 // Split the arguments into const generics and normal arguments
337 let mut real_args = vec![];
338 let mut generic_args = vec![];
339 for (idx, arg) in args.into_iter().enumerate() {
340 if legacy_args_idx.contains(&idx) {
341 let parent_def_id = self.current_hir_id_owner.last().unwrap().0;
// A fresh NodeId/DefId pair is needed because the AnonConst did not
// exist in the original AST.
342 let node_id = self.resolver.next_node_id();
344 // Add a definition for the in-band const def.
345 self.resolver.create_def(
348 DefPathData::AnonConst,
353 let anon_const = AnonConst { id: node_id, value: arg };
354 generic_args.push(AngleBracketedArg::Arg(GenericArg::Const(anon_const)));
360 // Add generic args to the last element of the path.
361 let last_segment = path.segments.last_mut().unwrap();
// The resolver reported legacy const generics for this callee, so the
// segment cannot already have explicit generic args.
362 assert!(last_segment.args.is_none());
363 last_segment.args = Some(AstP(GenericArgs::AngleBracketed(AngleBracketedArgs {
368 // Now lower everything as normal.
369 let f = self.lower_expr(&f);
370 hir::ExprKind::Call(f, self.lower_exprs(&real_args))
// Diagnostic-only helper for `if (let pat = expr) { .. }`: emits an error
// with a machine-applicable suggestion that deletes the parentheses
// (the spans `start`/`end` cover the `(` and `)` around the `let`).
373 fn if_let_expr_with_parens(&mut self, cond: &Expr, paren: &Expr) {
374 let start = cond.span.until(paren.span);
375 let end = paren.span.shrink_to_hi().until(cond.span.shrink_to_hi());
379 "invalid parentheses around `let` expression in `if let`",
381 .multipart_suggestion(
382 "`if let` needs to be written without parentheses",
383 vec![(start, String::new()), (end, String::new())],
384 rustc_errors::Applicability::MachineApplicable,
387 // Ideally, we'd remove the feature gating of a `let` expression since we are already
388 // complaining about it here, but `feature_gate::check_crate` has already run by now:
389 // self.sess.parse_sess.gated_spans.ungate_last(sym::let_chains, paren.span);
392 /// Emit an error and lower `ast::ExprKind::Let(pat, scrutinee)` into:
394 /// match scrutinee { pats => true, _ => false }
396 fn lower_expr_let(&mut self, span: Span, pat: &Pat, scrutinee: &Expr) -> hir::ExprKind<'hir> {
397 // If we got here, the `let` expression is not allowed.
// The wording differs by channel: nightly users are told where `let`
// expressions ARE supported; stable users get a plain syntax error.
399 if self.sess.opts.unstable_features.is_nightly_build() {
401 .struct_span_err(span, "`let` expressions are not supported here")
403 "only supported directly without parentheses in conditions of `if`- and \
404 `while`-expressions, as well as in `let` chains within parentheses",
409 .struct_span_err(span, "expected expression, found statement (`let`)")
410 .note("variable declaration using `let` is a statement")
414 // For better recovery, we emit:
416 // match scrutinee { pat => true, _ => false }
418 // While this doesn't fully match the user's intent, it has key advantages:
419 // 1. We can avoid using `abort_if_errors`.
420 // 2. We can typeck both `pat` and `scrutinee`.
421 // 3. `pat` is allowed to be refutable.
422 // 4. The return type of the block is `bool` which seems like what the user wanted.
423 let scrutinee = self.lower_expr(scrutinee);
// Arm 1: `pat => true`.
425 let pat = self.lower_pat(pat);
426 let expr = self.expr_bool(span, true);
// Arm 2: `_ => false`.
430 let pat = self.pat_wild(span);
431 let expr = self.expr_bool(span, false);
434 hir::ExprKind::Match(
436 arena_vec![self; then_arm, else_arm],
437 hir::MatchSource::Normal,
// (interior of `lower_expr_if` — its signature lines are elided in this
// extraction) Lowers a plain `if cond { then } [else els]` to
// `hir::ExprKind::If`; the `make_if!` macro builds the node with either
// `Some(lowered else)` or `None`.
445 else_opt: Option<&Expr>,
446 ) -> hir::ExprKind<'hir> {
447 macro_rules! make_if {
449 let cond = self.lower_expr(cond);
450 let then_expr = self.lower_block_expr(then);
451 hir::ExprKind::If(cond, self.arena.alloc(then_expr), $opt)
454 if let Some(rslt) = else_opt {
455 make_if!(Some(self.lower_expr(rslt)))
// Desugar `if let pat = scrutinee { then } [else els]` into
// `match scrutinee { pat => then, _ => els-or-{} }`, tagged with
// `MatchSource::IfLetDesugar` so diagnostics can refer to the original form.
461 fn lower_expr_if_let(
467 else_opt: Option<&Expr>,
468 ) -> hir::ExprKind<'hir> {
469 // FIXME(#53667): handle lowering of && and parens.
471 // `_ => else_block` where `else_block` is `{}` if there's `None`:
472 let else_pat = self.pat_wild(span);
473 let (else_expr, contains_else_clause) = match else_opt {
474 None => (self.expr_block_empty(span.shrink_to_hi()), false),
475 Some(els) => (self.lower_expr(els), true),
477 let else_arm = self.arm(else_pat, else_expr);
479 // Handle then + scrutinee:
480 let scrutinee = self.lower_expr(scrutinee);
481 let then_pat = self.lower_pat(pat);
483 let then_expr = self.lower_block_expr(then);
484 let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));
// `contains_else_clause` is recorded so later phases can distinguish
// `if let .. else ..` from `if let ..`.
486 let desugar = hir::MatchSource::IfLetDesugar { contains_else_clause };
487 hir::ExprKind::Match(scrutinee, arena_vec![self; then_arm, else_arm], desugar)
// Desugar `while`/`while let` (already inside the loop scope set up by the
// caller) into `loop { match <scrutinee> { <pat> => body, _ => break } }`.
490 fn lower_expr_while_in_loop_scope(
495 opt_label: Option<Label>,
496 ) -> hir::ExprKind<'hir> {
497 // FIXME(#53667): handle lowering of && and parens.
499 // Note that the block AND the condition are evaluated in the loop scope.
500 // This is done to allow `break` from inside the condition of the loop.
// `_ => break` arm: exits the loop when the condition fails.
504 let else_pat = self.pat_wild(span);
505 let else_expr = self.expr_break(span, ThinVec::new());
506 self.arm(else_pat, else_expr)
509 // Handle then + scrutinee:
// Two shapes: `while let pat = e` matches `pat` directly against `e`;
// a plain `while cond` matches `true` against the (temp-wrapped) cond.
510 let (then_pat, scrutinee, desugar, source) = match cond.kind {
511 ExprKind::Let(ref pat, ref scrutinee) => {
514 // [opt_ident]: loop {
515 // match <sub_expr> {
520 let scrutinee = self.with_loop_condition_scope(|t| t.lower_expr(scrutinee));
521 let pat = self.lower_pat(pat);
522 (pat, scrutinee, hir::MatchSource::WhileLetDesugar, hir::LoopSource::WhileLet)
525 // We desugar: `'label: while $cond $body` into:
529 // match drop-temps { $cond } {
537 let cond = self.with_loop_condition_scope(|this| this.lower_expr(cond));
539 self.mark_span_with_reason(DesugaringKind::CondTemporary, cond.span, None);
540 // Wrap in a construct equivalent to `{ let _t = $cond; _t }`
541 // to preserve drop semantics since `while cond { ... }` does not
542 // let temporaries live outside of `cond`.
543 let cond = self.expr_drop_temps(span_block, cond, ThinVec::new());
545 let pat = self.pat_bool(span, true);
546 (pat, cond, hir::MatchSource::WhileDesugar, hir::LoopSource::While)
549 let then_expr = self.lower_block_expr(body);
550 let then_arm = self.arm(then_pat, self.arena.alloc(then_expr));
552 // `match <scrutinee> { ... }`
554 self.expr_match(span, scrutinee, arena_vec![self; then_arm, else_arm], desugar);
556 // `[opt_ident]: loop { ... }`
558 self.block_expr(self.arena.alloc(match_expr)),
561 span.with_hi(cond.span.hi()),
565 /// Desugar `try { <stmts>; <expr> }` into `{ <stmts>; ::std::ops::Try::from_ok(<expr>) }`,
566 /// `try { <stmts>; }` into `{ <stmts>; ::std::ops::Try::from_ok(()) }`
567 /// and save the block id to use it as a break target for desugaring of the `?` operator.
568 fn lower_expr_try_block(&mut self, body: &Block) -> hir::ExprKind<'hir> {
// The catch scope registers `body.id` so `?` inside the block can
// `break` to it instead of returning from the enclosing function.
569 self.with_catch_scope(body.id, |this| {
570 let mut block = this.lower_block_noalloc(body, true);
572 // Final expression of the block (if present) or `()` with span at the end of block
573 let (try_span, tail_expr) = if let Some(expr) = block.expr.take() {
575 this.mark_span_with_reason(
576 DesugaringKind::TryBlock,
578 this.allow_try_trait.clone(),
// No tail expression: synthesize `()` at the block's closing brace.
583 let try_span = this.mark_span_with_reason(
584 DesugaringKind::TryBlock,
585 this.sess.source_map().end_point(body.span),
586 this.allow_try_trait.clone(),
589 (try_span, this.expr_unit(try_span))
592 let ok_wrapped_span =
593 this.mark_span_with_reason(DesugaringKind::TryBlock, tail_expr.span, None);
595 // `::std::ops::Try::from_ok($tail_expr)`
596 block.expr = Some(this.wrap_in_try_constructor(
597 hir::LangItem::TryFromOk,
603 hir::ExprKind::Block(this.arena.alloc(block), None)
// Build `<lang_item>(expr)` — a call whose callee is a lang-item path
// (e.g. `Try::from_ok`) — used by the `try` block and `?` desugarings.
607 fn wrap_in_try_constructor(
609 lang_item: hir::LangItem,
611 expr: &'hir hir::Expr<'hir>,
613 ) -> &'hir hir::Expr<'hir> {
615 self.arena.alloc(self.expr_lang_item_path(method_span, lang_item, ThinVec::new()));
616 self.expr_call(overall_span, constructor, std::slice::from_ref(expr))
// Lower a match arm: pattern, optional guard (`if` or `if let`), body,
// and its attributes.
619 fn lower_arm(&mut self, arm: &Arm) -> hir::Arm<'hir> {
620 let pat = self.lower_pat(&arm.pat);
// An `if let` guard becomes `Guard::IfLet`; a plain `if` becomes `Guard::If`.
621 let guard = arm.guard.as_ref().map(|cond| {
622 if let ExprKind::Let(ref pat, ref scrutinee) = cond.kind {
623 hir::Guard::IfLet(self.lower_pat(pat), self.lower_expr(scrutinee))
625 hir::Guard::If(self.lower_expr(cond))
628 let hir_id = self.next_id();
629 self.lower_attrs(hir_id, &arm.attrs);
630 hir::Arm { hir_id, pat, guard, body: self.lower_expr(&arm.body), span: arm.span }
633 /// Lower an `async` construct to a generator that is then wrapped so it implements `Future`.
638 /// std::future::from_generator(static move? |_task_context| -> <ret_ty> {
642 pub(super) fn make_async_expr(
644 capture_clause: CaptureBy,
645 closure_node_id: NodeId,
646 ret_ty: Option<AstP<Ty>>,
648 async_gen_kind: hir::AsyncGeneratorKind,
649 body: impl FnOnce(&mut Self) -> hir::Expr<'hir>,
650 ) -> hir::ExprKind<'hir> {
// Return type: explicit `-> T` if given, otherwise the default-return
// span-only form.
651 let output = match ret_ty {
652 Some(ty) => hir::FnRetTy::Return(self.lower_ty(&ty, ImplTraitContext::disallowed())),
653 None => hir::FnRetTy::DefaultReturn(span),
656 // Resume argument type. We let the compiler infer this to simplify the lowering. It is
657 // fully constrained by `future::from_generator`.
658 let input_ty = hir::Ty { hir_id: self.next_id(), kind: hir::TyKind::Infer, span };
660 // The closure/generator `FnDecl` takes a single (resume) argument of type `input_ty`.
661 let decl = self.arena.alloc(hir::FnDecl {
662 inputs: arena_vec![self; input_ty],
665 implicit_self: hir::ImplicitSelfKind::None,
668 // Lower the argument pattern/ident. The ident is used again in the `.await` lowering.
669 let (pat, task_context_hid) = self.pat_ident_binding_mode(
671 Ident::with_dummy_span(sym::_task_context),
672 hir::BindingAnnotation::Mutable,
674 let param = hir::Param { hir_id: self.next_id(), pat, ty_span: span, span };
675 let params = arena_vec![self; param];
677 let body_id = self.lower_body(move |this| {
678 this.generator_kind = Some(hir::GeneratorKind::Async(async_gen_kind));
// Save/restore `task_context` so nested async bodies each see their
// own resume-argument binding while lowering `.await`.
680 let old_ctx = this.task_context;
681 this.task_context = Some(task_context_hid);
682 let res = body(this);
683 this.task_context = old_ctx;
687 // `static |_task_context| -> <ret_ty> { body }`:
688 let generator_kind = hir::ExprKind::Closure(
693 Some(hir::Movability::Static),
696 hir::Expr { hir_id: self.lower_node_id(closure_node_id), kind: generator_kind, span };
698 // `future::from_generator`:
700 self.mark_span_with_reason(DesugaringKind::Async, span, self.allow_gen_future.clone());
702 self.expr_lang_item_path(unstable_span, hir::LangItem::FromGenerator, ThinVec::new());
704 // `future::from_generator(generator)`:
705 hir::ExprKind::Call(self.arena.alloc(gen_future), arena_vec![self; generator])
708 /// Desugar `<expr>.await` into:
711 /// mut pinned => loop {
712 /// match unsafe { ::std::future::Future::poll(
713 /// <::std::pin::Pin>::new_unchecked(&mut pinned),
714 /// ::std::future::get_context(task_context),
716 /// ::std::task::Poll::Ready(result) => break result,
717 /// ::std::task::Poll::Pending => {}
719 /// task_context = yield ();
723 fn lower_expr_await(&mut self, await_span: Span, expr: &Expr) -> hir::ExprKind<'hir> {
// `.await` is only legal inside an async generator; report (but still
// lower, for recovery) when used elsewhere.
724 match self.generator_kind {
725 Some(hir::GeneratorKind::Async(_)) => {}
726 Some(hir::GeneratorKind::Gen) | None => {
727 let mut err = struct_span_err!(
731 "`await` is only allowed inside `async` functions and blocks"
733 err.span_label(await_span, "only allowed inside `async` functions and blocks");
734 if let Some(item_sp) = self.current_item {
735 err.span_label(item_sp, "this is not `async`");
740 let span = self.mark_span_with_reason(DesugaringKind::Await, await_span, None);
741 let gen_future_span = self.mark_span_with_reason(
742 DesugaringKind::Await,
744 self.allow_gen_future.clone(),
746 let expr = self.lower_expr(expr);
// `mut pinned` binds the future so it can be pinned on the stack.
748 let pinned_ident = Ident::with_dummy_span(sym::pinned);
749 let (pinned_pat, pinned_pat_hid) =
750 self.pat_ident_binding_mode(span, pinned_ident, hir::BindingAnnotation::Mutable);
752 let task_context_ident = Ident::with_dummy_span(sym::_task_context);
755 // ::std::future::Future::poll(
756 // ::std::pin::Pin::new_unchecked(&mut pinned),
757 // ::std::future::get_context(task_context),
761 let pinned = self.expr_ident(span, pinned_ident, pinned_pat_hid);
762 let ref_mut_pinned = self.expr_mut_addr_of(span, pinned);
// `task_context` is the generator's resume argument, recorded by
// `make_async_expr`; absent it, we are outside async (error above).
763 let task_context = if let Some(task_context_hid) = self.task_context {
764 self.expr_ident_mut(span, task_context_ident, task_context_hid)
766 // Use of `await` outside of an async context, we cannot use `task_context` here.
769 let new_unchecked = self.expr_call_lang_item_fn_mut(
771 hir::LangItem::PinNewUnchecked,
772 arena_vec![self; ref_mut_pinned],
774 let get_context = self.expr_call_lang_item_fn_mut(
776 hir::LangItem::GetContext,
777 arena_vec![self; task_context],
779 let call = self.expr_call_lang_item_fn(
781 hir::LangItem::FuturePoll,
782 arena_vec![self; new_unchecked, get_context],
// The poll call is wrapped in `unsafe { .. }` (Pin::new_unchecked).
784 self.arena.alloc(self.expr_unsafe(call))
787 // `::std::task::Poll::Ready(result) => break result`
788 let loop_node_id = self.resolver.next_node_id();
789 let loop_hir_id = self.lower_node_id(loop_node_id);
791 let x_ident = Ident::with_dummy_span(sym::result);
792 let (x_pat, x_pat_hid) = self.pat_ident(span, x_ident);
793 let x_expr = self.expr_ident(span, x_ident, x_pat_hid);
794 let ready_field = self.single_pat_field(span, x_pat);
795 let ready_pat = self.pat_lang_item_variant(span, hir::LangItem::PollReady, ready_field);
796 let break_x = self.with_loop_scope(loop_node_id, move |this| {
798 hir::ExprKind::Break(this.lower_loop_destination(None), Some(x_expr));
799 this.arena.alloc(this.expr(await_span, expr_break, ThinVec::new()))
801 self.arm(ready_pat, break_x)
804 // `::std::task::Poll::Pending => {}`
806 let pending_pat = self.pat_lang_item_variant(span, hir::LangItem::PollPending, &[]);
807 let empty_block = self.expr_block_empty(span);
808 self.arm(pending_pat, empty_block)
811 let inner_match_stmt = {
812 let match_expr = self.expr_match(
815 arena_vec![self; ready_arm, pending_arm],
816 hir::MatchSource::AwaitDesugar,
818 self.stmt_expr(span, match_expr)
821 // task_context = yield ();
823 let unit = self.expr_unit(span);
824 let yield_expr = self.expr(
826 hir::ExprKind::Yield(unit, hir::YieldSource::Await { expr: Some(expr.hir_id) }),
829 let yield_expr = self.arena.alloc(yield_expr);
// Reassign `task_context` from the resume value so the next poll sees
// the fresh context; without an async context, just emit the yield.
831 if let Some(task_context_hid) = self.task_context {
832 let lhs = self.expr_ident(span, task_context_ident, task_context_hid);
834 self.expr(span, hir::ExprKind::Assign(lhs, yield_expr, span), AttrVec::new());
835 self.stmt_expr(span, assign)
837 // Use of `await` outside of an async context. Return `yield_expr` so that we can
838 // proceed with type checking.
839 self.stmt(span, hir::StmtKind::Semi(yield_expr))
843 let loop_block = self.block_all(span, arena_vec![self; inner_match_stmt, yield_stmt], None);
846 let loop_expr = self.arena.alloc(hir::Expr {
848 kind: hir::ExprKind::Loop(loop_block, None, hir::LoopSource::Loop, span),
852 // mut pinned => loop { ... }
853 let pinned_arm = self.arm(pinned_pat, loop_expr);
856 // mut pinned => loop { .. }
858 hir::ExprKind::Match(expr, arena_vec![self; pinned_arm], hir::MatchSource::AwaitDesugar)
// Lower a (non-async) closure. If the body turned out to contain `yield`,
// the closure is a generator and its movability is validated.
861 fn lower_expr_closure(
863 capture_clause: CaptureBy,
864 movability: Movability,
868 ) -> hir::ExprKind<'hir> {
869 let (body_id, generator_option) = self.with_new_scopes(move |this| {
// `current_item` points diagnostics at this closure while lowering it.
870 let prev = this.current_item;
871 this.current_item = Some(fn_decl_span);
872 let mut generator_kind = None;
873 let body_id = this.lower_fn_body(decl, |this| {
874 let e = this.lower_expr_mut(body);
// Capture whether lowering the body discovered a `yield`.
875 generator_kind = this.generator_kind;
878 let generator_option =
879 this.generator_movability_for_fn(&decl, fn_decl_span, generator_kind, movability);
880 this.current_item = prev;
881 (body_id, generator_option)
884 // Lower outside new scope to preserve `is_in_loop_condition`.
885 let fn_decl = self.lower_fn_decl(decl, None, false, None);
887 hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, generator_option)
// Validate a closure that may be a generator: generators take at most one
// (resume) parameter, cannot be `static` unless they actually yield, and
// an `async` kind here would be a lowering bug.
890 fn generator_movability_for_fn(
894 generator_kind: Option<hir::GeneratorKind>,
895 movability: Movability,
896 ) -> Option<hir::Movability> {
897 match generator_kind {
898 Some(hir::GeneratorKind::Gen) => {
899 if decl.inputs.len() > 1 {
904 "too many parameters for a generator (expected 0 or 1 parameters)"
// Async bodies are lowered via `make_async_expr`, never through this path.
910 Some(hir::GeneratorKind::Async(_)) => {
911 panic!("non-`async` closure body turned `async` during lowering");
// Plain closure: `static` is only meaningful on generators.
914 if movability == Movability::Static {
915 struct_span_err!(self.sess, fn_decl_span, E0697, "closures cannot be static")
// Lower `async |args| body` into an ordinary closure whose body is the
// async-generator wrapper: `|args| from_generator(static || -> ret { body })`.
923 fn lower_expr_async_closure(
925 capture_clause: CaptureBy,
930 ) -> hir::ExprKind<'hir> {
// The outer closure keeps the parameter list but has a default return
// type; the declared `-> T` moves onto the inner async body.
932 FnDecl { inputs: decl.inputs.clone(), output: FnRetTy::Default(fn_decl_span) };
934 let body_id = self.with_new_scopes(|this| {
935 // FIXME(cramertj): allow `async` non-`move` closures with arguments.
936 if capture_clause == CaptureBy::Ref && !decl.inputs.is_empty() {
941 "`async` non-`move` closures with parameters are not currently supported",
944 "consider using `let` statements to manually capture \
945 variables by reference before entering an `async move` closure",
950 // Transform `async |x: u8| -> X { ... }` into
951 // `|x: u8| future_from_generator(|| -> X { ... })`.
952 let body_id = this.lower_fn_body(&outer_decl, |this| {
954 if let FnRetTy::Ty(ty) = &decl.output { Some(ty.clone()) } else { None };
955 let async_body = this.make_async_expr(
960 hir::AsyncGeneratorKind::Closure,
961 |this| this.with_new_scopes(|this| this.lower_expr_mut(body)),
963 this.expr(fn_decl_span, async_body, ThinVec::new())
968 // We need to lower the declaration outside the new scope, because we
969 // have to conserve the state of being inside a loop condition for the
970 // closure argument types.
971 let fn_decl = self.lower_fn_decl(&outer_decl, None, false, None);
// Async closures are never generators themselves, hence movability `None`.
973 hir::ExprKind::Closure(capture_clause, fn_decl, body_id, fn_decl_span, None)
976 /// Destructure the LHS of complex assignments.
977 /// For instance, lower `(a, b) = t` to `{ let (lhs1, lhs2) = t; a = lhs1; b = lhs2; }`.
978 fn lower_expr_assign(
984 ) -> hir::ExprKind<'hir> {
985 // Return early in case of an ordinary assignment.
// An assignment is "ordinary" unless its LHS is a destructuring shape
// (tuple, slice, struct literal, tuple-struct call, `_`, or a
// parenthesized one of those).
986 fn is_ordinary(lower_ctx: &mut LoweringContext<'_, '_>, lhs: &Expr) -> bool {
989 | ExprKind::Struct(..)
991 | ExprKind::Underscore => false,
992 // Check for tuple struct constructor.
993 ExprKind::Call(callee, ..) => lower_ctx.extract_tuple_struct_path(callee).is_none(),
994 ExprKind::Paren(e) => {
996 // We special-case `(..)` for consistency with patterns.
997 ExprKind::Range(None, None, RangeLimits::HalfOpen) => false,
998 _ => is_ordinary(lower_ctx, e),
1004 if is_ordinary(self, lhs) {
1005 return hir::ExprKind::Assign(self.lower_expr(lhs), self.lower_expr(rhs), eq_sign_span);
// Destructuring assignment is feature-gated; emit the gate error but
// continue lowering for recovery.
1007 if !self.sess.features_untracked().destructuring_assignment {
1009 &self.sess.parse_sess,
1010 sym::destructuring_assignment,
1012 "destructuring assignments are unstable",
1014 .span_label(lhs.span, "cannot assign to this expression")
1018 let mut assignments = vec![];
1020 // The LHS becomes a pattern: `(lhs1, lhs2)`.
1021 let pat = self.destructure_assign(lhs, eq_sign_span, &mut assignments);
1022 let rhs = self.lower_expr(rhs);
1024 // Introduce a `let` for destructuring: `let (lhs1, lhs2) = t`.
1025 let destructure_let = self.stmt_let_pat(
1030 hir::LocalSource::AssignDesugar(eq_sign_span),
1033 // `a = lhs1; b = lhs2;`.
1036 .alloc_from_iter(std::iter::once(destructure_let).chain(assignments.into_iter()));
1038 // Wrap everything in a block.
1039 hir::ExprKind::Block(&self.block_all(whole_span, stmts, None), None)
1042 /// If the given expression is a path to a tuple struct, returns that path.
1043 /// It is not a complete check, but just tries to reject most paths early
1044 /// if they are not tuple structs.
1045 /// Type checking will take care of the full validation later.
1046 fn extract_tuple_struct_path<'a>(&mut self, expr: &'a Expr) -> Option<&'a Path> {
1047 // For tuple struct destructuring, it must be a non-qualified path (like in patterns).
1048 if let ExprKind::Path(None, path) = &expr.kind {
1049 // Does the path resolves to something disallowed in a tuple struct/variant pattern?
1050 if let Some(partial_res) = self.resolver.get_partial_res(expr.id) {
// Reject fully-resolved paths whose base resolution could not appear
// in a tuple-struct/variant pattern.
1051 if partial_res.unresolved_segments() == 0
1052 && !partial_res.base_res().expected_in_tuple_struct_pat()
1062 /// Convert the LHS of a destructuring assignment to a pattern.
1063 /// Each sub-assignment is recorded in `assignments`.
1064 fn destructure_assign(
1068 assignments: &mut Vec<hir::Stmt<'hir>>,
1069 ) -> &'hir hir::Pat<'hir> {
1071 // Underscore pattern.
1072 ExprKind::Underscore => {
1073 return self.pat_without_dbm(lhs.span, hir::PatKind::Wild);
// Slice destructuring: `[a, .., b] = e` becomes a slice pattern with an
// optional rest (`..`) splitting the before/after sub-patterns.
1076 ExprKind::Array(elements) => {
1078 self.destructure_sequence(elements, "slice", eq_sign_span, assignments);
1079 let slice_pat = if let Some((i, span)) = rest {
1080 let (before, after) = pats.split_at(i);
1081 hir::PatKind::Slice(
1083 Some(self.pat_without_dbm(span, hir::PatKind::Wild)),
1087 hir::PatKind::Slice(pats, None, &[])
1089 return self.pat_without_dbm(lhs.span, slice_pat);
// Tuple-struct destructuring: `Some(x) = e` — only when the callee is
// a path that could be a tuple struct/variant.
1092 ExprKind::Call(callee, args) => {
1093 if let Some(path) = self.extract_tuple_struct_path(callee) {
1094 let (pats, rest) = self.destructure_sequence(
1096 "tuple struct or variant",
1100 let qpath = self.lower_qpath(
1104 ParamMode::Optional,
1105 ImplTraitContext::disallowed(),
1107 // Destructure like a tuple struct.
1108 let tuple_struct_pat =
1109 hir::PatKind::TupleStruct(qpath, pats, rest.map(|r| r.0));
1110 return self.pat_without_dbm(lhs.span, tuple_struct_pat);
// Struct destructuring: each field's expression recurses; a `..base`
// (functional record update) is an error here, while a bare `..` just
// marks the pattern as having omitted fields.
1114 ExprKind::Struct(se) => {
1115 let field_pats = self.arena.alloc_from_iter(se.fields.iter().map(|f| {
1116 let pat = self.destructure_assign(&f.expr, eq_sign_span, assignments);
1118 hir_id: self.next_id(),
1121 is_shorthand: f.is_shorthand,
1125 let qpath = self.lower_qpath(
1129 ParamMode::Optional,
1130 ImplTraitContext::disallowed(),
1132 let fields_omitted = match &se.rest {
1133 StructRest::Base(e) => {
1137 "functional record updates are not allowed in destructuring \
1142 "consider removing the trailing pattern",
1144 rustc_errors::Applicability::MachineApplicable,
1149 StructRest::Rest(_) => true,
1150 StructRest::None => false,
1152 let struct_pat = hir::PatKind::Struct(qpath, field_pats, fields_omitted);
1153 return self.pat_without_dbm(lhs.span, struct_pat);
// Tuple destructuring: `(a, b) = e`.
1156 ExprKind::Tup(elements) => {
1158 self.destructure_sequence(elements, "tuple", eq_sign_span, assignments);
1159 let tuple_pat = hir::PatKind::Tuple(pats, rest.map(|r| r.0));
1160 return self.pat_without_dbm(lhs.span, tuple_pat);
1162 ExprKind::Paren(e) => {
1163 // We special-case `(..)` for consistency with patterns.
1164 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1165 let tuple_pat = hir::PatKind::Tuple(&[], Some(0));
1166 return self.pat_without_dbm(lhs.span, tuple_pat);
1168 return self.destructure_assign(e, eq_sign_span, assignments);
1173 // Treat all other cases as normal lvalue.
// Fallback: bind the RHS piece to a fresh `lhs` ident pattern and record
// the sub-assignment `<lhs-expr> = lhs` to run after the `let`.
1174 let ident = Ident::new(sym::lhs, lhs.span);
1175 let (pat, binding) = self.pat_ident(lhs.span, ident);
1176 let ident = self.expr_ident(lhs.span, ident, binding);
1177 let assign = hir::ExprKind::Assign(self.lower_expr(lhs), ident, eq_sign_span);
1178 let expr = self.expr(lhs.span, assign, ThinVec::new());
1179 assignments.push(self.stmt_expr(lhs.span, expr));
1183 /// Destructure a sequence of expressions occurring on the LHS of an assignment.
1184 /// Such a sequence occurs in a tuple (struct)/slice.
1185 /// Return a sequence of corresponding patterns, and the index and the span of `..` if it
1187 /// Each sub-assignment is recorded in `assignments`.
1188 fn destructure_sequence(
1190 elements: &[AstP<Expr>],
1193 assignments: &mut Vec<hir::Stmt<'hir>>,
1194 ) -> (&'hir [&'hir hir::Pat<'hir>], Option<(usize, Span)>) {
// `rest` remembers the index and span of the first `..` seen, if any.
1195 let mut rest = None;
1197 self.arena.alloc_from_iter(elements.iter().enumerate().filter_map(|(i, e)| {
1198 // Check for `..` pattern.
1199 if let ExprKind::Range(None, None, RangeLimits::HalfOpen) = e.kind {
1200 if let Some((_, prev_span)) = rest {
// A second `..` in the same sequence is an error; report it but keep lowering.
1201 self.ban_extra_rest_pat(e.span, prev_span, ctx);
1203 rest = Some((i, e.span));
// Each non-`..` element is recursively destructured into a pattern.
1207 Some(self.destructure_assign(e, eq_sign_span, assignments))
1213 /// Desugar `<start>..=<end>` into `std::ops::RangeInclusive::new(<start>, <end>)`.
1214 fn lower_expr_range_closed(&mut self, span: Span, e1: &Expr, e2: &Expr) -> hir::ExprKind<'hir> {
1215 let e1 = self.lower_expr_mut(e1);
1216 let e2 = self.lower_expr_mut(e2);
// Refer to `RangeInclusive::new` via its lang item so the desugaring does not
// depend on any particular path being in scope.
1217 let fn_path = hir::QPath::LangItem(hir::LangItem::RangeInclusiveNew, span);
1219 self.arena.alloc(self.expr(span, hir::ExprKind::Path(fn_path), ThinVec::new()));
1220 hir::ExprKind::Call(fn_expr, arena_vec![self; e1, e2])
/// Desugar a range expression into a struct literal of the matching `std::ops`
/// range lang item, filling in the `start`/`end` fields that are present.
/// (`start..=end` is handled separately by `lower_expr_range_closed`.)
1223 fn lower_expr_range(
1229 ) -> hir::ExprKind<'hir> {
1230 use rustc_ast::RangeLimits::*;
// Pick the range type from which bounds are present and whether it is closed.
1232 let lang_item = match (e1, e2, lims) {
1233 (None, None, HalfOpen) => hir::LangItem::RangeFull,
1234 (Some(..), None, HalfOpen) => hir::LangItem::RangeFrom,
1235 (None, Some(..), HalfOpen) => hir::LangItem::RangeTo,
1236 (Some(..), Some(..), HalfOpen) => hir::LangItem::Range,
1237 (None, Some(..), Closed) => hir::LangItem::RangeToInclusive,
// A closed range with both ends goes through `lower_expr_range_closed`.
1238 (Some(..), Some(..), Closed) => unreachable!(),
// `start..=` (no end) cannot be represented; this is a hard error.
1239 (_, None, Closed) => {
1240 self.diagnostic().span_fatal(span, "inclusive range with no end").raise()
// Lower whichever of `start`/`end` exist into fields of the struct literal.
1244 let fields = self.arena.alloc_from_iter(
1245 e1.iter().map(|e| ("start", e)).chain(e2.iter().map(|e| ("end", e))).map(|(s, e)| {
1246 let expr = self.lower_expr(&e);
1247 let ident = Ident::new(Symbol::intern(s), e.span);
1248 self.expr_field(ident, expr, e.span)
1252 hir::ExprKind::Struct(self.arena.alloc(hir::QPath::LangItem(lang_item, span)), fields, None)
/// Resolve an optional label on a `break`/`continue` to the HIR id of the loop
/// it targets; an unlabeled jump falls back to the innermost loop scope.
/// Resolution failures are recorded as `Err(LoopIdError)` in the destination.
1255 fn lower_loop_destination(&mut self, destination: Option<(NodeId, Label)>) -> hir::Destination {
1256 let target_id = match destination {
1258 if let Some(loop_id) = self.resolver.get_label_res(id) {
1259 Ok(self.lower_node_id(loop_id))
// The label did not resolve to a loop; record the error.
1261 Err(hir::LoopIdError::UnresolvedLabel)
// No label: use the innermost loop scope, or error if outside any loop.
1268 .map(|id| Ok(self.lower_node_id(id)))
1269 .unwrap_or(Err(hir::LoopIdError::OutsideLoopScope)),
1271 hir::Destination { label: destination.map(|(_, label)| label), target_id }
/// Like `lower_loop_destination`, but additionally rejects an unlabeled
/// `break`/`continue` occurring inside a `while` loop's condition.
1274 fn lower_jump_destination(&mut self, id: NodeId, opt_label: Option<Label>) -> hir::Destination {
1275 if self.is_in_loop_condition && opt_label.is_none() {
1278 target_id: Err(hir::LoopIdError::UnlabeledCfInWhileCondition),
1281 self.lower_loop_destination(opt_label.map(|label| (id, label)))
/// Run `f` with `catch_id` pushed as the innermost catch (`try`-block) scope,
/// popping it afterwards and asserting the scope stack stayed balanced.
1285 fn with_catch_scope<T>(&mut self, catch_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1286 let len = self.catch_scopes.len();
1287 self.catch_scopes.push(catch_id);
1289 let result = f(self);
// `f` must leave the stack exactly as it found it (plus our entry).
1292 self.catch_scopes.len(),
1293 "catch scopes should be added and removed in stack order"
1296 self.catch_scopes.pop().unwrap();
/// Run `f` with `loop_id` pushed as the innermost loop scope. Also clears
/// `is_in_loop_condition` for the duration (restoring it afterwards), since
/// code nested in another loop is no longer part of the outer condition.
1301 fn with_loop_scope<T>(&mut self, loop_id: NodeId, f: impl FnOnce(&mut Self) -> T) -> T {
1302 // We're no longer in the base loop's condition; we're in another loop.
1303 let was_in_loop_condition = self.is_in_loop_condition;
1304 self.is_in_loop_condition = false;
1306 let len = self.loop_scopes.len();
1307 self.loop_scopes.push(loop_id);
1309 let result = f(self);
// `f` must leave the stack exactly as it found it (plus our entry).
1312 self.loop_scopes.len(),
1313 "loop scopes should be added and removed in stack order"
1316 self.loop_scopes.pop().unwrap();
1318 self.is_in_loop_condition = was_in_loop_condition;
/// Run `f` with `is_in_loop_condition` set, restoring the previous value
/// afterwards; used while lowering a loop's condition expression.
1323 fn with_loop_condition_scope<T>(&mut self, f: impl FnOnce(&mut Self) -> T) -> T {
1324 let was_in_loop_condition = self.is_in_loop_condition;
1325 self.is_in_loop_condition = true;
1327 let result = f(self);
1329 self.is_in_loop_condition = was_in_loop_condition;
/// Lower an `asm!` invocation to `hir::ExprKind::InlineAsm`, validating
/// register names/classes, template modifiers, required target features, and
/// conflicts between explicit registers along the way. Errors are reported but
/// lowering continues with `Err` placeholder register (classes) so later
/// phases still see a well-formed HIR node.
1334 fn lower_expr_asm(&mut self, sp: Span, asm: &InlineAsm) -> hir::ExprKind<'hir> {
1335 // Rustdoc needs to support asm! from foreign architectures: don't try
1336 // lowering the register constraints in this case.
1337 let asm_arch = if self.sess.opts.actually_rustdoc { None } else { self.sess.asm_arch };
1338 if asm_arch.is_none() && !self.sess.opts.actually_rustdoc {
1339 struct_span_err!(self.sess, sp, E0472, "asm! is unsupported on this target").emit();
// `att_syntax` only makes sense on x86; reject it elsewhere (except in rustdoc).
1341 if asm.options.contains(InlineAsmOptions::ATT_SYNTAX)
1342 && !matches!(asm_arch, Some(asm::InlineAsmArch::X86 | asm::InlineAsmArch::X86_64))
1343 && !self.sess.opts.actually_rustdoc
1346 .struct_span_err(sp, "the `att_syntax` option is only supported on x86")
1350 // Lower operands to HIR. We use dummy register classes if an error
1351 // occurs during lowering because we still need to be able to produce a
// Borrow the session separately so the closures below don't capture `self`.
1353 let sess = self.sess;
1354 let operands: Vec<_> = asm
1357 .map(|(op, op_sp)| {
// Translate an AST register (class) into the target-specific representation,
// falling back to the `Err` placeholder on parse failure or unknown arch.
1358 let lower_reg = |reg| match reg {
1359 InlineAsmRegOrRegClass::Reg(s) => {
1360 asm::InlineAsmRegOrRegClass::Reg(if let Some(asm_arch) = asm_arch {
1361 asm::InlineAsmReg::parse(
1363 |feature| sess.target_features.contains(&Symbol::intern(feature)),
1367 .unwrap_or_else(|e| {
1368 let msg = format!("invalid register `{}`: {}", s.as_str(), e);
1369 sess.struct_span_err(*op_sp, &msg).emit();
1370 asm::InlineAsmReg::Err
1373 asm::InlineAsmReg::Err
1376 InlineAsmRegOrRegClass::RegClass(s) => {
1377 asm::InlineAsmRegOrRegClass::RegClass(if let Some(asm_arch) = asm_arch {
1378 asm::InlineAsmRegClass::parse(asm_arch, s).unwrap_or_else(|e| {
1379 let msg = format!("invalid register class `{}`: {}", s.as_str(), e);
1380 sess.struct_span_err(*op_sp, &msg).emit();
1381 asm::InlineAsmRegClass::Err
1384 asm::InlineAsmRegClass::Err
// Lower each operand kind, lowering its register spec and expression(s).
1389 let op = match *op {
1390 InlineAsmOperand::In { reg, ref expr } => hir::InlineAsmOperand::In {
1391 reg: lower_reg(reg),
1392 expr: self.lower_expr_mut(expr),
1394 InlineAsmOperand::Out { reg, late, ref expr } => hir::InlineAsmOperand::Out {
1395 reg: lower_reg(reg),
1397 expr: expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
1399 InlineAsmOperand::InOut { reg, late, ref expr } => {
1400 hir::InlineAsmOperand::InOut {
1401 reg: lower_reg(reg),
1403 expr: self.lower_expr_mut(expr),
1406 InlineAsmOperand::SplitInOut { reg, late, ref in_expr, ref out_expr } => {
1407 hir::InlineAsmOperand::SplitInOut {
1408 reg: lower_reg(reg),
1410 in_expr: self.lower_expr_mut(in_expr),
1411 out_expr: out_expr.as_ref().map(|expr| self.lower_expr_mut(expr)),
1414 InlineAsmOperand::Const { ref expr } => {
1415 hir::InlineAsmOperand::Const { expr: self.lower_expr_mut(expr) }
1417 InlineAsmOperand::Sym { ref expr } => {
1418 hir::InlineAsmOperand::Sym { expr: self.lower_expr_mut(expr) }
1425 // Validate template modifiers against the register classes for the operands
1426 for p in &asm.template {
1427 if let InlineAsmTemplatePiece::Placeholder {
1429 modifier: Some(modifier),
1430 span: placeholder_span,
1433 let op_sp = asm.operands[operand_idx].1;
1434 match &operands[operand_idx].0 {
1435 hir::InlineAsmOperand::In { reg, .. }
1436 | hir::InlineAsmOperand::Out { reg, .. }
1437 | hir::InlineAsmOperand::InOut { reg, .. }
1438 | hir::InlineAsmOperand::SplitInOut { reg, .. } => {
1439 let class = reg.reg_class();
// A register that already failed to lower was reported above; skip it.
1440 if class == asm::InlineAsmRegClass::Err {
1443 let valid_modifiers = class.valid_modifiers(asm_arch.unwrap());
1444 if !valid_modifiers.contains(&modifier) {
1445 let mut err = sess.struct_span_err(
1447 "invalid asm template modifier for this register class",
1449 err.span_label(placeholder_span, "template modifier");
1450 err.span_label(op_sp, "argument");
1451 if !valid_modifiers.is_empty() {
1452 let mut mods = format!("`{}`", valid_modifiers[0]);
1453 for m in &valid_modifiers[1..] {
1454 let _ = write!(mods, ", `{}`", m);
1457 "the `{}` register class supports \
1458 the following template modifiers: {}",
1464 "the `{}` register class does not support template modifiers",
// Modifiers never apply to `const`/`sym` operands.
1471 hir::InlineAsmOperand::Const { .. } => {
1472 let mut err = sess.struct_span_err(
1474 "asm template modifiers are not allowed for `const` arguments",
1476 err.span_label(placeholder_span, "template modifier");
1477 err.span_label(op_sp, "argument");
1480 hir::InlineAsmOperand::Sym { .. } => {
1481 let mut err = sess.struct_span_err(
1483 "asm template modifiers are not allowed for `sym` arguments",
1485 err.span_label(placeholder_span, "template modifier");
1486 err.span_label(op_sp, "argument");
// Validate required target features per operand and detect conflicting
// uses of explicit registers across inputs/outputs.
1493 let mut used_input_regs = FxHashMap::default();
1494 let mut used_output_regs = FxHashMap::default();
1495 let mut required_features: Vec<&str> = vec![];
1496 for (idx, &(ref op, op_sp)) in operands.iter().enumerate() {
1497 if let Some(reg) = op.reg() {
1498 // Make sure we don't accidentally carry features from the
1499 // previous iteration.
1500 required_features.clear();
1502 // Validate register classes against currently enabled target
1503 // features. We check that at least one type is available for
1504 // the current target.
1505 let reg_class = reg.reg_class();
1506 if reg_class == asm::InlineAsmRegClass::Err {
1509 for &(_, feature) in reg_class.supported_types(asm_arch.unwrap()) {
1510 if let Some(feature) = feature {
1511 if self.sess.target_features.contains(&Symbol::intern(feature)) {
// One supported type is already enabled: nothing more to require.
1512 required_features.clear();
1515 required_features.push(feature);
// A type with no feature requirement means the class is always usable.
1518 required_features.clear();
1522 // We are sorting primitive strs here and can use unstable sort here
1523 required_features.sort_unstable();
1524 required_features.dedup();
1525 match &required_features[..] {
1529 "register class `{}` requires the `{}` target feature",
1533 sess.struct_span_err(op_sp, &msg).emit();
1537 "register class `{}` requires at least one target feature: {}",
1541 sess.struct_span_err(op_sp, &msg).emit();
1545 // Check for conflicts between explicit register operands.
1546 if let asm::InlineAsmRegOrRegClass::Reg(reg) = reg {
1547 let (input, output) = match op {
1548 hir::InlineAsmOperand::In { .. } => (true, false),
1549 // Late output do not conflict with inputs, but normal outputs do
1550 hir::InlineAsmOperand::Out { late, .. } => (!late, true),
1551 hir::InlineAsmOperand::InOut { .. }
1552 | hir::InlineAsmOperand::SplitInOut { .. } => (true, true),
1553 hir::InlineAsmOperand::Const { .. } | hir::InlineAsmOperand::Sym { .. } => {
1558 // Flag to output the error only once per operand
1559 let mut skip = false;
// Check every register that overlaps with the requested one.
1560 reg.overlapping_regs(|r| {
1561 let mut check = |used_regs: &mut FxHashMap<asm::InlineAsmReg, usize>,
1563 match used_regs.entry(r) {
1564 Entry::Occupied(o) => {
1570 let idx2 = *o.get();
1571 let &(ref op2, op_sp2) = &operands[idx2];
1572 let reg2 = match op2.reg() {
1573 Some(asm::InlineAsmRegOrRegClass::Reg(r)) => r,
1574 _ => unreachable!(),
1578 "register `{}` conflicts with register `{}`",
1582 let mut err = sess.struct_span_err(op_sp, &msg);
1583 err.span_label(op_sp, &format!("register `{}`", reg.name()));
1584 err.span_label(op_sp2, &format!("register `{}`", reg2.name()));
// Explain that `lateout` would avoid the in/out conflict.
1588 hir::InlineAsmOperand::In { .. },
1589 hir::InlineAsmOperand::Out { late, .. },
1592 hir::InlineAsmOperand::Out { late, .. },
1593 hir::InlineAsmOperand::In { .. },
1596 let out_op_sp = if input { op_sp2 } else { op_sp };
1597 let msg = "use `lateout` instead of \
1598 `out` to avoid conflict";
1599 err.span_help(out_op_sp, msg);
1606 Entry::Vacant(v) => {
1612 check(&mut used_input_regs, true);
1615 check(&mut used_output_regs, false);
// Move the validated pieces into the arena and build the final HIR node.
1622 let operands = self.arena.alloc_from_iter(operands);
1623 let template = self.arena.alloc_from_iter(asm.template.iter().cloned());
1624 let line_spans = self.arena.alloc_slice(&asm.line_spans[..]);
1625 let hir_asm = hir::InlineAsm { template, operands, options: asm.options, line_spans };
1626 hir::ExprKind::InlineAsm(self.arena.alloc(hir_asm))
/// Lower a legacy `llvm_asm!` invocation to `hir::ExprKind::LlvmInlineAsm`,
/// separating the constraint metadata from the lowered input/output exprs.
1629 fn lower_expr_llvm_asm(&mut self, asm: &LlvmInlineAsm) -> hir::ExprKind<'hir> {
1630 let inner = hir::LlvmInlineAsmInner {
// Keep only the constraint strings here; the expressions are lowered below.
1631 inputs: asm.inputs.iter().map(|&(c, _)| c).collect(),
1635 .map(|out| hir::LlvmInlineAsmOutput {
1636 constraint: out.constraint,
1638 is_indirect: out.is_indirect,
1639 span: out.expr.span,
1643 asm_str_style: asm.asm_str_style,
1644 clobbers: asm.clobbers.clone(),
1645 volatile: asm.volatile,
1646 alignstack: asm.alignstack,
1647 dialect: asm.dialect,
// The expressions live alongside the metadata, allocated in the arena.
1649 let hir_asm = hir::LlvmInlineAsm {
1651 inputs_exprs: self.arena.alloc_from_iter(
1652 asm.inputs.iter().map(|&(_, ref input)| self.lower_expr_mut(input)),
1656 .alloc_from_iter(asm.outputs.iter().map(|out| self.lower_expr_mut(&out.expr))),
1658 hir::ExprKind::LlvmInlineAsm(self.arena.alloc(hir_asm))
/// Lower a single struct-literal field (`ident: expr`, possibly shorthand).
1661 fn lower_expr_field(&mut self, f: &ExprField) -> hir::ExprField<'hir> {
1663 hir_id: self.next_id(),
1665 expr: self.lower_expr(&f.expr),
1667 is_shorthand: f.is_shorthand,
/// Lower `yield <expr>`. Marks the enclosing body as a generator on first
/// use, and rejects `yield` inside `async` generators (unsupported).
1671 fn lower_expr_yield(&mut self, span: Span, opt_expr: Option<&Expr>) -> hir::ExprKind<'hir> {
1672 match self.generator_kind {
1673 Some(hir::GeneratorKind::Gen) => {}
1674 Some(hir::GeneratorKind::Async(_)) => {
1679 "`async` generators are not yet supported"
// First `yield` seen: this body becomes a (sync) generator.
1683 None => self.generator_kind = Some(hir::GeneratorKind::Gen),
// `yield` with no operand yields the unit value.
1687 opt_expr.as_ref().map(|x| self.lower_expr(x)).unwrap_or_else(|| self.expr_unit(span));
1689 hir::ExprKind::Yield(expr, hir::YieldSource::Yield)
1692 /// Desugar `ExprForLoop` from: `[opt_ident]: for <pat> in <head> <body>` into:
1695 /// let result = match ::std::iter::IntoIterator::into_iter(<head>) {
1697 /// [opt_ident]: loop {
1699 /// match ::std::iter::Iterator::next(&mut iter) {
1700 /// ::std::option::Option::Some(val) => __next = val,
1701 /// ::std::option::Option::None => break
1703 /// let <pat> = __next;
1704 /// StmtKind::Expr(<body>);
1717 opt_label: Option<Label>,
1718 ) -> hir::Expr<'hir> {
// Remember the original head span; the loop's span is clipped to it below.
1719 let orig_head_span = head.span;
1721 let mut head = self.lower_expr_mut(head);
1722 let desugared_span = self.mark_span_with_reason(
1723 DesugaringKind::ForLoop(ForLoopLoc::Head),
1727 head.span = desugared_span;
1729 let iter = Ident::with_dummy_span(sym::iter);
// `__next` is mutable: the `Some` arm assigns into it each iteration.
1731 let next_ident = Ident::with_dummy_span(sym::__next);
1732 let (next_pat, next_pat_hid) = self.pat_ident_binding_mode(
1735 hir::BindingAnnotation::Mutable,
1738 // `::std::option::Option::Some(val) => __next = val`
1740 let val_ident = Ident::with_dummy_span(sym::val);
1741 let (val_pat, val_pat_hid) = self.pat_ident(pat.span, val_ident);
1742 let val_expr = self.expr_ident(pat.span, val_ident, val_pat_hid);
1743 let next_expr = self.expr_ident(pat.span, next_ident, next_pat_hid);
1744 let assign = self.arena.alloc(self.expr(
1746 hir::ExprKind::Assign(next_expr, val_expr, pat.span),
1749 let some_pat = self.pat_some(pat.span, val_pat);
1750 self.arm(some_pat, assign)
1753 // `::std::option::Option::None => break`
// The `break` targets the desugared loop, hence the loop scope wrapper.
1756 self.with_loop_scope(e.id, |this| this.expr_break(e.span, ThinVec::new()));
1757 let pat = self.pat_none(e.span);
1758 self.arm(pat, break_expr)
1762 let (iter_pat, iter_pat_nid) =
1763 self.pat_ident_binding_mode(desugared_span, iter, hir::BindingAnnotation::Mutable);
1765 // `match ::std::iter::Iterator::next(&mut iter) { ... }`
1767 let iter = self.expr_ident(desugared_span, iter, iter_pat_nid);
1768 let ref_mut_iter = self.expr_mut_addr_of(desugared_span, iter);
1769 let next_expr = self.expr_call_lang_item_fn(
1771 hir::LangItem::IteratorNext,
1772 arena_vec![self; ref_mut_iter],
1774 let arms = arena_vec![self; pat_arm, break_arm];
1776 self.expr_match(desugared_span, next_expr, arms, hir::MatchSource::ForLoopDesugar)
1778 let match_stmt = self.stmt_expr(desugared_span, match_expr);
1780 let next_expr = self.expr_ident(desugared_span, next_ident, next_pat_hid);
1783 let next_let = self.stmt_let_pat(
1788 hir::LocalSource::ForLoopDesugar,
1791 // `let <pat> = __next`
1792 let pat = self.lower_pat(pat);
1793 let pat_let = self.stmt_let_pat(
1798 hir::LocalSource::ForLoopDesugar,
// The user-written body is lowered inside the desugared loop's scope.
1801 let body_block = self.with_loop_scope(e.id, |this| this.lower_block(body, false));
1802 let body_expr = self.expr_block(body_block, ThinVec::new());
1803 let body_stmt = self.stmt_expr(body.span, body_expr);
1805 let loop_block = self.block_all(
1807 arena_vec![self; next_let, match_stmt, pat_let, body_stmt],
1811 // `[opt_ident]: loop { ... }`
1812 let kind = hir::ExprKind::Loop(
1815 hir::LoopSource::ForLoop,
1816 e.span.with_hi(orig_head_span.hi()),
// The loop expression reuses the original `for` expression's node id.
1819 self.arena.alloc(hir::Expr { hir_id: self.lower_node_id(e.id), kind, span: e.span });
1821 // `mut iter => { ... }`
1822 let iter_arm = self.arm(iter_pat, loop_expr);
1824 let into_iter_span = self.mark_span_with_reason(
1825 DesugaringKind::ForLoop(ForLoopLoc::IntoIter),
1830 // `match ::std::iter::IntoIterator::into_iter(<head>) { ... }`
1831 let into_iter_expr = {
1832 self.expr_call_lang_item_fn(
1834 hir::LangItem::IntoIterIntoIter,
1835 arena_vec![self; head],
1839 let match_expr = self.arena.alloc(self.expr_match(
1842 arena_vec![self; iter_arm],
1843 hir::MatchSource::ForLoopDesugar,
1846 let attrs: Vec<_> = e.attrs.iter().map(|a| self.lower_attr(a)).collect();
1848 // This is effectively `{ let _result = ...; _result }`.
1849 // The construct was introduced in #21984 and is necessary to make sure that
1850 // temporaries in the `head` expression are dropped and do not leak to the
1851 // surrounding scope of the `match` since the `match` is not a terminating scope.
1853 // Also, add the attributes to the outer returned expr node.
1854 self.expr_drop_temps_mut(desugared_span, match_expr, attrs.into())
1857 /// Desugar `ExprKind::Try` from: `<expr>?` into:
1859 /// match Try::into_result(<expr>) {
1860 /// Ok(val) => #[allow(unreachable_code)] val,
1861 /// Err(err) => #[allow(unreachable_code)]
1862 /// // If there is an enclosing `try {...}`:
1863 /// break 'catch_target Try::from_error(From::from(err)),
1865 /// return Try::from_error(From::from(err)),
1868 fn lower_expr_try(&mut self, span: Span, sub_expr: &Expr) -> hir::ExprKind<'hir> {
// Spans are marked as question-mark desugaring so the unstable `Try` trait
// methods are usable from stable code.
1869 let unstable_span = self.mark_span_with_reason(
1870 DesugaringKind::QuestionMark,
1872 self.allow_try_trait.clone(),
1874 let try_span = self.sess.source_map().end_point(span);
1875 let try_span = self.mark_span_with_reason(
1876 DesugaringKind::QuestionMark,
1878 self.allow_try_trait.clone(),
1881 // `Try::into_result(<expr>)`
1884 let sub_expr = self.lower_expr_mut(sub_expr);
1886 self.expr_call_lang_item_fn(
1888 hir::LangItem::TryIntoResult,
1889 arena_vec![self; sub_expr],
1893 // `#[allow(unreachable_code)]`
1895 // `allow(unreachable_code)`
1897 let allow_ident = Ident::new(sym::allow, span);
1898 let uc_ident = Ident::new(sym::unreachable_code, span);
1899 let uc_nested = attr::mk_nested_word_item(uc_ident);
1900 attr::mk_list_item(allow_ident, vec![uc_nested])
1902 attr::mk_attr_outer(allow)
1904 let attrs = vec![attr];
1906 // `Ok(val) => #[allow(unreachable_code)] val,`
1908 let val_ident = Ident::with_dummy_span(sym::val);
1909 let (val_pat, val_pat_nid) = self.pat_ident(span, val_ident);
1910 let val_expr = self.arena.alloc(self.expr_ident_with_attrs(
1914 ThinVec::from(attrs.clone()),
1916 let ok_pat = self.pat_ok(span, val_pat);
1917 self.arm(ok_pat, val_expr)
1920 // `Err(err) => #[allow(unreachable_code)]
1921 // return Try::from_error(From::from(err)),`
1923 let err_ident = Ident::with_dummy_span(sym::err);
1924 let (err_local, err_local_nid) = self.pat_ident(try_span, err_ident);
// `From::from(err)` converts the error before re-wrapping it.
1926 let err_expr = self.expr_ident_mut(try_span, err_ident, err_local_nid);
1927 self.expr_call_lang_item_fn(
1929 hir::LangItem::FromFrom,
1930 arena_vec![self; err_expr],
1933 let from_err_expr = self.wrap_in_try_constructor(
1934 hir::LangItem::TryFromError,
1939 let thin_attrs = ThinVec::from(attrs);
// Inside a `try` block, `?` breaks to the enclosing catch scope;
// otherwise it returns from the function.
1940 let catch_scope = self.catch_scopes.last().copied();
1941 let ret_expr = if let Some(catch_node) = catch_scope {
1942 let target_id = Ok(self.lower_node_id(catch_node));
1943 self.arena.alloc(self.expr(
1945 hir::ExprKind::Break(
1946 hir::Destination { label: None, target_id },
1947 Some(from_err_expr),
1952 self.arena.alloc(self.expr(
1954 hir::ExprKind::Ret(Some(from_err_expr)),
1959 let err_pat = self.pat_err(try_span, err_local);
1960 self.arm(err_pat, ret_expr)
1963 hir::ExprKind::Match(
1965 arena_vec![self; err_arm, ok_arm],
1966 hir::MatchSource::TryDesugar,
1970 // =========================================================================
1971 // Helper methods for building HIR.
1972 // =========================================================================
1974 /// Constructs a `true` or `false` literal expression.
1975 pub(super) fn expr_bool(&mut self, span: Span, val: bool) -> &'hir hir::Expr<'hir> {
1976 let lit = Spanned { span, node: LitKind::Bool(val) };
// Arena-allocate so the expression lives for `'hir`.
1977 self.arena.alloc(self.expr(span, hir::ExprKind::Lit(lit), ThinVec::new()))
1980 /// Wrap the given `expr` in a terminating scope using `hir::ExprKind::DropTemps`.
1982 /// In terms of drop order, it has the same effect as wrapping `expr` in
1983 /// `{ let _t = $expr; _t }` but should provide better compile-time performance.
1985 /// The drop order can be important in e.g. `if expr { .. }`.
1986 pub(super) fn expr_drop_temps(
1989 expr: &'hir hir::Expr<'hir>,
1991 ) -> &'hir hir::Expr<'hir> {
// Arena-allocating wrapper around `expr_drop_temps_mut`.
1992 self.arena.alloc(self.expr_drop_temps_mut(span, expr, attrs))
/// Non-allocating form of `expr_drop_temps`: builds the `DropTemps` node by value.
1995 pub(super) fn expr_drop_temps_mut(
1998 expr: &'hir hir::Expr<'hir>,
2000 ) -> hir::Expr<'hir> {
2001 self.expr(span, hir::ExprKind::DropTemps(expr), attrs)
2007 arg: &'hir hir::Expr<'hir>,
2008 arms: &'hir [hir::Arm<'hir>],
2009 source: hir::MatchSource,
2010 ) -> hir::Expr<'hir> {
// Construct a `match` expression node with no attributes.
2011 self.expr(span, hir::ExprKind::Match(arg, arms, source), ThinVec::new())
/// Build a `break` targeting the innermost enclosing loop (no label, no value).
2014 fn expr_break(&mut self, span: Span, attrs: AttrVec) -> &'hir hir::Expr<'hir> {
2015 let expr_break = hir::ExprKind::Break(self.lower_loop_destination(None), None);
2016 self.arena.alloc(self.expr(span, expr_break, attrs))
/// Build a mutable borrow expression `&mut <e>`.
2019 fn expr_mut_addr_of(&mut self, span: Span, e: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
2022 hir::ExprKind::AddrOf(hir::BorrowKind::Ref, hir::Mutability::Mut, e),
/// Build the unit expression `()` (an empty tuple).
2027 fn expr_unit(&mut self, sp: Span) -> &'hir hir::Expr<'hir> {
2028 self.arena.alloc(self.expr(sp, hir::ExprKind::Tup(&[]), ThinVec::new()))
2034 e: &'hir hir::Expr<'hir>,
2035 args: &'hir [hir::Expr<'hir>],
2036 ) -> hir::Expr<'hir> {
// Construct a call expression `e(args...)` with no attributes.
2037 self.expr(span, hir::ExprKind::Call(e, args), ThinVec::new())
2043 e: &'hir hir::Expr<'hir>,
2044 args: &'hir [hir::Expr<'hir>],
2045 ) -> &'hir hir::Expr<'hir> {
// Arena-allocating wrapper around `expr_call_mut`.
2046 self.arena.alloc(self.expr_call_mut(span, e, args))
/// Build a call to the function identified by `lang_item`.
2049 fn expr_call_lang_item_fn_mut(
2052 lang_item: hir::LangItem,
2053 args: &'hir [hir::Expr<'hir>],
2054 ) -> hir::Expr<'hir> {
2055 let path = self.arena.alloc(self.expr_lang_item_path(span, lang_item, ThinVec::new()));
2056 self.expr_call_mut(span, path, args)
/// Arena-allocating wrapper around `expr_call_lang_item_fn_mut`.
2059 fn expr_call_lang_item_fn(
2062 lang_item: hir::LangItem,
2063 args: &'hir [hir::Expr<'hir>],
2064 ) -> &'hir hir::Expr<'hir> {
2065 self.arena.alloc(self.expr_call_lang_item_fn_mut(span, lang_item, args))
/// Build a path expression referring to the given `lang_item`.
2068 fn expr_lang_item_path(
2071 lang_item: hir::LangItem,
2073 ) -> hir::Expr<'hir> {
2074 self.expr(span, hir::ExprKind::Path(hir::QPath::LangItem(lang_item, span)), attrs)
/// Arena-allocating wrapper around `expr_ident_mut`.
2077 pub(super) fn expr_ident(
2081 binding: hir::HirId,
2082 ) -> &'hir hir::Expr<'hir> {
2083 self.arena.alloc(self.expr_ident_mut(sp, ident, binding))
/// Build an expression referring to the local `binding` by `ident`, without attributes.
2086 pub(super) fn expr_ident_mut(
2090 binding: hir::HirId,
2091 ) -> hir::Expr<'hir> {
2092 self.expr_ident_with_attrs(sp, ident, binding, ThinVec::new())
/// Build a path expression that resolves directly to the local `binding`,
/// carrying the given attributes.
2095 fn expr_ident_with_attrs(
2099 binding: hir::HirId,
2101 ) -> hir::Expr<'hir> {
2102 let expr_path = hir::ExprKind::Path(hir::QPath::Resolved(
2104 self.arena.alloc(hir::Path {
// The resolution points straight at the local binding's HIR id.
2106 res: Res::Local(binding),
2107 segments: arena_vec![self; hir::PathSegment::from_ident(ident)],
2111 self.expr(span, expr_path, attrs)
/// Wrap `expr` in a compiler-generated `unsafe { ... }` block.
2114 fn expr_unsafe(&mut self, expr: &'hir hir::Expr<'hir>) -> hir::Expr<'hir> {
2115 let hir_id = self.next_id();
2116 let span = expr.span;
2119 hir::ExprKind::Block(
2120 self.arena.alloc(hir::Block {
// Mark the block as compiler-generated unsafe so checks/lints treat it accordingly.
2124 rules: hir::BlockCheckMode::UnsafeBlock(hir::UnsafeSource::CompilerGenerated),
2126 targeted_by_break: false,
/// Build an empty block expression `{}`.
2134 fn expr_block_empty(&mut self, span: Span) -> &'hir hir::Expr<'hir> {
2135 let blk = self.block_all(span, &[], None);
2136 let expr = self.expr_block(blk, ThinVec::new());
2137 self.arena.alloc(expr)
/// Wrap an already-lowered block `b` in a block expression (no label).
2140 pub(super) fn expr_block(
2142 b: &'hir hir::Block<'hir>,
2144 ) -> hir::Expr<'hir> {
2145 self.expr(b.span, hir::ExprKind::Block(b, None), attrs)
2151 kind: hir::ExprKind<'hir>,
2153 ) -> hir::Expr<'hir> {
// Allocate a fresh HIR id and attach `attrs` to it before building the node.
2154 let hir_id = self.next_id();
2155 self.lower_attrs(hir_id, &attrs);
2156 hir::Expr { hir_id, kind, span }
2162 expr: &'hir hir::Expr<'hir>,
2164 ) -> hir::ExprField<'hir> {
// Build a non-shorthand struct-literal field with a fresh HIR id.
2165 hir::ExprField { hir_id: self.next_id(), ident, span, expr, is_shorthand: false }
2168 fn arm(&mut self, pat: &'hir hir::Pat<'hir>, expr: &'hir hir::Expr<'hir>) -> hir::Arm<'hir> {
2169 hir::Arm { hir_id: self.next_id(), pat, guard: None, span: expr.span, body: expr }