Auto merge of #69590 - Dylan-DPC:rollup-i3z0sic, r=Dylan-DPC

diff --git a/src/librustc_parse/parser/expr.rs b/src/librustc_parse/parser/expr.rs
index 97daa91eed196ee08c351b18f3a632908b3ae473..71ca8fba0b4799195288271d75470ce2df64ac0a 100644
--- a/src/librustc_parse/parser/expr.rs
+++ b/src/librustc_parse/parser/expr.rs
@@ -97,15 +97,14 @@ pub(super) fn parse_anon_const_expr(&mut self) -> PResult<'a, AnonConst> {
     fn parse_expr_catch_underscore(&mut self) -> PResult<'a, P<Expr>> {
         match self.parse_expr() {
             Ok(expr) => Ok(expr),
-            Err(mut err) => match self.token.kind {
+            Err(mut err) => match self.normalized_token.kind {
                 token::Ident(name, false)
                     if name == kw::Underscore && self.look_ahead(1, |t| t == &token::Comma) =>
                 {
                     // Special-case handling of `foo(_, _, _)`
                     err.emit();
-                    let sp = self.token.span;
                     self.bump();
-                    Ok(self.mk_expr(sp, ExprKind::Err, AttrVec::new()))
+                    Ok(self.mk_expr(self.prev_token.span, ExprKind::Err, AttrVec::new()))
                 }
                 _ => Err(err),
             },
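
This hunk (and several later ones) follows the same pattern: rather than saving `self.token.span` before `bump()`, the parser advances first and reads the span of the just-consumed token back from `self.prev_token`. A minimal standalone sketch of that idea, using hypothetical `Tok`/`Cursor` types rather than rustc's real `Token`/`Parser`:

    #[derive(Clone, Copy, Debug, PartialEq)]
    struct Span(u32, u32);

    struct Tok { span: Span }

    struct Cursor { token: Tok, prev_token: Tok, rest: Vec<Tok> }

    impl Cursor {
        // Advance to the next token; the token just moved past becomes `prev_token`.
        fn bump(&mut self) {
            let next = self.rest.pop().unwrap_or(Tok { span: Span(0, 0) });
            self.prev_token = std::mem::replace(&mut self.token, next);
        }

        // Equivalent to `let sp = self.token.span; self.bump(); sp`, without the temporary.
        fn consume_and_span(&mut self) -> Span {
            self.bump();
            self.prev_token.span
        }
    }
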
@@ -166,7 +165,7 @@ pub(super) fn parse_assoc_expr_with(
         while let Some(op) = self.check_assoc_op() {
             // Adjust the span for interpolated LHS to point to the `$lhs` token
             // and not to what it refers to.
-            let lhs_span = match self.unnormalized_prev_token.kind {
+            let lhs_span = match self.prev_token.kind {
                 TokenKind::Interpolated(..) => self.prev_span,
                 _ => lhs.span,
             };
@@ -225,12 +224,8 @@ pub(super) fn parse_assoc_expr_with(
 
             // Make sure that the span of the parent node is larger than the span of lhs and rhs,
             // including the attributes.
-            let lhs_span = lhs
-                .attrs
-                .iter()
-                .filter(|a| a.style == AttrStyle::Outer)
-                .next()
-                .map_or(lhs_span, |a| a.span);
+            let lhs_span =
+                lhs.attrs.iter().find(|a| a.style == AttrStyle::Outer).map_or(lhs_span, |a| a.span);
             let span = lhs_span.to(rhs.span);
             lhs = match op {
                 AssocOp::Add
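
The cleanup in the hunk above replaces a hand-rolled `.filter(..).next()` chain with the equivalent `Iterator::find`. A standalone illustration of the equivalence (not part of the patch):

    let xs = [1, 2, 3, 4];
    let via_filter = xs.iter().filter(|&&x| x % 2 == 0).next();
    let via_find = xs.iter().find(|&&x| x % 2 == 0);
    assert_eq!(via_filter, via_find); // both yield Some(&2)
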
@@ -337,7 +332,7 @@ fn error_found_expr_would_be_stmt(&self, lhs: &Expr) {
     /// Also performs recovery for `and` / `or` which are mistaken for `&&` and `||` respectively.
     fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
         Some(Spanned {
-            node: match (AssocOp::from_token(&self.token), &self.token.kind) {
+            node: match (AssocOp::from_token(&self.token), &self.normalized_token.kind) {
                 (Some(op), _) => op,
                 (None, token::Ident(sym::and, false)) => {
                     self.error_bad_logical_op("and", "&&", "conjunction");
@@ -349,7 +344,7 @@ fn check_assoc_op(&self) -> Option<Spanned<AssocOp>> {
                 }
                 _ => return None,
             },
-            span: self.token.span,
+            span: self.normalized_token.span,
         })
     }
 
@@ -441,7 +436,7 @@ fn parse_prefix_expr(&mut self, attrs: Option<AttrVec>) -> PResult<'a, P<Expr>>
         let attrs = self.parse_or_use_outer_attributes(attrs)?;
         let lo = self.token.span;
         // Note: when adding new unary operators, don't forget to adjust TokenKind::can_begin_expr()
-        let (hi, ex) = match self.token.kind {
+        let (hi, ex) = match self.normalized_token.kind {
             token::Not => self.parse_unary_expr(lo, UnOp::Not), // `!expr`
             token::Tilde => self.recover_tilde_expr(lo),        // `~expr`
             token::BinOp(token::Minus) => self.parse_unary_expr(lo, UnOp::Neg), // `-expr`
@@ -527,7 +522,7 @@ fn interpolated_or_expr_span(
     ) -> PResult<'a, (Span, P<Expr>)> {
         expr.map(|e| {
             (
-                match self.unnormalized_prev_token.kind {
+                match self.prev_token.kind {
                     TokenKind::Interpolated(..) => self.prev_span,
                     _ => e.span,
                 },
@@ -708,7 +703,7 @@ fn parse_dot_or_call_expr_with_(&mut self, mut e: P<Expr>, lo: Span) -> PResult<
     }
 
     fn parse_dot_suffix_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>> {
-        match self.token.kind {
+        match self.normalized_token.kind {
             token::Ident(..) => self.parse_dot_suffix(base, lo),
             token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
                 Ok(self.parse_tuple_field_access_expr(lo, base, symbol, suffix))
@@ -758,7 +753,7 @@ fn recover_field_access_by_float_lit(
                 s.print_usize(float.trunc() as usize);
                 s.pclose();
                 s.s.word(".");
-                s.s.word(fstr.splitn(2, ".").last().unwrap().to_string())
+                s.s.word(fstr.splitn(2, '.').last().unwrap().to_string())
             });
             err.span_suggestion(
                 lo.to(self.prev_span),
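
The one-character fix above swaps a `&str` pattern for a `char` pattern in `str::splitn`; both split on the same separator, and the `char` form is simply the idiomatic choice for a single-character separator. A quick standalone check (not part of the patch):

    let fstr = "1.23";
    assert_eq!(fstr.splitn(2, '.').last(), Some("23"));
    assert_eq!(fstr.splitn(2, ".").last(), Some("23")); // equivalent; clippy's `single_char_pattern` prefers the `char` form
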
@@ -777,8 +772,8 @@ fn parse_tuple_field_access_expr(
         field: Symbol,
         suffix: Option<Symbol>,
     ) -> P<Expr> {
-        let span = self.token.span;
         self.bump();
+        let span = self.prev_token.span;
         let field = ExprKind::Field(base, Ident::new(field, span));
         self.expect_no_suffix(span, "a tuple index", suffix);
         self.mk_expr(lo.to(span), field, AttrVec::new())
@@ -802,7 +797,7 @@ fn parse_index_expr(&mut self, lo: Span, base: P<Expr>) -> PResult<'a, P<Expr>>
 
     /// Assuming we have just parsed `.`, continue parsing into an expression.
     fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
-        if self.token.span.rust_2018() && self.eat_keyword(kw::Await) {
+        if self.normalized_token.span.rust_2018() && self.eat_keyword(kw::Await) {
             return self.mk_await_expr(self_arg, lo);
         }
 
@@ -916,7 +911,7 @@ fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
             //       |             ^ expected expression
             self.bump();
             Ok(self.mk_expr_err(self.token.span))
-        } else if self.token.span.rust_2018() {
+        } else if self.normalized_token.span.rust_2018() {
             // `Span::rust_2018()` is somewhat expensive; don't get it repeatedly.
             if self.check_keyword(kw::Async) {
                 if self.is_async_block() {
@@ -1346,7 +1341,7 @@ fn parse_closure_expr(&mut self, attrs: AttrVec) -> PResult<'a, P<Expr>> {
             if self.eat_keyword(kw::Static) { Movability::Static } else { Movability::Movable };
 
         let asyncness =
-            if self.token.span.rust_2018() { self.parse_asyncness() } else { Async::No };
+            if self.normalized_token.span.rust_2018() { self.parse_asyncness() } else { Async::No };
         if asyncness.is_async() {
             // Feature-gate `async ||` closures.
             self.sess.gated_spans.gate(sym::async_closure, self.prev_span);
@@ -1560,9 +1555,8 @@ fn parse_loop_expr(
 
     fn eat_label(&mut self) -> Option<Label> {
         self.token.lifetime().map(|ident| {
-            let span = self.token.span;
             self.bump();
-            Label { ident: Ident::new(ident.name, span) }
+            Label { ident }
         })
     }
 
@@ -1625,10 +1619,10 @@ pub(super) fn parse_arm(&mut self) -> PResult<'a, Arm> {
         let hi = self.token.span;
 
         if require_comma {
-            let cm = self.sess.source_map();
+            let sm = self.sess.source_map();
             self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)]).map_err(
                 |mut err| {
-                    match (cm.span_to_lines(expr.span), cm.span_to_lines(arm_start_span)) {
+                    match (sm.span_to_lines(expr.span), sm.span_to_lines(arm_start_span)) {
                         (Ok(ref expr_lines), Ok(ref arm_start_lines))
                             if arm_start_lines.lines[0].end_col == expr_lines.lines[0].end_col
                                 && expr_lines.lines.len() == 2
@@ -1704,7 +1698,7 @@ fn is_do_catch_block(&self) -> bool {
     fn is_try_block(&self) -> bool {
         self.token.is_keyword(kw::Try) &&
         self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
-        self.token.span.rust_2018() &&
+        self.normalized_token.span.rust_2018() &&
         // Prevent `while try {} {}`, `if try {} {} else {}`, etc.
         !self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
     }
@@ -1854,13 +1848,12 @@ pub(super) fn parse_struct_expr(
 
     /// Use in case of error after field-looking code: `S { foo: () with a }`.
     fn find_struct_error_after_field_looking_code(&self) -> Option<Field> {
-        if let token::Ident(name, _) = self.token.kind {
+        if let token::Ident(name, _) = self.normalized_token.kind {
             if !self.token.is_reserved_ident() && self.look_ahead(1, |t| *t == token::Colon) {
-                let span = self.token.span;
                 return Some(ast::Field {
-                    ident: Ident::new(name, span),
-                    span,
-                    expr: self.mk_expr_err(span),
+                    ident: Ident::new(name, self.normalized_token.span),
+                    span: self.token.span,
+                    expr: self.mk_expr_err(self.token.span),
                     is_shorthand: false,
                     attrs: AttrVec::new(),
                     id: DUMMY_NODE_ID,