13 use crate::lexer::UnmatchedBrace;
14 pub use attr_wrapper::AttrWrapper;
15 pub use diagnostics::AttemptLocalParseRecovery;
16 pub(crate) use item::FnParseMode;
17 pub use pat::{CommaRecoveryMode, RecoverColon, RecoverComma};
18 pub use path::PathStyle;
20 use rustc_ast::ptr::P;
21 use rustc_ast::token::{self, Delimiter, Nonterminal, Token, TokenKind};
22 use rustc_ast::tokenstream::{AttributesData, DelimSpan, Spacing};
23 use rustc_ast::tokenstream::{TokenStream, TokenTree, TokenTreeCursor};
24 use rustc_ast::util::case::Case;
25 use rustc_ast::AttrId;
26 use rustc_ast::DUMMY_NODE_ID;
27 use rustc_ast::{self as ast, AnonConst, AttrStyle, Const, DelimArgs, Extern};
28 use rustc_ast::{Async, AttrArgs, AttrArgsEq, Expr, ExprKind, MacDelimiter, Mutability, StrLit};
29 use rustc_ast::{HasAttrs, HasTokens, Unsafe, Visibility, VisibilityKind};
30 use rustc_ast_pretty::pprust;
31 use rustc_data_structures::fx::FxHashMap;
32 use rustc_errors::PResult;
34 Applicability, DiagnosticBuilder, ErrorGuaranteed, FatalError, IntoDiagnostic, MultiSpan,
36 use rustc_session::parse::ParseSess;
37 use rustc_span::source_map::{Span, DUMMY_SP};
38 use rustc_span::symbol::{kw, sym, Ident, Symbol};
41 use std::{cmp, mem, slice};
44 DocCommentDoesNotDocumentAnything, IncorrectVisibilityRestriction, MismatchedClosingDelimiter,
49 struct Restrictions: u8 {
50 const STMT_EXPR = 1 << 0;
51 const NO_STRUCT_LITERAL = 1 << 1;
52 const CONST_EXPR = 1 << 2;
53 const ALLOW_LET = 1 << 3;
57 #[derive(Clone, Copy, PartialEq, Debug)]
64 #[derive(Clone, Copy, PartialEq, Debug)]
70 /// Whether or not we should force collection of tokens for an AST node,
71 /// regardless of whether or not it has attributes.
72 #[derive(Clone, Copy, PartialEq)]
73 pub enum ForceCollect {
78 #[derive(Debug, Eq, PartialEq)]
79 pub enum TrailingToken {
83 /// If the trailing token is a comma, then capture it.
84 /// Otherwise, ignore the trailing token.
88 /// Like `maybe_whole_expr`, but for things other than expressions.
90 macro_rules! maybe_whole {
91 ($p:expr, $constructor:ident, |$x:ident| $e:expr) => {
92 if let token::Interpolated(nt) = &$p.token.kind {
93 if let token::$constructor(x) = &**nt {
102 /// If the next tokens are ill-formed `$ty::`, recover them as `<$ty>::`.
104 macro_rules! maybe_recover_from_interpolated_ty_qpath {
105 ($self: expr, $allow_qpath_recovery: expr) => {
106 if $allow_qpath_recovery
107 && $self.may_recover()
108 && $self.look_ahead(1, |t| t == &token::ModSep)
109 && let token::Interpolated(nt) = &$self.token.kind
110 && let token::NtTy(ty) = &**nt
114 return $self.maybe_recover_from_bad_qpath_stage_2($self.prev_token.span, ty);
119 #[derive(Clone, Copy)]
126 pub struct Parser<'a> {
127 pub sess: &'a ParseSess,
128 /// The current token.
130 /// The spacing for the current token.
131 pub token_spacing: Spacing,
132 /// The previous token.
133 pub prev_token: Token,
134 pub capture_cfg: bool,
135 restrictions: Restrictions,
136 expected_tokens: Vec<TokenType>,
137 // Important: This must only be advanced from `bump` to ensure that
138 // `token_cursor.num_next_calls` is updated properly.
139 token_cursor: TokenCursor,
140 desugar_doc_comments: bool,
141 /// This field is used to keep track of how many left angle brackets we have seen. This is
142 /// required in order to detect extra leading left angle brackets (`<` characters) and error
145 /// See the comments in the `parse_path_segment` function for more details.
146 unmatched_angle_bracket_count: u32,
147 max_angle_bracket_count: u32,
148 /// A list of all unclosed delimiters found by the lexer. If an entry is used for error recovery
149 /// it gets removed from here. Every entry left at the end gets emitted as an independent
151 pub(super) unclosed_delims: Vec<UnmatchedBrace>,
152 last_unexpected_token_span: Option<Span>,
153 /// Span pointing at the `:` for the last type ascription the parser has seen, and whether it
154 /// looked like it could have been a mistyped path or literal (e.g. `Option:Some(42)`).
155 pub last_type_ascription: Option<(Span, bool /* likely path typo */)>,
156 /// If present, this `Parser` is not parsing Rust code but rather a macro call.
157 subparser_name: Option<&'static str>,
158 capture_state: CaptureState,
159 /// This allows us to recover when the user forgets to add braces around
160 /// multiple statements in the closure body.
161 pub current_closure: Option<ClosureSpans>,
162 /// Whether the parser is allowed to do recovery.
163 /// This is disabled when parsing macro arguments; see #103534
164 pub recovery: Recovery,
167 // This type is used a lot, e.g. it's cloned when matching many declarative macro rules with nonterminals. Make sure
168 // it doesn't unintentionally get bigger.
169 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
170 rustc_data_structures::static_assert_size!(Parser<'_>, 312);
172 /// Stores span information about a closure.
174 pub struct ClosureSpans {
175 pub whole_closure: Span,
176 pub closing_pipe: Span,
180 /// Indicates a range of tokens that should be replaced by
181 /// the tokens in the provided vector. This is used in two
182 /// places during token collection:
184 /// 1. During the parsing of an AST node that may have a `#[derive]`
185 /// attribute, we parse a nested AST node that has `#[cfg]` or `#[cfg_attr]`.
186 /// In this case, we use a `ReplaceRange` to replace the entire inner AST node
187 /// with `FlatToken::AttrTarget`, allowing us to perform eager cfg-expansion
188 /// on an `AttrTokenStream`.
190 /// 2. When we parse an inner attribute while collecting tokens. We
191 /// remove inner attributes from the token stream entirely, and
192 /// instead track them through the `attrs` field on the AST node.
193 /// This allows us to easily manipulate them (for example, removing
194 /// the first macro inner attribute to invoke a proc-macro).
195 /// When we create a `TokenStream`, the inner attributes get inserted
196 /// into the proper place in the token stream.
197 pub type ReplaceRange = (Range<u32>, Vec<(FlatToken, Spacing)>);
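// For example (sketch): when collecting tokens for
// `#[derive(Debug)] struct S { #[cfg(FALSE)] field: u8 }`, the token range of the
// `field` item gets a `ReplaceRange` mapping it to a single `FlatToken::AttrTarget`,
// so eager cfg-expansion can be performed on the resulting `AttrTokenStream`.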
199 /// Controls how we capture tokens. Capturing can be expensive,
200 /// so we try to avoid performing capturing in cases where
201 /// we will never need an `AttrTokenStream`.
202 #[derive(Copy, Clone)]
204 /// We aren't performing any capturing - this is the default mode.
206 /// We are capturing tokens.
211 struct CaptureState {
212 capturing: Capturing,
213 replace_ranges: Vec<ReplaceRange>,
214 inner_attr_ranges: FxHashMap<AttrId, ReplaceRange>,
217 impl<'a> Drop for Parser<'a> {
219 emit_unclosed_delims(&mut self.unclosed_delims, &self.sess);
223 /// Iterator over a `TokenStream` that produces `Token`s. It's a bit odd that
224 /// we (a) lex tokens into a nice tree structure (`TokenStream`), and then (b)
225 /// use this type to emit them as a linear sequence. But a linear sequence is
226 /// what the parser expects, for the most part.
229 // Cursor for the current (innermost) token stream. The delimiters for this
230 // token stream are found in `self.stack.last()`; when that is `None` then
231 // we are in the outermost token stream which never has delimiters.
232 tree_cursor: TokenTreeCursor,
234 // Token streams surrounding the current one. The delimiters for stack[n]'s
235 // tokens are in `stack[n-1]`. `stack[0]` (when present) has no delimiters
236 // because it's the outermost token stream which never has delimiters.
237 stack: Vec<(TokenTreeCursor, Delimiter, DelimSpan)>,
239 desugar_doc_comments: bool,
241 // Counts the number of calls to `{,inlined_}next`.
242 num_next_calls: usize,
244 // During parsing, we may sometimes need to 'unglue' a
245 // glued token into two component tokens
246 // (e.g. '>>' into '>' and '>'), so that the parser
247 // can consume them one at a time. This process
248 // bypasses the normal capturing mechanism
249 // (e.g. `num_next_calls` will not be incremented),
250 // since the 'unglued' tokens do not exist in
251 // the original `TokenStream`.
253 // If we end up consuming both unglued tokens,
254 // then this is not an issue - we'll end up
255 // capturing the single 'glued' token.
257 // However, in certain circumstances, we may
258 // want to capture just the first 'unglued' token.
259 // For example, capturing the `Vec<u8>`
260 // in `Option<Vec<u8>>` requires us to unglue
261 // the trailing `>>` token. The `break_last_token`
262 // field is used to track this token - it gets
263 // appended to the captured stream when
264 // we evaluate a `LazyAttrTokenStream`.
265 break_last_token: bool,
269 fn next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
270 self.inlined_next(desugar_doc_comments)
273 /// This always-inlined version should only be used on hot code paths.
275 fn inlined_next(&mut self, desugar_doc_comments: bool) -> (Token, Spacing) {
277 // FIXME: we currently don't return `Delimiter` open/close delims. To fix #67062 we will
278 // need to, whereupon the `delim != Delimiter::Invisible` conditions below can be
280 if let Some(tree) = self.tree_cursor.next_ref() {
282 &TokenTree::Token(ref token, spacing) => match (desugar_doc_comments, token) {
283 (true, &Token { kind: token::DocComment(_, attr_style, data), span }) => {
284 let desugared = self.desugar(attr_style, data, span);
285 self.tree_cursor.replace_prev_and_rewind(desugared);
286 // Continue to get the first token of the desugared doc comment.
289 debug_assert!(!matches!(
291 token::OpenDelim(_) | token::CloseDelim(_)
293 return (token.clone(), spacing);
296 &TokenTree::Delimited(sp, delim, ref tts) => {
297 let trees = tts.clone().into_trees();
298 self.stack.push((mem::replace(&mut self.tree_cursor, trees), delim, sp));
299 if delim != Delimiter::Invisible {
300 return (Token::new(token::OpenDelim(delim), sp.open), Spacing::Alone);
302 // No open delimiter to return; continue on to the next iteration.
305 } else if let Some((tree_cursor, delim, span)) = self.stack.pop() {
306 // We have exhausted this token stream. Move back to its parent token stream.
307 self.tree_cursor = tree_cursor;
308 if delim != Delimiter::Invisible {
309 return (Token::new(token::CloseDelim(delim), span.close), Spacing::Alone);
311 // No close delimiter to return; continue on to the next iteration.
313 // We have exhausted the outermost token stream.
314 return (Token::new(token::Eof, DUMMY_SP), Spacing::Alone);
319 // Desugar a doc comment into something like `#[doc = r"foo"]`.
320 fn desugar(&mut self, attr_style: AttrStyle, data: Symbol, span: Span) -> Vec<TokenTree> {
321 // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
322 // required to wrap the text. E.g.
323 // - `abc d` is wrapped as `r"abc d"` (num_of_hashes = 0)
324 // - `abc "d"` is wrapped as `r#"abc "d""#` (num_of_hashes = 1)
325 // - `abc "##d##"` is wrapped as `r###"abc "##d##""###` (num_of_hashes = 3)
326 let mut num_of_hashes = 0;
328 for ch in data.as_str().chars() {
331 '#' if count > 0 => count + 1,
334 num_of_hashes = cmp::max(num_of_hashes, count);
337 // `/// foo` becomes `doc = r"foo"`.
338 let delim_span = DelimSpan::from_single(span);
339 let body = TokenTree::Delimited(
343 TokenTree::token_alone(token::Ident(sym::doc, false), span),
344 TokenTree::token_alone(token::Eq, span),
345 TokenTree::token_alone(
346 TokenKind::lit(token::StrRaw(num_of_hashes), data, None),
351 .collect::<TokenStream>(),
354 if attr_style == AttrStyle::Inner {
356 TokenTree::token_alone(token::Pound, span),
357 TokenTree::token_alone(token::Not, span),
361 vec![TokenTree::token_alone(token::Pound, span), body]
366 #[derive(Debug, Clone, PartialEq)]
379 fn to_string(&self) -> String {
381 TokenType::Token(t) => format!("`{}`", pprust::token_kind_to_string(t)),
382 TokenType::Keyword(kw) => format!("`{}`", kw),
383 TokenType::Operator => "an operator".to_string(),
384 TokenType::Lifetime => "lifetime".to_string(),
385 TokenType::Ident => "identifier".to_string(),
386 TokenType::Path => "path".to_string(),
387 TokenType::Type => "type".to_string(),
388 TokenType::Const => "a const expression".to_string(),
393 #[derive(Copy, Clone, Debug)]
394 enum TokenExpectType {
399 /// A sequence separator.
401 /// The separator token.
402 sep: Option<TokenKind>,
403 /// `true` if a trailing separator is allowed.
404 trailing_sep_allowed: bool,
408 fn trailing_allowed(t: TokenKind) -> SeqSep {
409 SeqSep { sep: Some(t), trailing_sep_allowed: true }
412 fn none() -> SeqSep {
413 SeqSep { sep: None, trailing_sep_allowed: false }
417 pub enum FollowedByType {
422 #[derive(Clone, Copy, PartialEq, Eq)]
423 pub enum TokenDescription {
430 impl TokenDescription {
431 pub fn from_token(token: &Token) -> Option<Self> {
433 _ if token.is_special_ident() => Some(TokenDescription::ReservedIdentifier),
434 _ if token.is_used_keyword() => Some(TokenDescription::Keyword),
435 _ if token.is_unused_keyword() => Some(TokenDescription::ReservedKeyword),
436 token::DocComment(..) => Some(TokenDescription::DocComment),
442 pub(super) fn token_descr(token: &Token) -> String {
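// For example (sketch): a `fn` token is described as "keyword `fn`", a doc comment
// as "doc comment `/// ...`", and an ordinary identifier `foo` simply as "`foo`".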
443 let name = pprust::token_to_string(token).to_string();
445 let kind = TokenDescription::from_token(token).map(|kind| match kind {
446 TokenDescription::ReservedIdentifier => "reserved identifier",
447 TokenDescription::Keyword => "keyword",
448 TokenDescription::ReservedKeyword => "reserved keyword",
449 TokenDescription::DocComment => "doc comment",
452 if let Some(kind) = kind { format!("{} `{}`", kind, name) } else { format!("`{}`", name) }
455 impl<'a> Parser<'a> {
459 desugar_doc_comments: bool,
460 subparser_name: Option<&'static str>,
462 let mut parser = Parser {
464 token: Token::dummy(),
465 token_spacing: Spacing::Alone,
466 prev_token: Token::dummy(),
468 restrictions: Restrictions::empty(),
469 expected_tokens: Vec::new(),
470 token_cursor: TokenCursor {
471 tree_cursor: tokens.into_trees(),
474 desugar_doc_comments,
475 break_last_token: false,
477 desugar_doc_comments,
478 unmatched_angle_bracket_count: 0,
479 max_angle_bracket_count: 0,
480 unclosed_delims: Vec::new(),
481 last_unexpected_token_span: None,
482 last_type_ascription: None,
484 capture_state: CaptureState {
485 capturing: Capturing::No,
486 replace_ranges: Vec::new(),
487 inner_attr_ranges: Default::default(),
489 current_closure: None,
490 recovery: Recovery::Allowed,
493 // Make parser point to the first token.
499 pub fn recovery(mut self, recovery: Recovery) -> Self {
500 self.recovery = recovery;
504 /// Whether the parser is allowed to recover from broken code.
506 /// If this returns false, recovering broken code into valid code (especially if this recovery does lookahead)
507 /// is not allowed. All recovery done by the parser must be gated behind this check.
509 /// Technically, this only needs to restrict eager recovery by doing lookahead at more tokens.
510 /// But making the distinction is very subtle, and simply forbidding all recovery is a lot simpler to uphold.
511 fn may_recover(&self) -> bool {
512 matches!(self.recovery, Recovery::Allowed)
515 pub fn unexpected<T>(&mut self) -> PResult<'a, T> {
516 match self.expect_one_of(&[], &[]) {
518 // We can get `Ok(true)` from `recover_closing_delimiter`
519 // which is called in `expected_one_of_not_found`.
520 Ok(_) => FatalError.raise(),
524 /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
525 pub fn expect(&mut self, t: &TokenKind) -> PResult<'a, bool /* recovered */> {
526 if self.expected_tokens.is_empty() {
527 if self.token == *t {
531 self.unexpected_try_recover(t)
534 self.expect_one_of(slice::from_ref(t), &[])
538 /// Expects the next token to be an edible or inedible token. If edible,
539 /// then consumes it; if inedible, then returns without consuming
540 /// anything. Signals a fatal error if the next token is unexpected.
541 pub fn expect_one_of(
543 edible: &[TokenKind],
544 inedible: &[TokenKind],
545 ) -> PResult<'a, bool /* recovered */> {
546 if edible.contains(&self.token.kind) {
549 } else if inedible.contains(&self.token.kind) {
550 // leave it in the input
552 } else if self.last_unexpected_token_span == Some(self.token.span) {
555 self.expected_one_of_not_found(edible, inedible)
559 // Public for rustfmt usage.
560 pub fn parse_ident(&mut self) -> PResult<'a, Ident> {
561 self.parse_ident_common(true)
564 fn ident_or_err(&mut self) -> PResult<'a, (Ident, /* is_raw */ bool)> {
565 self.token.ident().ok_or_else(|| match self.prev_token.kind {
566 TokenKind::DocComment(..) => DocCommentDoesNotDocumentAnything {
567 span: self.prev_token.span,
570 .into_diagnostic(&self.sess.span_diagnostic),
571 _ => self.expected_ident_found(),
575 fn parse_ident_common(&mut self, recover: bool) -> PResult<'a, Ident> {
576 let (ident, is_raw) = self.ident_or_err()?;
577 if !is_raw && ident.is_reserved() {
578 let mut err = self.expected_ident_found();
589 /// Checks if the next token is `tok`, and returns `true` if so.
591 /// This method will automatically add `tok` to `expected_tokens` if `tok` is not
593 fn check(&mut self, tok: &TokenKind) -> bool {
594 let is_present = self.token == *tok;
596 self.expected_tokens.push(TokenType::Token(tok.clone()));
601 fn check_noexpect(&self, tok: &TokenKind) -> bool {
605 /// Consumes a token `tok` if it exists. Returns whether the given token was present.
607 /// The main purpose of this function is to reduce the clutter that the normal `eat`
608 /// method could introduce in the suggestions list in some cases.
609 pub fn eat_noexpect(&mut self, tok: &TokenKind) -> bool {
610 let is_present = self.check_noexpect(tok);
617 /// Consumes a token `tok` if it exists. Returns whether the given token was present.
618 pub fn eat(&mut self, tok: &TokenKind) -> bool {
619 let is_present = self.check(tok);
626 /// If the next token is the given keyword, returns `true` without eating it.
627 /// An expectation is also added for diagnostics purposes.
628 fn check_keyword(&mut self, kw: Symbol) -> bool {
629 self.expected_tokens.push(TokenType::Keyword(kw));
630 self.token.is_keyword(kw)
633 fn check_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
634 if self.check_keyword(kw) {
638 if case == Case::Insensitive
639 && let Some((ident, /* is_raw */ false)) = self.token.ident()
640 && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() {
647 /// If the next token is the given keyword, eats it and returns `true`.
648 /// Otherwise, returns `false`. An expectation is also added for diagnostics purposes.
649 // Public for rustfmt usage.
650 pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
651 if self.check_keyword(kw) {
659 /// Eats a keyword, optionally ignoring the case.
660 /// If the case differs (and is ignored), an error is issued.
661 /// This is useful for recovery.
662 fn eat_keyword_case(&mut self, kw: Symbol, case: Case) -> bool {
663 if self.eat_keyword(kw) {
667 if case == Case::Insensitive
668 && let Some((ident, /* is_raw */ false)) = self.token.ident()
669 && ident.as_str().to_lowercase() == kw.as_str().to_lowercase() {
671 .struct_span_err(ident.span, format!("keyword `{kw}` is written in the wrong case"))
674 "write it in the correct case",
676 Applicability::MachineApplicable
686 fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
687 if self.token.is_keyword(kw) {
695 /// If the given word is not a keyword, signals an error.
696 /// If the next token is not the given word, signals an error.
697 /// Otherwise, eats it.
698 fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
699 if !self.eat_keyword(kw) { self.unexpected() } else { Ok(()) }
702 /// Is the given keyword `kw` followed by a non-reserved identifier?
703 fn is_kw_followed_by_ident(&self, kw: Symbol) -> bool {
704 self.token.is_keyword(kw) && self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
707 fn check_or_expected(&mut self, ok: bool, typ: TokenType) -> bool {
711 self.expected_tokens.push(typ);
716 fn check_ident(&mut self) -> bool {
717 self.check_or_expected(self.token.is_ident(), TokenType::Ident)
720 fn check_path(&mut self) -> bool {
721 self.check_or_expected(self.token.is_path_start(), TokenType::Path)
724 fn check_type(&mut self) -> bool {
725 self.check_or_expected(self.token.can_begin_type(), TokenType::Type)
728 fn check_const_arg(&mut self) -> bool {
729 self.check_or_expected(self.token.can_begin_const_arg(), TokenType::Const)
732 fn check_const_closure(&self) -> bool {
733 self.is_keyword_ahead(0, &[kw::Const])
734 && self.look_ahead(1, |t| match &t.kind {
735 token::Ident(kw::Move | kw::Static | kw::Async, _)
737 | token::BinOp(token::Or) => true,
742 fn check_inline_const(&self, dist: usize) -> bool {
743 self.is_keyword_ahead(dist, &[kw::Const])
744 && self.look_ahead(dist + 1, |t| match &t.kind {
745 token::Interpolated(nt) => matches!(**nt, token::NtBlock(..)),
746 token::OpenDelim(Delimiter::Brace) => true,
751 /// Checks whether the next token is either `+` or `+=`, returning `true` if so.
752 /// Otherwise, returns `false`.
753 fn check_plus(&mut self) -> bool {
754 self.check_or_expected(
755 self.token.is_like_plus(),
756 TokenType::Token(token::BinOp(token::Plus)),
760 /// Eats the expected token if it's present, possibly breaking
761 /// compound tokens like multi-character operators in the process.
762 /// Returns `true` if the token was eaten.
763 fn break_and_eat(&mut self, expected: TokenKind) -> bool {
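// For example (sketch): while parsing the generics of `Option<Vec<u8>>`, the parser
// reaches a single `>>` token; `break_and_eat(token::Gt)` splits it into two `>`
// tokens, eats the first, leaves the second as the current token, and returns `true`.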
764 if self.token.kind == expected {
768 match self.token.kind.break_two_token_op() {
769 Some((first, second)) if first == expected => {
770 let first_span = self.sess.source_map().start_point(self.token.span);
771 let second_span = self.token.span.with_lo(first_span.hi());
772 self.token = Token::new(first, first_span);
773 // Keep track of this token - if we end token capturing now,
774 // we'll want to append this token to the captured stream.
776 // If we consume any additional tokens, then this token
777 // is not needed (we'll capture the entire 'glued' token),
778 // and `bump` will set this field back to `false`.
779 self.token_cursor.break_last_token = true;
780 // Use the spacing of the glued token as the spacing
781 // of the unglued second token.
782 self.bump_with((Token::new(second, second_span), self.token_spacing));
786 self.expected_tokens.push(TokenType::Token(expected));
792 /// Eats `+` possibly breaking tokens like `+=` in the process.
793 fn eat_plus(&mut self) -> bool {
794 self.break_and_eat(token::BinOp(token::Plus))
797 /// Eats `&` possibly breaking tokens like `&&` in the process.
798 /// Signals an error if `&` is not eaten.
799 fn expect_and(&mut self) -> PResult<'a, ()> {
800 if self.break_and_eat(token::BinOp(token::And)) { Ok(()) } else { self.unexpected() }
803 /// Eats `|` possibly breaking tokens like `||` in the process.
804 /// Signals an error if `|` was not eaten.
805 fn expect_or(&mut self) -> PResult<'a, ()> {
806 if self.break_and_eat(token::BinOp(token::Or)) { Ok(()) } else { self.unexpected() }
809 /// Eats `<` possibly breaking tokens like `<<` in the process.
810 fn eat_lt(&mut self) -> bool {
811 let ate = self.break_and_eat(token::Lt);
813 // See doc comment for `unmatched_angle_bracket_count`.
814 self.unmatched_angle_bracket_count += 1;
815 self.max_angle_bracket_count += 1;
816 debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
821 /// Eats `<` possibly breaking tokens like `<<` in the process.
822 /// Signals an error if `<` was not eaten.
823 fn expect_lt(&mut self) -> PResult<'a, ()> {
824 if self.eat_lt() { Ok(()) } else { self.unexpected() }
827 /// Eats `>` possibly breaking tokens like `>>` in the process.
828 /// Signals an error if `>` was not eaten.
829 fn expect_gt(&mut self) -> PResult<'a, ()> {
830 if self.break_and_eat(token::Gt) {
831 // See doc comment for `unmatched_angle_bracket_count`.
832 if self.unmatched_angle_bracket_count > 0 {
833 self.unmatched_angle_bracket_count -= 1;
834 debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
842 fn expect_any_with_type(&mut self, kets: &[&TokenKind], expect: TokenExpectType) -> bool {
843 kets.iter().any(|k| match expect {
844 TokenExpectType::Expect => self.check(k),
845 TokenExpectType::NoExpect => self.token == **k,
849 fn parse_seq_to_before_tokens<T>(
853 expect: TokenExpectType,
854 mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
855 ) -> PResult<'a, (Vec<T>, bool /* trailing */, bool /* recovered */)> {
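// For example (sketch): parsing the arguments of a call `foo(a, b, c,)` eventually
// reaches this function (via `parse_paren_comma_seq`), with the closing `)` as the only
// `ket` and `SeqSep::trailing_allowed(token::Comma)` as the separator; the returned
// flags report whether a trailing separator was present and whether recovery happened.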
856 let mut first = true;
857 let mut recovered = false;
858 let mut trailing = false;
860 let unclosed_delims = !self.unclosed_delims.is_empty();
862 while !self.expect_any_with_type(kets, expect) {
863 if let token::CloseDelim(..) | token::Eof = self.token.kind {
866 if let Some(t) = &sep.sep {
870 match self.expect(t) {
872 self.current_closure.take();
875 self.current_closure.take();
879 Err(mut expect_err) => {
880 let sp = self.prev_token.span.shrink_to_hi();
881 let token_str = pprust::token_kind_to_string(t);
883 match self.current_closure.take() {
884 Some(closure_spans) if self.token.kind == TokenKind::Semi => {
885 // Finding a semicolon instead of a comma
886 // after a closure body indicates that the
887 // closure body may be a block but the user
888 // forgot to put braces around its
891 self.recover_missing_braces_around_closure_body(
900 // Attempt to keep parsing if it was a similar separator.
901 if let Some(tokens) = t.similar_tokens() {
902 if tokens.contains(&self.token.kind) && !unclosed_delims {
909 // If this was a missing `@` in a binding pattern,
910 // bail with a suggestion.
911 // See https://github.com/rust-lang/rust/issues/72373.
912 if self.prev_token.is_ident() && self.token.kind == token::DotDot {
914 "if you meant to bind the contents of \
915 the rest of the array pattern into `{}`, use `@`",
916 pprust::token_to_string(&self.prev_token)
919 .span_suggestion_verbose(
920 self.prev_token.span.shrink_to_hi().until(self.token.span),
923 Applicability::MaybeIncorrect,
929 // Attempt to keep parsing if it was an omitted separator.
932 // Parsed successfully, therefore most probably the code is only
933 // missing a separator.
935 .span_suggestion_short(
937 &format!("missing `{}`", token_str),
939 Applicability::MaybeIncorrect,
947 // Parsing failed, therefore it must be something more serious
948 // than just a missing separator.
949 for xx in &e.children {
950 // Propagate the help message from the sub-error `e` to the main error `expect_err`.
951 expect_err.children.push(xx.clone());
963 if sep.trailing_sep_allowed && self.expect_any_with_type(kets, expect) {
972 Ok((v, trailing, recovered))
975 fn recover_missing_braces_around_closure_body(
977 closure_spans: ClosureSpans,
978 mut expect_err: DiagnosticBuilder<'_, ErrorGuaranteed>,
979 ) -> PResult<'a, ()> {
980 let initial_semicolon = self.token.span;
982 while self.eat(&TokenKind::Semi) {
983 let _ = self.parse_stmt(ForceCollect::Yes)?;
986 expect_err.set_primary_message(
987 "closure bodies that contain statements must be surrounded by braces",
990 let preceding_pipe_span = closure_spans.closing_pipe;
991 let following_token_span = self.token.span;
993 let mut first_note = MultiSpan::from(vec![initial_semicolon]);
994 first_note.push_span_label(
996 "this `;` turns the preceding closure into a statement",
998 first_note.push_span_label(
1000 "this expression is a statement because of the trailing semicolon",
1002 expect_err.span_note(first_note, "statement found outside of a block");
1004 let mut second_note = MultiSpan::from(vec![closure_spans.whole_closure]);
1005 second_note.push_span_label(closure_spans.whole_closure, "this is the parsed closure...");
1006 second_note.push_span_label(
1007 following_token_span,
1008 "...but likely you meant the closure to end here",
1010 expect_err.span_note(second_note, "the closure body may be incorrectly delimited");
1012 expect_err.set_span(vec![preceding_pipe_span, following_token_span]);
1014 let opening_suggestion_str = " {".to_string();
1015 let closing_suggestion_str = "}".to_string();
1017 expect_err.multipart_suggestion(
1018 "try adding braces",
1020 (preceding_pipe_span.shrink_to_hi(), opening_suggestion_str),
1021 (following_token_span.shrink_to_lo(), closing_suggestion_str),
1023 Applicability::MaybeIncorrect,
1031 /// Parses a sequence, not including the closing delimiter. The function
1032 /// `f` must consume tokens until reaching the next separator or
1033 /// closing bracket.
1034 fn parse_seq_to_before_end<T>(
1038 f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1039 ) -> PResult<'a, (Vec<T>, bool, bool)> {
1040 self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
1043 /// Parses a sequence, including the closing delimiter. The function
1044 /// `f` must consume tokens until reaching the next separator or
1045 /// closing bracket.
1046 fn parse_seq_to_end<T>(
1050 f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1051 ) -> PResult<'a, (Vec<T>, bool /* trailing */)> {
1052 let (val, trailing, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
1059 /// Parses a sequence, including the closing delimiter. The function
1060 /// `f` must consume tokens until reaching the next separator or
1061 /// closing bracket.
1062 fn parse_unspanned_seq<T>(
1067 f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1068 ) -> PResult<'a, (Vec<T>, bool)> {
1070 self.parse_seq_to_end(ket, sep, f)
1073 fn parse_delim_comma_seq<T>(
1076 f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1077 ) -> PResult<'a, (Vec<T>, bool)> {
1078 self.parse_unspanned_seq(
1079 &token::OpenDelim(delim),
1080 &token::CloseDelim(delim),
1081 SeqSep::trailing_allowed(token::Comma),
1086 fn parse_paren_comma_seq<T>(
1088 f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
1089 ) -> PResult<'a, (Vec<T>, bool)> {
1090 self.parse_delim_comma_seq(Delimiter::Parenthesis, f)
1093 /// Advance the parser by one token, using the provided token as the next one.
1094 fn bump_with(&mut self, next: (Token, Spacing)) {
1095 self.inlined_bump_with(next)
1098 /// This always-inlined version should only be used on hot code paths.
1100 fn inlined_bump_with(&mut self, (next_token, next_spacing): (Token, Spacing)) {
1101 // Update the current and previous tokens.
1102 self.prev_token = mem::replace(&mut self.token, next_token);
1103 self.token_spacing = next_spacing;
1106 self.expected_tokens.clear();
1109 /// Advance the parser by one token.
1110 pub fn bump(&mut self) {
1111 // Note: destructuring here would give nicer code, but it was found in #96210 to be slower
1112 // than `.0`/`.1` access.
1113 let mut next = self.token_cursor.inlined_next(self.desugar_doc_comments);
1114 self.token_cursor.num_next_calls += 1;
1115 // We've retrieved a token from the underlying
1116 // cursor, so we no longer need to worry about
1117 // an unglued token. See `break_and_eat` for more details.
1118 self.token_cursor.break_last_token = false;
1119 if next.0.span.is_dummy() {
1120 // Tweak the location for better diagnostics, but keep syntactic context intact.
1121 let fallback_span = self.token.span;
1122 next.0.span = fallback_span.with_ctxt(next.0.span.ctxt());
1124 debug_assert!(!matches!(
1126 token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
1128 self.inlined_bump_with(next)
1131 /// Looks ahead `dist` tokens past `self.token` and gives access to the token at that position.
1132 /// When `dist == 0`, the current token is looked at.
1133 pub fn look_ahead<R>(&self, dist: usize, looker: impl FnOnce(&Token) -> R) -> R {
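// For example (sketch): with the parser positioned on `foo` in `foo::bar`,
// `look_ahead(1, |t| t == &token::ModSep)` is `true`, and
// `look_ahead(2, |t| t.is_ident())` is also `true`.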
1135 return looker(&self.token);
1138 let tree_cursor = &self.token_cursor.tree_cursor;
1139 if let Some(&(_, delim, span)) = self.token_cursor.stack.last()
1140 && delim != Delimiter::Invisible
1142 let all_normal = (0..dist).all(|i| {
1143 let token = tree_cursor.look_ahead(i);
1144 !matches!(token, Some(TokenTree::Delimited(_, Delimiter::Invisible, _)))
1147 return match tree_cursor.look_ahead(dist - 1) {
1148 Some(tree) => match tree {
1149 TokenTree::Token(token, _) => looker(token),
1150 TokenTree::Delimited(dspan, delim, _) => {
1151 looker(&Token::new(token::OpenDelim(*delim), dspan.open))
1154 None => looker(&Token::new(token::CloseDelim(delim), span.close)),
1159 let mut cursor = self.token_cursor.clone();
1161 let mut token = Token::dummy();
1163 token = cursor.next(/* desugar_doc_comments */ false).0;
1166 token::OpenDelim(Delimiter::Invisible) | token::CloseDelim(Delimiter::Invisible)
1172 return looker(&token);
1175 /// Returns whether any of the given keywords are `dist` tokens ahead of the current one.
1176 fn is_keyword_ahead(&self, dist: usize, kws: &[Symbol]) -> bool {
1177 self.look_ahead(dist, |t| kws.iter().any(|&kw| t.is_keyword(kw)))
1180 /// Parses asyncness: `async` or nothing.
1181 fn parse_asyncness(&mut self, case: Case) -> Async {
1182 if self.eat_keyword_case(kw::Async, case) {
1183 let span = self.prev_token.uninterpolated_span();
1184 Async::Yes { span, closure_id: DUMMY_NODE_ID, return_impl_trait_id: DUMMY_NODE_ID }
1190 /// Parses unsafety: `unsafe` or nothing.
1191 fn parse_unsafety(&mut self, case: Case) -> Unsafe {
1192 if self.eat_keyword_case(kw::Unsafe, case) {
1193 Unsafe::Yes(self.prev_token.uninterpolated_span())
1199 /// Parses constness: `const` or nothing.
1200 fn parse_constness(&mut self, case: Case) -> Const {
1201 // Avoid const blocks being parsed as const items: only eat `const` if it's not followed by `{`.
1202 if self.look_ahead(1, |t| t != &token::OpenDelim(Delimiter::Brace))
1203 && self.eat_keyword_case(kw::Const, case)
1205 Const::Yes(self.prev_token.uninterpolated_span())
1211 /// Parses inline const expressions.
1212 fn parse_const_block(&mut self, span: Span, pat: bool) -> PResult<'a, P<Expr>> {
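// For example (sketch): this parses the `const { 1 + 1 }` in
// `let x = const { 1 + 1 };`, and the same form in pattern position when `pat` is
// `true`; both uses are feature-gated below.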
1214 self.sess.gated_spans.gate(sym::inline_const_pat, span);
1216 self.sess.gated_spans.gate(sym::inline_const, span);
1218 self.eat_keyword(kw::Const);
1219 let (attrs, blk) = self.parse_inner_attrs_and_block()?;
1220 let anon_const = AnonConst {
1222 value: self.mk_expr(blk.span, ExprKind::Block(blk, None)),
1224 let blk_span = anon_const.value.span;
1225 Ok(self.mk_expr_with_attrs(span.to(blk_span), ExprKind::ConstBlock(anon_const), attrs))
1228 /// Parses mutability (`mut` or nothing).
1229 fn parse_mutability(&mut self) -> Mutability {
1230 if self.eat_keyword(kw::Mut) { Mutability::Mut } else { Mutability::Not }
1233 /// Possibly parses mutability (`const` or `mut`).
1234 fn parse_const_or_mut(&mut self) -> Option<Mutability> {
1235 if self.eat_keyword(kw::Mut) {
1236 Some(Mutability::Mut)
1237 } else if self.eat_keyword(kw::Const) {
1238 Some(Mutability::Not)
1244 fn parse_field_name(&mut self) -> PResult<'a, Ident> {
1245 if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token.kind
1247 if let Some(suffix) = suffix {
1248 self.expect_no_tuple_index_suffix(self.token.span, suffix);
1251 Ok(Ident::new(symbol, self.prev_token.span))
1253 self.parse_ident_common(true)
1257 fn parse_delim_args(&mut self) -> PResult<'a, P<DelimArgs>> {
1258 if let Some(args) = self.parse_delim_args_inner() { Ok(P(args)) } else { self.unexpected() }
1261 fn parse_attr_args(&mut self) -> PResult<'a, AttrArgs> {
1262 Ok(if let Some(args) = self.parse_delim_args_inner() {
1263 AttrArgs::Delimited(args)
1265 if self.eat(&token::Eq) {
1266 let eq_span = self.prev_token.span;
1267 AttrArgs::Eq(eq_span, AttrArgsEq::Ast(self.parse_expr_force_collect()?))
1274 fn parse_delim_args_inner(&mut self) -> Option<DelimArgs> {
1275 if self.check(&token::OpenDelim(Delimiter::Parenthesis))
1276 || self.check(&token::OpenDelim(Delimiter::Bracket))
1277 || self.check(&token::OpenDelim(Delimiter::Brace))
1279 match self.parse_token_tree() {
1280 // We've confirmed above that there is a delimiter, so unwrapping is OK.
1281 TokenTree::Delimited(dspan, delim, tokens) => Some(DelimArgs {
1283 delim: MacDelimiter::from_token(delim).unwrap(),
1286 _ => unreachable!(),
1293 fn parse_or_use_outer_attributes(
1295 already_parsed_attrs: Option<AttrWrapper>,
1296 ) -> PResult<'a, AttrWrapper> {
1297 if let Some(attrs) = already_parsed_attrs {
1300 self.parse_outer_attributes()
1304 /// Parses a single token tree from the input.
1305 pub(crate) fn parse_token_tree(&mut self) -> TokenTree {
1306 match self.token.kind {
1307 token::OpenDelim(..) => {
1308 // Grab the tokens within the delimiters.
1309 let tree_cursor = &self.token_cursor.tree_cursor;
1310 let stream = tree_cursor.stream.clone();
1311 let (_, delim, span) = *self.token_cursor.stack.last().unwrap();
1313 // Advance the token cursor through the entire delimited
1314 // sequence. After getting the `OpenDelim` we are *within* the
1315 // delimited sequence, i.e. at depth `d`. After getting the
1316 // matching `CloseDelim` we are *after* the delimited sequence,
1317 // i.e. at depth `d - 1`.
1318 let target_depth = self.token_cursor.stack.len() - 1;
1320 // Advance one token at a time, so `TokenCursor::next()`
1321 // can capture these tokens if necessary.
1323 if self.token_cursor.stack.len() == target_depth {
1324 debug_assert!(matches!(self.token.kind, token::CloseDelim(_)));
1329 // Consume close delimiter
1331 TokenTree::Delimited(span, delim, stream)
1333 token::CloseDelim(_) | token::Eof => unreachable!(),
1336 TokenTree::Token(self.prev_token.clone(), Spacing::Alone)
1341 /// Parses a stream of tokens into a list of `TokenTree`s, up to EOF.
1342 pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
1343 let mut tts = Vec::new();
1344 while self.token != token::Eof {
1345 tts.push(self.parse_token_tree());
1350 pub fn parse_tokens(&mut self) -> TokenStream {
1351 let mut result = Vec::new();
1353 match self.token.kind {
1354 token::Eof | token::CloseDelim(..) => break,
1355 _ => result.push(self.parse_token_tree()),
1358 TokenStream::new(result)
1361 /// Evaluates the closure with restrictions in place.
1363 /// After the closure is evaluated, the restrictions are reset.
1364 fn with_res<T>(&mut self, res: Restrictions, f: impl FnOnce(&mut Self) -> T) -> T {
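// For example (sketch): expression parsing uses this to scope restrictions such as
// `Restrictions::NO_STRUCT_LITERAL` while parsing an `if` condition, so that in
// `if x == S {}` the `{}` is treated as the `if` body rather than a struct literal.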
1365 let old = self.restrictions;
1366 self.restrictions = res;
1368 self.restrictions = old;
1372 /// Parses `pub` and `pub(in path)` plus shortcuts `pub(crate)` for `pub(in crate)`, `pub(self)`
1373 /// for `pub(in self)` and `pub(super)` for `pub(in super)`.
1374 /// If the following element can't be a tuple (i.e., it's a function definition rather
1375 /// than a tuple struct field), then the contents within the parentheses aren't valid,
1376 /// so emit a proper diagnostic.
1377 // Public for rustfmt usage.
1378 pub fn parse_visibility(&mut self, fbt: FollowedByType) -> PResult<'a, Visibility> {
1379 maybe_whole!(self, NtVis, |x| x.into_inner());
1381 if !self.eat_keyword(kw::Pub) {
1382 // We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
1383 // keyword to grab a span from for inherited visibility; an empty span at the
1384 // beginning of the current token would seem to be the "Schelling span".
1385 return Ok(Visibility {
1386 span: self.token.span.shrink_to_lo(),
1387 kind: VisibilityKind::Inherited,
1391 let lo = self.prev_token.span;
1393 if self.check(&token::OpenDelim(Delimiter::Parenthesis)) {
1394 // We don't `self.bump()` the `(` yet because this might be a struct definition where
1395 // `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
1396 // Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
1397 // by the following tokens.
1398 if self.is_keyword_ahead(1, &[kw::In]) {
1399 // Parse `pub(in path)`.
1401 self.bump(); // `in`
1402 let path = self.parse_path(PathStyle::Mod)?; // `path`
1403 self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
1404 let vis = VisibilityKind::Restricted {
1406 id: ast::DUMMY_NODE_ID,
1409 return Ok(Visibility {
1410 span: lo.to(self.prev_token.span),
1414 } else if self.look_ahead(2, |t| t == &token::CloseDelim(Delimiter::Parenthesis))
1415 && self.is_keyword_ahead(1, &[kw::Crate, kw::Super, kw::SelfLower])
1417 // Parse `pub(crate)`, `pub(self)`, or `pub(super)`.
1419 let path = self.parse_path(PathStyle::Mod)?; // `crate`/`super`/`self`
1420 self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
1421 let vis = VisibilityKind::Restricted {
1423 id: ast::DUMMY_NODE_ID,
1426 return Ok(Visibility {
1427 span: lo.to(self.prev_token.span),
1431 } else if let FollowedByType::No = fbt {
1432 // Provide this diagnostic if a type cannot follow;
1433 // in particular, if this is not a tuple struct.
1434 self.recover_incorrect_vis_restriction()?;
1435 // Emit diagnostic, but continue with public visibility.
1439 Ok(Visibility { span: lo, kind: VisibilityKind::Public, tokens: None })
1442 /// Recovery for e.g. `pub(something) fn ...` or `struct X { pub(something) y: Z }`
1443 fn recover_incorrect_vis_restriction(&mut self) -> PResult<'a, ()> {
1445 let path = self.parse_path(PathStyle::Mod)?;
1446 self.expect(&token::CloseDelim(Delimiter::Parenthesis))?; // `)`
1448 let path_str = pprust::path_to_string(&path);
1449 self.sess.emit_err(IncorrectVisibilityRestriction { span: path.span, inner_str: path_str });
1454 /// Parses `extern string_literal?`.
1455 fn parse_extern(&mut self, case: Case) -> Extern {
1456 if self.eat_keyword_case(kw::Extern, case) {
1457 let mut extern_span = self.prev_token.span;
1458 let abi = self.parse_abi();
1459 if let Some(abi) = abi {
1460 extern_span = extern_span.to(abi.span);
1462 Extern::from_abi(abi, extern_span)
1468 /// Parses a string literal as an ABI spec.
1469 fn parse_abi(&mut self) -> Option<StrLit> {
1470 match self.parse_str_lit() {
1471 Ok(str_lit) => Some(str_lit),
1472 Err(Some(lit)) => match lit.kind {
1473 ast::LitKind::Err => None,
1475 self.sess.emit_err(NonStringAbiLiteral { span: lit.span });
1483 pub fn collect_tokens_no_attrs<R: HasAttrs + HasTokens>(
1485 f: impl FnOnce(&mut Self) -> PResult<'a, R>,
1486 ) -> PResult<'a, R> {
1487 // The only reason to call `collect_tokens_no_attrs` is if you want tokens, so use
1488 // `ForceCollect::Yes`.
1489 self.collect_tokens_trailing_token(
1490 AttrWrapper::empty(),
1492 |this, _attrs| Ok((f(this)?, TrailingToken::None)),
1497 fn is_import_coupler(&mut self) -> bool {
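// For example (sketch): returns `true` at the `::{` in `use foo::{bar, baz};`
// and at the `::*` in `use foo::*;`.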
1498 self.check(&token::ModSep)
1499 && self.look_ahead(1, |t| {
1500 *t == token::OpenDelim(Delimiter::Brace) || *t == token::BinOp(token::Star)
1504 pub fn clear_expected_tokens(&mut self) {
1505 self.expected_tokens.clear();
1508 pub fn approx_token_stream_pos(&self) -> usize {
1509 self.token_cursor.num_next_calls
1513 pub(crate) fn make_unclosed_delims_error(
1514 unmatched: UnmatchedBrace,
1516 ) -> Option<DiagnosticBuilder<'_, ErrorGuaranteed>> {
1517 // `None` here means an `Eof` was found. We already emit those errors elsewhere; we add them to
1518 // `unmatched_braces` only for error recovery in the `Parser`.
1519 let found_delim = unmatched.found_delim?;
1520 let mut spans = vec![unmatched.found_span];
1521 if let Some(sp) = unmatched.unclosed_span {
1524 let err = MismatchedClosingDelimiter {
1526 delimiter: pprust::token_kind_to_string(&token::CloseDelim(found_delim)).to_string(),
1527 unmatched: unmatched.found_span,
1528 opening_candidate: unmatched.candidate_span,
1529 unclosed: unmatched.unclosed_span,
1531 .into_diagnostic(&sess.span_diagnostic);
1535 pub fn emit_unclosed_delims(unclosed_delims: &mut Vec<UnmatchedBrace>, sess: &ParseSess) {
1536 *sess.reached_eof.borrow_mut() |=
1537 unclosed_delims.iter().any(|unmatched_delim| unmatched_delim.found_delim.is_none());
1538 for unmatched in unclosed_delims.drain(..) {
1539 if let Some(mut e) = make_unclosed_delims_error(unmatched, sess) {
1545 /// A helper type used when building an `AttrTokenStream` from
1546 /// a `LazyAttrTokenStream`. Both delimiter and non-delimiter tokens
1547 /// are stored as `FlatToken::Token`. A vector of `FlatToken`s
1548 /// is then 'parsed' to build up an `AttrTokenStream` with nested
1549 /// `AttrTokenTree::Delimited` tokens.
1550 #[derive(Debug, Clone)]
1551 pub enum FlatToken {
1552 /// A token - this holds both delimiter (e.g. '{' and '}')
1553 /// and non-delimiter tokens.
1555 /// Holds the `AttributesData` for an AST node. The
1556 /// `AttributesData` is inserted directly into the
1557 /// constructed `AttrTokenStream` as
1558 /// an `AttrTokenTree::Attributes`.
1559 AttrTarget(AttributesData),
1560 /// A special 'empty' token that is ignored during the conversion
1561 /// to an `AttrTokenStream`. This is used to simplify the
1562 /// handling of replace ranges.