Rollup merge of #99786 - obeis:issue-99625, r=compiler-errors
compiler/rustc_ast/src/tokenstream.rs
index 37de90d64c774409d4ecdb353fdfb1f7d5e052e3..9e4a22e1fa3cd19551ddeda23c2609dc172f4953 100644
 #[derive(Debug, Clone, PartialEq, Encodable, Decodable, HashStable_Generic)]
 pub enum TokenTree {
     /// A single token.
-    Token(Token),
+    Token(Token, Spacing),
     /// A delimited sequence of token trees.
     Delimited(DelimSpan, Delimiter, TokenStream),
 }
 
+// This type is used a lot. Make sure it doesn't unintentionally get bigger.
+#[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
+rustc_data_structures::static_assert_size!(TokenTree, 32);
+
 // Ensure all fields of `TokenTree` are `Send` and `Sync`.
 #[cfg(parallel_compiler)]
 fn _dummy()
@@ -62,7 +66,7 @@ impl TokenTree {
     /// Checks if this `TokenTree` is equal to the other, regardless of span information.
     pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
         match (self, other) {
-            (TokenTree::Token(token), TokenTree::Token(token2)) => token.kind == token2.kind,
+            (TokenTree::Token(token, _), TokenTree::Token(token2, _)) => token.kind == token2.kind,
             (TokenTree::Delimited(_, delim, tts), TokenTree::Delimited(_, delim2, tts2)) => {
                 delim == delim2 && tts.eq_unspanned(&tts2)
             }
@@ -73,7 +77,7 @@ pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
     /// Retrieves the `TokenTree`'s span.
     pub fn span(&self) -> Span {
         match self {
-            TokenTree::Token(token) => token.span,
+            TokenTree::Token(token, _) => token.span,
             TokenTree::Delimited(sp, ..) => sp.entire(),
         }
     }
@@ -81,18 +85,26 @@ pub fn span(&self) -> Span {
     /// Modify the `TokenTree`'s span in-place.
     pub fn set_span(&mut self, span: Span) {
         match self {
-            TokenTree::Token(token) => token.span = span,
+            TokenTree::Token(token, _) => token.span = span,
             TokenTree::Delimited(dspan, ..) => *dspan = DelimSpan::from_single(span),
         }
     }
 
-    pub fn token(kind: TokenKind, span: Span) -> TokenTree {
-        TokenTree::Token(Token::new(kind, span))
+    // Create a `TokenTree::Token` with alone spacing.
+    pub fn token_alone(kind: TokenKind, span: Span) -> TokenTree {
+        TokenTree::Token(Token::new(kind, span), Spacing::Alone)
+    }
+
+    // Create a `TokenTree::Token` with joint spacing.
+    pub fn token_joint(kind: TokenKind, span: Span) -> TokenTree {
+        TokenTree::Token(Token::new(kind, span), Spacing::Joint)
     }
 
     pub fn uninterpolate(self) -> TokenTree {
         match self {
-            TokenTree::Token(token) => TokenTree::Token(token.uninterpolate().into_owned()),
+            TokenTree::Token(token, spacing) => {
+                TokenTree::Token(token.uninterpolate().into_owned(), spacing)
+            }
             tt => tt,
         }
     }
@@ -194,13 +206,12 @@ pub fn to_tokenstream(&self) -> TokenStream {
             .iter()
             .flat_map(|tree| match &tree.0 {
                 AttrAnnotatedTokenTree::Token(inner) => {
-                    smallvec![(TokenTree::Token(inner.clone()), tree.1)].into_iter()
+                    smallvec![TokenTree::Token(inner.clone(), tree.1)].into_iter()
+                }
+                AttrAnnotatedTokenTree::Delimited(span, delim, stream) => {
+                    smallvec![TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),]
+                        .into_iter()
                 }
-                AttrAnnotatedTokenTree::Delimited(span, delim, stream) => smallvec![(
-                    TokenTree::Delimited(*span, *delim, stream.to_tokenstream()),
-                    tree.1,
-                )]
-                .into_iter(),
                 AttrAnnotatedTokenTree::Attributes(data) => {
                     let mut outer_attrs = Vec::new();
                     let mut inner_attrs = Vec::new();
@@ -226,7 +237,7 @@ pub fn to_tokenstream(&self) -> TokenStream {
                     if !inner_attrs.is_empty() {
                         let mut found = false;
                         // Check the last two trees (to account for a trailing semi)
-                        for (tree, _) in target_tokens.iter_mut().rev().take(2) {
+                        for tree in target_tokens.iter_mut().rev().take(2) {
                             if let TokenTree::Delimited(span, delim, delim_tokens) = tree {
                                 // Inner attributes are only supported on extern blocks, functions, impls,
                                 // and modules. All of these have their inner attributes placed at
@@ -299,15 +310,13 @@ pub struct AttributesData {
 /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for
 /// backwards compatibility.
 #[derive(Clone, Debug, Default, Encodable, Decodable)]
-pub struct TokenStream(pub(crate) Lrc<Vec<TreeAndSpacing>>);
-
-pub type TreeAndSpacing = (TokenTree, Spacing);
+pub struct TokenStream(pub(crate) Lrc<Vec<TokenTree>>);
 
 // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(all(target_arch = "x86_64", target_pointer_width = "64"))]
 rustc_data_structures::static_assert_size!(TokenStream, 8);
 
-#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable)]
+#[derive(Clone, Copy, Debug, PartialEq, Encodable, Decodable, HashStable_Generic)]
 pub enum Spacing {
     Alone,
     Joint,
@@ -323,10 +332,10 @@ pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
         while let Some((pos, ts)) = iter.next() {
             if let Some((_, next)) = iter.peek() {
                 let sp = match (&ts, &next) {
-                    (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+                    (_, TokenTree::Token(Token { kind: token::Comma, .. }, _)) => continue,
                     (
-                        (TokenTree::Token(token_left), Spacing::Alone),
-                        (TokenTree::Token(token_right), _),
+                        TokenTree::Token(token_left, Spacing::Alone),
+                        TokenTree::Token(token_right, _),
                     ) if ((token_left.is_ident() && !token_left.is_reserved_ident())
                         || token_left.is_lit())
                         && ((token_right.is_ident() && !token_right.is_reserved_ident())
@@ -334,11 +343,11 @@ pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
                     {
                         token_left.span
                     }
-                    ((TokenTree::Delimited(sp, ..), Spacing::Alone), _) => sp.entire(),
+                    (TokenTree::Delimited(sp, ..), _) => sp.entire(),
                     _ => continue,
                 };
                 let sp = sp.shrink_to_hi();
-                let comma = (TokenTree::token(token::Comma, sp), Spacing::Alone);
+                let comma = TokenTree::token_alone(token::Comma, sp);
                 suggestion = Some((pos, comma, sp));
             }
         }
@@ -360,21 +369,9 @@ fn from((tree, spacing): (AttrAnnotatedTokenTree, Spacing)) -> AttrAnnotatedToke
     }
 }
 
-impl From<TokenTree> for TokenStream {
-    fn from(tree: TokenTree) -> TokenStream {
-        TokenStream::new(vec![(tree, Spacing::Alone)])
-    }
-}
-
-impl From<TokenTree> for TreeAndSpacing {
-    fn from(tree: TokenTree) -> TreeAndSpacing {
-        (tree, Spacing::Alone)
-    }
-}
-
 impl iter::FromIterator<TokenTree> for TokenStream {
     fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
-        TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndSpacing>>())
+        TokenStream::new(iter.into_iter().collect::<Vec<TokenTree>>())
     }
 }
 
@@ -387,7 +384,7 @@ fn eq(&self, other: &TokenStream) -> bool {
 }
 
 impl TokenStream {
-    pub fn new(streams: Vec<TreeAndSpacing>) -> TokenStream {
+    pub fn new(streams: Vec<TokenTree>) -> TokenStream {
         TokenStream(Lrc::new(streams))
     }
 
@@ -420,13 +417,7 @@ pub fn eq_unspanned(&self, other: &TokenStream) -> bool {
     }
 
     pub fn map_enumerated<F: FnMut(usize, &TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(Lrc::new(
-            self.0
-                .iter()
-                .enumerate()
-                .map(|(i, (tree, is_joint))| (f(i, tree), *is_joint))
-                .collect(),
-        ))
+        TokenStream(Lrc::new(self.0.iter().enumerate().map(|(i, tree)| f(i, tree)).collect()))
     }
 
     fn opt_from_ast(node: &(impl HasAttrs + HasTokens)) -> Option<TokenStream> {
@@ -444,6 +435,21 @@ fn opt_from_ast(node: &(impl HasAttrs + HasTokens)) -> Option<TokenStream> {
         Some(attr_annotated.to_tokenstream())
     }
 
+    // Create a token stream containing a single token with alone spacing.
+    pub fn token_alone(kind: TokenKind, span: Span) -> TokenStream {
+        TokenStream::new(vec![TokenTree::token_alone(kind, span)])
+    }
+
+    // Create a token stream containing a single token with joint spacing.
+    pub fn token_joint(kind: TokenKind, span: Span) -> TokenStream {
+        TokenStream::new(vec![TokenTree::token_joint(kind, span)])
+    }
+
+    // Create a token stream containing a single `Delimited`.
+    pub fn delimited(span: DelimSpan, delim: Delimiter, tts: TokenStream) -> TokenStream {
+        TokenStream::new(vec![TokenTree::Delimited(span, delim, tts)])
+    }
+
     pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> TokenStream {
         TokenStream::opt_from_ast(node)
             .unwrap_or_else(|| panic!("missing tokens for node at {:?}: {:?}", node.span(), node))
@@ -452,16 +458,16 @@ pub fn from_ast(node: &(impl HasAttrs + HasSpan + HasTokens + fmt::Debug)) -> To
     pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
         match nt {
             Nonterminal::NtIdent(ident, is_raw) => {
-                TokenTree::token(token::Ident(ident.name, *is_raw), ident.span).into()
+                TokenStream::token_alone(token::Ident(ident.name, *is_raw), ident.span)
             }
             Nonterminal::NtLifetime(ident) => {
-                TokenTree::token(token::Lifetime(ident.name), ident.span).into()
+                TokenStream::token_alone(token::Lifetime(ident.name), ident.span)
             }
             Nonterminal::NtItem(item) => TokenStream::from_ast(item),
             Nonterminal::NtBlock(block) => TokenStream::from_ast(block),
             Nonterminal::NtStmt(stmt) if let StmtKind::Empty = stmt.kind => {
                 // FIXME: Properly collect tokens for empty statements.
-                TokenTree::token(token::Semi, stmt.span).into()
+                TokenStream::token_alone(token::Semi, stmt.span)
             }
             Nonterminal::NtStmt(stmt) => TokenStream::from_ast(stmt),
             Nonterminal::NtPat(pat) => TokenStream::from_ast(pat),
@@ -473,23 +479,23 @@ pub fn from_nonterminal_ast(nt: &Nonterminal) -> TokenStream {
         }
     }
 
-    fn flatten_token(token: &Token) -> TokenTree {
+    fn flatten_token(token: &Token, spacing: Spacing) -> TokenTree {
         match &token.kind {
             token::Interpolated(nt) if let token::NtIdent(ident, is_raw) = **nt => {
-                TokenTree::token(token::Ident(ident.name, is_raw), ident.span)
+                TokenTree::Token(Token::new(token::Ident(ident.name, is_raw), ident.span), spacing)
             }
             token::Interpolated(nt) => TokenTree::Delimited(
                 DelimSpan::from_single(token.span),
                 Delimiter::Invisible,
                 TokenStream::from_nonterminal_ast(&nt).flattened(),
             ),
-            _ => TokenTree::Token(token.clone()),
+            _ => TokenTree::Token(token.clone(), spacing),
         }
     }
 
     fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
         match tree {
-            TokenTree::Token(token) => TokenStream::flatten_token(token),
+            TokenTree::Token(token, spacing) => TokenStream::flatten_token(token, *spacing),
             TokenTree::Delimited(span, delim, tts) => {
                 TokenTree::Delimited(*span, *delim, tts.flattened())
             }
@@ -500,7 +506,7 @@ fn flatten_token_tree(tree: &TokenTree) -> TokenTree {
     pub fn flattened(&self) -> TokenStream {
         fn can_skip(stream: &TokenStream) -> bool {
             stream.trees().all(|tree| match tree {
-                TokenTree::Token(token) => !matches!(token.kind, token::Interpolated(_)),
+                TokenTree::Token(token, _) => !matches!(token.kind, token::Interpolated(_)),
                 TokenTree::Delimited(_, _, inner) => can_skip(inner),
             })
         }
@@ -522,8 +528,8 @@ pub fn new() -> TokenStreamBuilder {
         TokenStreamBuilder(SmallVec::new())
     }
 
-    pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
-        self.0.push(stream.into());
+    pub fn push(&mut self, stream: TokenStream) {
+        self.0.push(stream);
     }
 
     pub fn build(self) -> TokenStream {
@@ -564,14 +570,14 @@ pub fn build(self) -> TokenStream {
                     // `stream` is not empty and the first tree within it is a
                     // token tree, and (c) the two tokens can be glued
                     // together...
-                    if let Some((TokenTree::Token(last_tok), Spacing::Joint)) = res_vec_mut.last()
-                        && let Some((TokenTree::Token(tok), spacing)) = stream.0.first()
+                    if let Some(TokenTree::Token(last_tok, Spacing::Joint)) = res_vec_mut.last()
+                        && let Some(TokenTree::Token(tok, spacing)) = stream.0.first()
                         && let Some(glued_tok) = last_tok.glue(&tok)
                     {
                         // ...then overwrite the last token tree in
                         // `res_vec_mut` with the glued token, and skip the
                         // first token tree from `stream`.
-                        *res_vec_mut.last_mut().unwrap() = (TokenTree::Token(glued_tok), *spacing);
+                        *res_vec_mut.last_mut().unwrap() = TokenTree::Token(glued_tok, *spacing);
                         res_vec_mut.extend(stream_iter.skip(1));
                     } else {
                         // Append all of `stream`.
@@ -597,16 +603,8 @@ fn new(stream: &'t TokenStream) -> Self {
         CursorRef { stream, index: 0 }
     }
 
-    #[inline]
-    fn next_with_spacing(&mut self) -> Option<&'t TreeAndSpacing> {
-        self.stream.0.get(self.index).map(|tree| {
-            self.index += 1;
-            tree
-        })
-    }
-
     pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
-        self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
+        self.stream.0.get(self.index + n)
     }
 }
 
@@ -614,7 +612,10 @@ impl<'t> Iterator for CursorRef<'t> {
     type Item = &'t TokenTree;
 
     fn next(&mut self) -> Option<&'t TokenTree> {
-        self.next_with_spacing().map(|(tree, _)| tree)
+        self.stream.0.get(self.index).map(|tree| {
+            self.index += 1;
+            tree
+        })
     }
 }
 
@@ -630,7 +631,10 @@ impl Iterator for Cursor {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
-        self.next_with_spacing().map(|(tree, _)| tree)
+        self.stream.0.get(self.index).map(|tree| {
+            self.index += 1;
+            tree.clone()
+        })
     }
 }
 
@@ -640,15 +644,7 @@ fn new(stream: TokenStream) -> Self {
     }
 
     #[inline]
-    pub fn next_with_spacing(&mut self) -> Option<TreeAndSpacing> {
-        self.stream.0.get(self.index).map(|tree| {
-            self.index += 1;
-            tree.clone()
-        })
-    }
-
-    #[inline]
-    pub fn next_with_spacing_ref(&mut self) -> Option<&TreeAndSpacing> {
+    pub fn next_ref(&mut self) -> Option<&TokenTree> {
         self.stream.0.get(self.index).map(|tree| {
             self.index += 1;
             tree
@@ -656,7 +652,7 @@ pub fn next_with_spacing_ref(&mut self) -> Option<&TreeAndSpacing> {
     }
 
     pub fn look_ahead(&self, n: usize) -> Option<&TokenTree> {
-        self.stream.0[self.index..].get(n).map(|(tree, _)| tree)
+        self.stream.0.get(self.index + n)
     }
 }
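
For orientation, here is a minimal sketch (not part of the diff above) of how call sites look after this change. It assumes the `rustc_ast` items shown in the diff (`TokenTree`, `TokenStream`, `Spacing`, the `token_alone`/`token_joint` constructors) and a nightly toolchain with the `rustc-dev` component; the helper functions below are illustrative names, not code from this commit.

#![feature(rustc_private)]
extern crate rustc_ast;
extern crate rustc_span;

use rustc_ast::token;
use rustc_ast::tokenstream::{Spacing, TokenStream, TokenTree};
use rustc_span::Span;

// Constructing single tokens and one-token streams with the new helpers.
fn build_examples(sp: Span) -> (TokenTree, TokenStream) {
    // Before this change, spacing travelled beside the tree as a
    // `TreeAndSpacing` pair:
    //     let tt = (TokenTree::token(token::Comma, sp), Spacing::Alone);
    //     let ts: TokenStream = TokenTree::token(token::Semi, sp).into();
    // Now spacing is a field of `TokenTree::Token`, so the constructors
    // build the tree (or a one-token stream) directly.
    let comma = TokenTree::token_alone(token::Comma, sp);
    let semi_stream = TokenStream::token_alone(token::Semi, sp);
    (comma, semi_stream)
}

// Iteration now yields `TokenTree` values rather than `(TokenTree, Spacing)`
// tuples; the spacing is read out of the `Token` variant itself.
fn count_joint_tokens(stream: &TokenStream) -> usize {
    stream
        .trees()
        .filter(|tree| matches!(tree, TokenTree::Token(_, Spacing::Joint)))
        .count()
}

Per the static assert added above, `TokenTree` stays at 32 bytes on 64-bit targets, while `TokenStream` now wraps a plain `Vec<TokenTree>` instead of `Vec<TreeAndSpacing>`.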