git.lizzy.rs Git - rust.git/commitdiff
Remove `ThinTokenStream`.
author Nicholas Nethercote <nnethercote@mozilla.com>
Wed, 9 Jan 2019 05:53:14 +0000 (16:53 +1100)
committer Nicholas Nethercote <nnethercote@mozilla.com>
Sun, 13 Jan 2019 22:10:26 +0000 (09:10 +1100)
`TokenStream` is now almost identical to `ThinTokenStream`. This commit
removes the latter, replacing it with the former.
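
For context, here is a rough sketch of the two types as they stand at this commit, pieced together from the `ThinTokenStream` definition and the `From` impls removed below (the `TokenStream` enum shape is inferred from those impls, not quoted from its full definition):

    // Both types ultimately hold an `Lrc<Vec<TreeAndJoint>>`, so the thin
    // wrapper no longer buys a smaller representation.
    pub struct ThinTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);

    pub enum TokenStream {
        Empty,
        Stream(Lrc<Vec<TreeAndJoint>>),
    }

With the representations matching, call sites that previously converted with `tts.stream()` can use the `TokenStream` directly, or `tts.clone()` where an owned stream is needed, as the hunks below show.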

src/librustc/ich/impls_syntax.rs
src/librustc_lint/builtin.rs
src/libsyntax/ast.rs
src/libsyntax/attr/mod.rs
src/libsyntax/ext/quote.rs
src/libsyntax/fold.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/print/pprust.rs
src/libsyntax/tokenstream.rs
src/libsyntax/visit.rs

diff --git a/src/librustc/ich/impls_syntax.rs b/src/librustc/ich/impls_syntax.rs
index 70ec72d73bc6cc9a83f8c15ecf2e8c4a26239641..de567183a3c05b14671f97009f8ed4b679a48c84 100644 (file)
@@ -258,7 +258,7 @@ fn hash_stable<W: StableHasherResult>(&self,
             tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
                 span.hash_stable(hcx, hasher);
                 std_hash::Hash::hash(&delim, hasher);
-                for sub_tt in tts.stream().trees() {
+                for sub_tt in tts.trees() {
                     sub_tt.hash_stable(hcx, hasher);
                 }
             }
diff --git a/src/librustc_lint/builtin.rs b/src/librustc_lint/builtin.rs
index 5678f30dabccd5b98112ece998330c1132daac80..0fce166d828b08dec0d671e8a3eee3da45dce4e8 100644 (file)
@@ -1540,7 +1540,7 @@ fn check_tokens(&mut self, cx: &EarlyContext, tokens: TokenStream) {
                     _ => {},
                 }
                 TokenTree::Delimited(_, _, tts) => {
-                    self.check_tokens(cx, tts.stream())
+                    self.check_tokens(cx, tts)
                 },
             }
         }
diff --git a/src/libsyntax/ast.rs b/src/libsyntax/ast.rs
index e3a8980a975c19a7827b0d6fc75b6011e227294e..1e91f4adc36d726ae6e65679eb9471276a2759a1 100644 (file)
@@ -15,7 +15,7 @@
 use source_map::{dummy_spanned, respan, Spanned};
 use symbol::{keywords, Symbol};
 use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{ThinTokenStream, TokenStream};
+use tokenstream::TokenStream;
 use ThinVec;
 
 use rustc_data_structures::fx::FxHashSet;
@@ -1216,7 +1216,7 @@ pub enum Movability {
 pub struct Mac_ {
     pub path: Path,
     pub delim: MacDelimiter,
-    pub tts: ThinTokenStream,
+    pub tts: TokenStream,
 }
 
 #[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
@@ -1228,13 +1228,13 @@ pub enum MacDelimiter {
 
 impl Mac_ {
     pub fn stream(&self) -> TokenStream {
-        self.tts.stream()
+        self.tts.clone()
     }
 }
 
 #[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
 pub struct MacroDef {
-    pub tokens: ThinTokenStream,
+    pub tokens: TokenStream,
     pub legacy: bool,
 }
 
diff --git a/src/libsyntax/attr/mod.rs b/src/libsyntax/attr/mod.rs
index d03563f8891aa56982862e1dda9ce45344a9f96d..0f8ca5e7b9982835bf3ef4306f72300ad8a7e50e 100644 (file)
@@ -565,7 +565,7 @@ fn from_tokens<I>(tokens: &mut iter::Peekable<I>) -> Option<MetaItemKind>
             }
             Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => {
                 tokens.next();
-                tts.stream()
+                tts.clone()
             }
             _ => return Some(MetaItemKind::Word),
         };
diff --git a/src/libsyntax/ext/quote.rs b/src/libsyntax/ext/quote.rs
index c3124144009ab61bfc2b7f589d8df50e59d3586c..c01e7f538b90d209bda9ae208c1c849e2795eec2 100644 (file)
@@ -748,7 +748,7 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
         },
         TokenTree::Delimited(span, delim, ref tts) => {
             let mut stmts = statements_mk_tt(cx, &TokenTree::open_tt(span.open, delim), false);
-            stmts.extend(statements_mk_tts(cx, tts.stream()));
+            stmts.extend(statements_mk_tts(cx, tts.clone()));
             stmts.extend(statements_mk_tt(cx, &TokenTree::close_tt(span.close, delim), false));
             stmts
         }
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 8ac103856dcd17fe4b799789066cc993132f689a..a4c3b38f691edd635c640a7ea57c3bde4e968fe7 100644 (file)
@@ -598,7 +598,7 @@ pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
         TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
             DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
             delim,
-            fld.fold_tts(tts.stream()).into(),
+            fld.fold_tts(tts).into(),
         ),
     }
 }
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index ba5676a65d7eb01de502d1ee76c69489ed2c98f1..759de578847a98c0f37a4f4e1f5cac4eaa199f8d 100644 (file)
@@ -811,7 +811,7 @@ fn string_to_tts_macro () {
                 )
                 if name_macro_rules.name == "macro_rules"
                 && name_zip.name == "zip" => {
-                    let tts = &macro_tts.stream().trees().collect::<Vec<_>>();
+                    let tts = &macro_tts.trees().collect::<Vec<_>>();
                     match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                         (
                             3,
@@ -820,7 +820,7 @@ fn string_to_tts_macro () {
                             Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
                         )
                         if macro_delim == token::Paren => {
-                            let tts = &first_tts.stream().trees().collect::<Vec<_>>();
+                            let tts = &first_tts.trees().collect::<Vec<_>>();
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,
@@ -830,7 +830,7 @@ fn string_to_tts_macro () {
                                 if first_delim == token::Paren && ident.name == "a" => {},
                                 _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
                             }
-                            let tts = &second_tts.stream().trees().collect::<Vec<_>>();
+                            let tts = &second_tts.trees().collect::<Vec<_>>();
                             match (tts.len(), tts.get(0), tts.get(1)) {
                                 (
                                     2,
diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 5c8ed94731afbb16785163a68e350cfe0714bfc1..6df95d539affb7fee4039427caeb9c6852f43964 100644 (file)
@@ -46,7 +46,7 @@
 use ptr::P;
 use parse::PResult;
 use ThinVec;
-use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, DelimSpan, TokenTree, TokenStream};
 use symbol::{Symbol, keywords};
 
 use std::borrow::Cow;
@@ -285,12 +285,12 @@ enum LastToken {
 }
 
 impl TokenCursorFrame {
-    fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self {
+    fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
         TokenCursorFrame {
             delim: delim,
             span: sp,
             open_delim: delim == token::NoDelim,
-            tree_cursor: tts.stream().into_trees(),
+            tree_cursor: tts.clone().into_trees(),
             close_delim: delim == token::NoDelim,
             last_token: LastToken::Was(None),
         }
@@ -2325,7 +2325,7 @@ pub fn mk_mac_expr(&mut self, span: Span, m: Mac_, attrs: ThinVec<Attribute>) ->
         })
     }
 
-    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> {
+    fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
         let delim = match self.token {
             token::OpenDelim(delim) => delim,
             _ => {
@@ -2345,7 +2345,7 @@ fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinToke
             token::Brace => MacDelimiter::Brace,
             token::NoDelim => self.bug("unexpected no delimiter"),
         };
-        Ok((delim, tts.stream().into()))
+        Ok((delim, tts.into()))
     }
 
     /// At the bottom (top?) of the precedence hierarchy,
@@ -4633,7 +4633,7 @@ fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
                 let ident = self.parse_ident()?;
                 let tokens = if self.check(&token::OpenDelim(token::Brace)) {
                     match self.parse_token_tree() {
-                        TokenTree::Delimited(_, _, tts) => tts.stream(),
+                        TokenTree::Delimited(_, _, tts) => tts,
                         _ => unreachable!(),
                     }
                 } else if self.check(&token::OpenDelim(token::Paren)) {
diff --git a/src/libsyntax/print/pprust.rs b/src/libsyntax/print/pprust.rs
index 2ad3d3a6d648795d112c9cbd05e351d7249e88f1..c53594032a00aefb006bd2dc9ead3eaf7ac61485 100644 (file)
@@ -807,7 +807,7 @@ fn print_tt(&mut self, tt: tokenstream::TokenTree) -> io::Result<()> {
             TokenTree::Delimited(_, delim, tts) => {
                 self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
                 self.writer().space()?;
-                self.print_tts(tts.stream())?;
+                self.print_tts(tts)?;
                 self.writer().space()?;
                 self.writer().word(token_to_string(&token::CloseDelim(delim)))
             },
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 95ff7728897e554c21eb9ed4e192ffffe065beeb..d5c362490ca6a8df41201cd6f241785f30472799 100644 (file)
@@ -41,7 +41,7 @@ pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(DelimSpan, DelimToken, ThinTokenStream),
+    Delimited(DelimSpan, DelimToken, TokenStream),
 }
 
 impl TokenTree {
@@ -62,8 +62,7 @@ pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
             (&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
             (&TokenTree::Delimited(_, delim, ref tts),
              &TokenTree::Delimited(_, delim2, ref tts2)) => {
-                delim == delim2 &&
-                tts.stream().eq_unspanned(&tts2.stream())
+                delim == delim2 && tts.eq_unspanned(&tts2)
             }
             (_, _) => false,
         }
@@ -81,8 +80,7 @@ pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool {
             }
             (&TokenTree::Delimited(_, delim, ref tts),
              &TokenTree::Delimited(_, delim2, ref tts2)) => {
-                delim == delim2 &&
-                tts.stream().probably_equal_for_proc_macro(&tts2.stream())
+                delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
             }
             (_, _) => false,
         }
@@ -492,41 +490,6 @@ pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
     }
 }
 
-/// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation.
-/// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
-/// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
-#[derive(Debug, Clone)]
-pub struct ThinTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);
-
-impl ThinTokenStream {
-    pub fn stream(&self) -> TokenStream {
-        self.clone().into()
-    }
-}
-
-impl From<TokenStream> for ThinTokenStream {
-    fn from(stream: TokenStream) -> ThinTokenStream {
-        ThinTokenStream(match stream {
-            TokenStream::Empty => None,
-            TokenStream::Stream(stream) => Some(stream),
-        })
-    }
-}
-
-impl From<ThinTokenStream> for TokenStream {
-    fn from(stream: ThinTokenStream) -> TokenStream {
-        stream.0.map(TokenStream::Stream).unwrap_or_else(TokenStream::empty)
-    }
-}
-
-impl Eq for ThinTokenStream {}
-
-impl PartialEq<ThinTokenStream> for ThinTokenStream {
-    fn eq(&self, other: &ThinTokenStream) -> bool {
-        TokenStream::from(self.clone()) == TokenStream::from(other.clone())
-    }
-}
-
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
         f.write_str(&pprust::tokens_to_string(self.clone()))
@@ -545,18 +508,6 @@ fn decode<D: Decoder>(decoder: &mut D) -> Result<TokenStream, D::Error> {
     }
 }
 
-impl Encodable for ThinTokenStream {
-    fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> {
-        TokenStream::from(self.clone()).encode(encoder)
-    }
-}
-
-impl Decodable for ThinTokenStream {
-    fn decode<D: Decoder>(decoder: &mut D) -> Result<ThinTokenStream, D::Error> {
-        TokenStream::decode(decoder).map(Into::into)
-    }
-}
-
 #[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub struct DelimSpan {
     pub open: Span,
diff --git a/src/libsyntax/visit.rs b/src/libsyntax/visit.rs
index 156546bbba94afb03eb677adeb5d449bc035a1c3..8cbd47ca70fded321d9eb95b2863d2e1327f6fdf 100644 (file)
@@ -832,7 +832,7 @@ pub fn walk_attribute<'a, V: Visitor<'a>>(visitor: &mut V, attr: &'a Attribute)
 pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
     match tt {
         TokenTree::Token(_, tok) => visitor.visit_token(tok),
-        TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts.stream()),
+        TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
     }
 }