git.lizzy.rs Git - rust.git/commitdiff
Make `TokenStream` less recursive.
author: Nicholas Nethercote <nnethercote@mozilla.com>
Wed, 19 Dec 2018 03:53:52 +0000 (14:53 +1100)
committer: Nicholas Nethercote <nnethercote@mozilla.com>
Tue, 8 Jan 2019 04:08:46 +0000 (15:08 +1100)
`TokenStream` is currently recursive in *two* ways:

- the `TokenTree` variant contains a `ThinTokenStream`, which can
  contain a `TokenStream`;

- the `TokenStream` variant contains a `Vec<TokenStream>`.

The latter is not necessary and causes significant complexity. This
commit replaces it with the simpler `Vec<(TokenTree, IsJoint)>`.

This reduces complexity significantly. In particular, `StreamCursor` is
eliminated, and `Cursor` becomes much simpler, consisting now of just a
`TokenStream` and an index.

The commit also removes the `Extend` impl for `TokenStream`, because it
is only used in tests. (The commit also removes those tests.)

Overall, the commit reduces the number of lines of code by almost 200.

src/libsyntax/attr/mod.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/parse/lexer/tokentrees.rs
src/libsyntax/parse/parser.rs
src/libsyntax/tokenstream.rs
src/libsyntax_ext/proc_macro_server.rs

index a309775a1a40b1da9765d57b7c3989f1abb77e8a..d03563f8891aa56982862e1dda9ce45344a9f96d 100644 (file)
@@ -472,7 +472,7 @@ fn tokens(&self) -> TokenStream {
                                          Token::from_ast_ident(segment.ident)).into());
             last_pos = segment.ident.span.hi();
         }
-        idents.push(self.node.tokens(self.span));
+        self.node.tokens(self.span).append_to_tree_and_joint_vec(&mut idents);
         TokenStream::new(idents)
     }
 
@@ -529,7 +529,9 @@ pub fn tokens(&self, span: Span) -> TokenStream {
         match *self {
             MetaItemKind::Word => TokenStream::empty(),
             MetaItemKind::NameValue(ref lit) => {
-                TokenStream::new(vec![TokenTree::Token(span, Token::Eq).into(), lit.tokens()])
+                let mut vec = vec![TokenTree::Token(span, Token::Eq).into()];
+                lit.tokens().append_to_tree_and_joint_vec(&mut vec);
+                TokenStream::new(vec)
             }
             MetaItemKind::List(ref list) => {
                 let mut tokens = Vec::new();
@@ -537,7 +539,7 @@ pub fn tokens(&self, span: Span) -> TokenStream {
                     if i > 0 {
                         tokens.push(TokenTree::Token(span, Token::Comma).into());
                     }
-                    tokens.push(item.node.tokens());
+                    item.node.tokens().append_to_tree_and_joint_vec(&mut tokens);
                 }
                 TokenTree::Delimited(
                     DelimSpan::from_single(span),
index ad8668aca70f9442a7dadb13d68057b163beccef..c3124144009ab61bfc2b7f589d8df50e59d3586c 100644 (file)
@@ -233,7 +233,7 @@ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
                     self.span, token::Token::from_ast_ident(segment.ident)
                 ).into());
             }
-            inner.push(self.tokens.clone());
+            self.tokens.clone().append_to_tree_and_joint_vec(&mut inner);
 
             let delim_span = DelimSpan::from_single(self.span);
             r.push(TokenTree::Delimited(
index 31d87508c6bca13998f7024853967f7d06ce2460..0ef2d3b749d810f962e5b226fba17f94a58e0eac 100644 (file)
@@ -7,7 +7,7 @@
 use parse::token::{self, Token, NtTT};
 use smallvec::SmallVec;
 use syntax_pos::DUMMY_SP;
-use tokenstream::{TokenStream, TokenTree, DelimSpan};
+use tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndJoint};
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -63,7 +63,7 @@ pub fn transcribe(cx: &ExtCtxt,
     let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];
     let interpolations = interp.unwrap_or_else(FxHashMap::default); /* just a convenience */
     let mut repeats = Vec::new();
-    let mut result: Vec<TokenStream> = Vec::new();
+    let mut result: Vec<TreeAndJoint> = Vec::new();
     let mut result_stack = Vec::new();
 
     loop {
@@ -78,7 +78,7 @@ pub fn transcribe(cx: &ExtCtxt,
                     if let Some(sep) = sep.clone() {
                         // repeat same span, I guess
                         let prev_span = match result.last() {
-                            Some(stream) => stream.trees().next().unwrap().span(),
+                            Some((tt, _)) => tt.span(),
                             None => DUMMY_SP,
                         };
                         result.push(TokenTree::Token(prev_span, sep).into());
index 6c4e9e1c940c557fe8c93d4dc3e66bf760707491..d219f29f06c204ba2145ec8a9a3da8195e29a23f 100644 (file)
@@ -1,7 +1,7 @@
 use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
-use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree};
+use tokenstream::{DelimSpan, IsJoint::*, TokenStream, TokenTree, TreeAndJoint};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -33,7 +33,7 @@ fn parse_token_trees_until_close_delim(&mut self) -> TokenStream {
         }
     }
 
-    fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
+    fn parse_token_tree(&mut self) -> PResult<'a, TreeAndJoint> {
         let sm = self.sess.source_map();
         match self.token {
             token::Eof => {
@@ -156,7 +156,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
                 Ok(TokenTree::Delimited(
                     delim_span,
                     delim,
-                    tts.into(),
+                    tts.into()
                 ).into())
             },
             token::CloseDelim(_) => {
@@ -176,7 +176,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
                 let raw = self.span_src_raw;
                 self.real_token();
                 let is_joint = raw.hi() == self.span_src_raw.lo() && token::is_op(&self.token);
-                Ok(TokenStream::Tree(tt, if is_joint { Joint } else { NonJoint }))
+                Ok((tt, if is_joint { Joint } else { NonJoint }))
             }
         }
     }
index 1e4a26b353759ab244289f2b1845e400627618f4..eababf58dfa4c0881cdb610a8ed9f095b27d02f9 100644 (file)
@@ -2914,7 +2914,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a,
                 TokenTree::Delimited(
                     frame.span,
                     frame.delim,
-                    frame.tree_cursor.original_stream().into(),
+                    frame.tree_cursor.stream.into(),
                 )
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
index 4b33a715c5c89c3168946bb6a0a61d2f16ccded0..fb72ef9c956ce5057041339d2645dbb56b31c1d8 100644 (file)
@@ -147,9 +147,11 @@ pub fn close_tt(span: Span, delim: DelimToken) -> TokenTree {
 pub enum TokenStream {
     Empty,
     Tree(TokenTree, IsJoint),
-    Stream(Lrc<Vec<TokenStream>>),
+    Stream(Lrc<Vec<TreeAndJoint>>),
 }
 
+pub type TreeAndJoint = (TokenTree, IsJoint);
+
 // `TokenStream` is used a lot. Make sure it doesn't unintentionally get bigger.
 #[cfg(target_arch = "x86_64")]
 static_assert!(MEM_SIZE_OF_TOKEN_STREAM: mem::size_of::<TokenStream>() == 32);
@@ -173,16 +175,14 @@ pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
             while let Some((pos, ts)) = iter.next() {
                 if let Some((_, next)) = iter.peek() {
                     let sp = match (&ts, &next) {
-                        (TokenStream::Tree(TokenTree::Token(_, token::Token::Comma), NonJoint), _) |
-                        (_, TokenStream::Tree(TokenTree::Token(_, token::Token::Comma), NonJoint))
-                          => continue,
-                        (TokenStream::Tree(TokenTree::Token(sp, _), NonJoint), _) => *sp,
-                        (TokenStream::Tree(TokenTree::Delimited(sp, ..), NonJoint), _) =>
-                            sp.entire(),
+                        ((TokenTree::Token(_, token::Token::Comma), NonJoint), _) |
+                        (_, (TokenTree::Token(_, token::Token::Comma), NonJoint)) => continue,
+                        ((TokenTree::Token(sp, _), NonJoint), _) => *sp,
+                        ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
                         _ => continue,
                     };
                     let sp = sp.shrink_to_hi();
-                    let comma = TokenStream::Tree(TokenTree::Token(sp, token::Comma), NonJoint);
+                    let comma = (TokenTree::Token(sp, token::Comma), NonJoint);
                     suggestion = Some((pos, comma, sp));
                 }
             }
@@ -200,8 +200,14 @@ pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
 }
 
 impl From<TokenTree> for TokenStream {
-    fn from(tt: TokenTree) -> TokenStream {
-        TokenStream::Tree(tt, NonJoint)
+    fn from(tree: TokenTree) -> TokenStream {
+        TokenStream::Tree(tree, NonJoint)
+    }
+}
+
+impl From<TokenTree> for TreeAndJoint {
+    fn from(tree: TokenTree) -> TreeAndJoint {
+        (tree, NonJoint)
     }
 }
 
@@ -213,56 +219,7 @@ fn from(token: Token) -> TokenStream {
 
 impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
     fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
-        TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<_>>())
-    }
-}
-
-impl Extend<TokenStream> for TokenStream {
-    fn extend<I: IntoIterator<Item = TokenStream>>(&mut self, iter: I) {
-        let iter = iter.into_iter();
-        let this = mem::replace(self, TokenStream::Empty);
-
-        // Vector of token streams originally in self.
-        let tts: Vec<TokenStream> = match this {
-            TokenStream::Empty => {
-                let mut vec = Vec::new();
-                vec.reserve(iter.size_hint().0);
-                vec
-            }
-            TokenStream::Tree(..) => {
-                let mut vec = Vec::new();
-                vec.reserve(1 + iter.size_hint().0);
-                vec.push(this);
-                vec
-            }
-            TokenStream::Stream(rc_vec) => match Lrc::try_unwrap(rc_vec) {
-                Ok(mut vec) => {
-                    // Extend in place using the existing capacity if possible.
-                    // This is the fast path for libraries like `quote` that
-                    // build a token stream.
-                    vec.reserve(iter.size_hint().0);
-                    vec
-                }
-                Err(rc_vec) => {
-                    // Self is shared so we need to copy and extend that.
-                    let mut vec = Vec::new();
-                    vec.reserve(rc_vec.len() + iter.size_hint().0);
-                    vec.extend_from_slice(&rc_vec);
-                    vec
-                }
-            }
-        };
-
-        // Perform the extend, joining tokens as needed along the way.
-        let mut builder = TokenStreamBuilder(tts);
-        for stream in iter {
-            builder.push(stream);
-        }
-
-        // Build the resulting token stream. If it contains more than one token,
-        // preserve capacity in the vector in anticipation of the caller
-        // performing additional calls to extend.
-        *self = TokenStream::new(builder.0);
+        TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<Vec<_>>())
     }
 }
 
@@ -294,14 +251,43 @@ pub fn is_empty(&self) -> bool {
         }
     }
 
-    pub fn new(mut streams: Vec<TokenStream>) -> TokenStream {
+    fn from_streams(mut streams: Vec<TokenStream>) -> TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
             1 => streams.pop().unwrap(),
+            _ => {
+                let mut vec = vec![];
+                for stream in streams {
+                    match stream {
+                        TokenStream::Empty => {},
+                        TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)),
+                        TokenStream::Stream(stream2) => vec.extend(stream2.iter().cloned()),
+                    }
+                }
+                TokenStream::new(vec)
+            }
+        }
+    }
+
+    pub fn new(mut streams: Vec<TreeAndJoint>) -> TokenStream {
+        match streams.len() {
+            0 => TokenStream::empty(),
+            1 => {
+                let (tree, is_joint) = streams.pop().unwrap();
+                TokenStream::Tree(tree, is_joint)
+            }
             _ => TokenStream::Stream(Lrc::new(streams)),
         }
     }
 
+    pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
+        match self {
+            TokenStream::Empty => {}
+            TokenStream::Tree(tree, is_joint) => vec.push((tree, is_joint)),
+            TokenStream::Stream(stream) => vec.extend(stream.iter().cloned()),
+        }
+    }
+
     pub fn trees(&self) -> Cursor {
         self.clone().into_trees()
     }
@@ -362,54 +348,58 @@ fn semantic_tree(tree: &TokenTree) -> bool {
         t1.next().is_none() && t2.next().is_none()
     }
 
-    /// Precondition: `self` consists of a single token tree.
-    /// Returns true if the token tree is a joint operation w.r.t. `proc_macro::TokenNode`.
-    pub fn as_tree(self) -> (TokenTree, bool /* joint? */) {
-        match self {
-            TokenStream::Tree(tree, is_joint) => (tree, is_joint == Joint),
-            _ => unreachable!(),
-        }
-    }
-
     pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        let mut trees = self.into_trees();
-        let mut result = Vec::new();
-        let mut i = 0;
-        while let Some(stream) = trees.next_as_stream() {
-            result.push(match stream {
-                TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(i, tree), is_joint),
-                _ => unreachable!()
-            });
-            i += 1;
+        match self {
+            TokenStream::Empty => TokenStream::Empty,
+            TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(0, tree), is_joint),
+            TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new(
+                stream
+                    .iter()
+                    .enumerate()
+                    .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
+                    .collect()
+            )),
         }
-        TokenStream::new(result)
     }
 
     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        let mut trees = self.into_trees();
-        let mut result = Vec::new();
-        while let Some(stream) = trees.next_as_stream() {
-            result.push(match stream {
-                TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(tree), is_joint),
-                _ => unreachable!()
-            });
+        match self {
+            TokenStream::Empty => TokenStream::Empty,
+            TokenStream::Tree(tree, is_joint) => TokenStream::Tree(f(tree), is_joint),
+            TokenStream::Stream(stream) => TokenStream::Stream(Lrc::new(
+                stream
+                    .iter()
+                    .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
+                    .collect()
+            )),
         }
-        TokenStream::new(result)
     }
 
     fn first_tree_and_joint(&self) -> Option<(TokenTree, IsJoint)> {
         match self {
             TokenStream::Empty => None,
             TokenStream::Tree(ref tree, is_joint) => Some((tree.clone(), *is_joint)),
-            TokenStream::Stream(ref stream) => stream.first().unwrap().first_tree_and_joint(),
+            TokenStream::Stream(ref stream) => Some(stream.first().unwrap().clone())
         }
     }
 
     fn last_tree_if_joint(&self) -> Option<TokenTree> {
         match self {
-            TokenStream::Empty | TokenStream::Tree(_, NonJoint) => None,
-            TokenStream::Tree(ref tree, Joint) => Some(tree.clone()),
-            TokenStream::Stream(ref stream) => stream.last().unwrap().last_tree_if_joint(),
+            TokenStream::Empty => None,
+            TokenStream::Tree(ref tree, is_joint) => {
+                if *is_joint == Joint {
+                    Some(tree.clone())
+                } else {
+                    None
+                }
+            }
+            TokenStream::Stream(ref stream) => {
+                if let (tree, Joint) = stream.last().unwrap() {
+                    Some(tree.clone())
+                } else {
+                    None
+                }
+            }
         }
     }
 }
@@ -442,13 +432,8 @@ pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
         self.0.push(stream);
     }
 
-    pub fn add<T: Into<TokenStream>>(mut self, stream: T) -> Self {
-        self.push(stream);
-        self
-    }
-
     pub fn build(self) -> TokenStream {
-        TokenStream::new(self.0)
+        TokenStream::from_streams(self.0)
     }
 
     fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
@@ -456,10 +441,9 @@ fn push_all_but_last_tree(&mut self, stream: &TokenStream) {
             let len = streams.len();
             match len {
                 1 => {}
-                2 => self.0.push(streams[0].clone().into()),
-                _ => self.0.push(TokenStream::new(streams[0 .. len - 1].to_vec())),
+                2 => self.0.push(TokenStream::Tree(streams[0].0.clone(), streams[0].1)),
+                _ => self.0.push(TokenStream::Stream(Lrc::new(streams[0 .. len - 1].to_vec()))),
             }
-            self.push_all_but_last_tree(&streams[len - 1])
         }
     }
 
@@ -468,154 +452,77 @@ fn push_all_but_first_tree(&mut self, stream: &TokenStream) {
             let len = streams.len();
             match len {
                 1 => {}
-                2 => self.0.push(streams[1].clone().into()),
-                _ => self.0.push(TokenStream::new(streams[1 .. len].to_vec())),
+                2 => self.0.push(TokenStream::Tree(streams[1].0.clone(), streams[1].1)),
+                _ => self.0.push(TokenStream::Stream(Lrc::new(streams[1 .. len].to_vec()))),
             }
-            self.push_all_but_first_tree(&streams[0])
         }
     }
 }
 
 #[derive(Clone)]
-pub struct Cursor(CursorKind);
-
-#[derive(Clone)]
-enum CursorKind {
-    Empty,
-    Tree(TokenTree, IsJoint, bool /* consumed? */),
-    Stream(StreamCursor),
-}
-
-#[derive(Clone)]
-struct StreamCursor {
-    stream: Lrc<Vec<TokenStream>>,
+pub struct Cursor {
+    pub stream: TokenStream,
     index: usize,
-    stack: Vec<(Lrc<Vec<TokenStream>>, usize)>,
-}
-
-impl StreamCursor {
-    fn new(stream: Lrc<Vec<TokenStream>>) -> Self {
-        StreamCursor { stream: stream, index: 0, stack: Vec::new() }
-    }
-
-    fn next_as_stream(&mut self) -> Option<TokenStream> {
-        loop {
-            if self.index < self.stream.len() {
-                self.index += 1;
-                let next = self.stream[self.index - 1].clone();
-                match next {
-                    TokenStream::Empty => {}
-                    TokenStream::Tree(..) => return Some(next),
-                    TokenStream::Stream(stream) => self.insert(stream),
-                }
-            } else if let Some((stream, index)) = self.stack.pop() {
-                self.stream = stream;
-                self.index = index;
-            } else {
-                return None;
-            }
-        }
-    }
-
-    fn insert(&mut self, stream: Lrc<Vec<TokenStream>>) {
-        self.stack.push((mem::replace(&mut self.stream, stream),
-                         mem::replace(&mut self.index, 0)));
-    }
 }
 
 impl Iterator for Cursor {
     type Item = TokenTree;
 
     fn next(&mut self) -> Option<TokenTree> {
-        self.next_as_stream().map(|stream| match stream {
-            TokenStream::Tree(tree, _) => tree,
-            _ => unreachable!()
-        })
+        self.next_with_joint().map(|(tree, _)| tree)
     }
 }
 
 impl Cursor {
     fn new(stream: TokenStream) -> Self {
-        Cursor(match stream {
-            TokenStream::Empty => CursorKind::Empty,
-            TokenStream::Tree(tree, is_joint) => CursorKind::Tree(tree, is_joint, false),
-            TokenStream::Stream(stream) => CursorKind::Stream(StreamCursor::new(stream)),
-        })
-    }
-
-    pub fn next_as_stream(&mut self) -> Option<TokenStream> {
-        let (stream, consumed) = match self.0 {
-            CursorKind::Tree(ref tree, ref is_joint, ref mut consumed @ false) =>
-                (TokenStream::Tree(tree.clone(), *is_joint), consumed),
-            CursorKind::Stream(ref mut cursor) => return cursor.next_as_stream(),
-            _ => return None,
-        };
-
-        *consumed = true;
-        Some(stream)
+        Cursor { stream, index: 0 }
     }
 
-    pub fn insert(&mut self, stream: TokenStream) {
-        match self.0 {
-            _ if stream.is_empty() => return,
-            CursorKind::Empty => *self = stream.trees(),
-            CursorKind::Tree(_, _, consumed) => {
-                *self = TokenStream::new(vec![self.original_stream(), stream]).trees();
-                if consumed {
-                    self.next();
+    pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
+        match self.stream {
+            TokenStream::Empty => None,
+            TokenStream::Tree(ref tree, ref is_joint) => {
+                if self.index == 0 {
+                    self.index = 1;
+                    Some((tree.clone(), *is_joint))
+                } else {
+                    None
                 }
             }
-            CursorKind::Stream(ref mut cursor) => {
-                cursor.insert(ThinTokenStream::from(stream).0.unwrap());
+            TokenStream::Stream(ref stream) => {
+                if self.index < stream.len() {
+                    self.index += 1;
+                    Some(stream[self.index - 1].clone())
+                } else {
+                    None
+                }
             }
         }
     }
 
-    pub fn original_stream(&self) -> TokenStream {
-        match self.0 {
-            CursorKind::Empty => TokenStream::empty(),
-            CursorKind::Tree(ref tree, ref is_joint, _) =>
-                TokenStream::Tree(tree.clone(), *is_joint),
-            CursorKind::Stream(ref cursor) => TokenStream::Stream(
-                cursor.stack.get(0).cloned().map(|(stream, _)| stream)
-                    .unwrap_or_else(|| cursor.stream.clone())
-            ),
+    pub fn append(&mut self, new_stream: TokenStream) {
+        if new_stream.is_empty() {
+            return;
         }
+        let index = self.index;
+        let stream = mem::replace(&mut self.stream, TokenStream::Empty);
+        *self = TokenStream::from_streams(vec![stream, new_stream]).into_trees();
+        self.index = index;
     }
 
     pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
-        fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize> {
-            for stream in streams {
-                n = match stream {
-                    TokenStream::Tree(ref tree, _) if n == 0 => return Ok(tree.clone()),
-                    TokenStream::Tree(..) => n - 1,
-                    TokenStream::Stream(ref stream) => match look_ahead(stream, n) {
-                        Ok(tree) => return Ok(tree),
-                        Err(n) => n,
-                    },
-                    _ => n,
-                };
+        match self.stream {
+            TokenStream::Empty => None,
+            TokenStream::Tree(ref tree, _) => {
+                if n == 0 && self.index == 0 {
+                    Some(tree.clone())
+                } else {
+                    None
+                }
             }
-            Err(n)
+            TokenStream::Stream(ref stream) =>
+                stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
         }
-
-        match self.0 {
-            CursorKind::Empty |
-            CursorKind::Tree(_, _, true) => Err(n),
-            CursorKind::Tree(ref tree, _, false) => look_ahead(&[tree.clone().into()], n),
-            CursorKind::Stream(ref cursor) => {
-                look_ahead(&cursor.stream[cursor.index ..], n).or_else(|mut n| {
-                    for &(ref stream, index) in cursor.stack.iter().rev() {
-                        n = match look_ahead(&stream[index..], n) {
-                            Ok(tree) => return Ok(tree),
-                            Err(n) => n,
-                        }
-                    }
-
-                    Err(n)
-                })
-            }
-        }.ok()
     }
 }
 
@@ -623,7 +530,7 @@ fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize>
 /// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
 /// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
 #[derive(Debug, Clone)]
-pub struct ThinTokenStream(Option<Lrc<Vec<TokenStream>>>);
+pub struct ThinTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);
 
 impl ThinTokenStream {
     pub fn stream(&self) -> TokenStream {
@@ -635,7 +542,7 @@ impl From<TokenStream> for ThinTokenStream {
     fn from(stream: TokenStream) -> ThinTokenStream {
         ThinTokenStream(match stream {
             TokenStream::Empty => None,
-            TokenStream::Tree(..) => Some(Lrc::new(vec![stream])),
+            TokenStream::Tree(tree, is_joint) => Some(Lrc::new(vec![(tree, is_joint)])),
             TokenStream::Stream(stream) => Some(stream),
         })
     }
@@ -742,7 +649,7 @@ fn test_concat() {
             let test_res = string_to_ts("foo::bar::baz");
             let test_fst = string_to_ts("foo::bar");
             let test_snd = string_to_ts("::baz");
-            let eq_res = TokenStream::new(vec![test_fst, test_snd]);
+            let eq_res = TokenStream::from_streams(vec![test_fst, test_snd]);
             assert_eq!(test_res.trees().count(), 5);
             assert_eq!(eq_res.trees().count(), 5);
             assert_eq!(test_res.eq_unspanned(&eq_res), true);
@@ -827,107 +734,4 @@ fn test_dotdotdot() {
         assert!(stream.eq_unspanned(&string_to_ts("...")));
         assert_eq!(stream.trees().count(), 1);
     }
-
-    #[test]
-    fn test_extend_empty() {
-        with_globals(|| {
-            // Append a token onto an empty token stream.
-            let mut stream = TokenStream::empty();
-            stream.extend(vec![string_to_ts("t")]);
-
-            let expected = string_to_ts("t");
-            assert!(stream.eq_unspanned(&expected));
-        });
-    }
-
-    #[test]
-    fn test_extend_nothing() {
-        with_globals(|| {
-            // Append nothing onto a token stream containing one token.
-            let mut stream = string_to_ts("t");
-            stream.extend(vec![]);
-
-            let expected = string_to_ts("t");
-            assert!(stream.eq_unspanned(&expected));
-        });
-    }
-
-    #[test]
-    fn test_extend_single() {
-        with_globals(|| {
-            // Append a token onto token stream containing a single token.
-            let mut stream = string_to_ts("t1");
-            stream.extend(vec![string_to_ts("t2")]);
-
-            let expected = string_to_ts("t1 t2");
-            assert!(stream.eq_unspanned(&expected));
-        });
-    }
-
-    #[test]
-    fn test_extend_in_place() {
-        with_globals(|| {
-            // Append a token onto token stream containing a reference counted
-            // vec of tokens. The token stream has a reference count of 1 so
-            // this can happen in place.
-            let mut stream = string_to_ts("t1 t2");
-            stream.extend(vec![string_to_ts("t3")]);
-
-            let expected = string_to_ts("t1 t2 t3");
-            assert!(stream.eq_unspanned(&expected));
-        });
-    }
-
-    #[test]
-    fn test_extend_copy() {
-        with_globals(|| {
-            // Append a token onto token stream containing a reference counted
-            // vec of tokens. The token stream is shared so the extend takes
-            // place on a copy.
-            let mut stream = string_to_ts("t1 t2");
-            let _incref = stream.clone();
-            stream.extend(vec![string_to_ts("t3")]);
-
-            let expected = string_to_ts("t1 t2 t3");
-            assert!(stream.eq_unspanned(&expected));
-        });
-    }
-
-    #[test]
-    fn test_extend_no_join() {
-        with_globals(|| {
-            let first = TokenTree::Token(DUMMY_SP, Token::Dot);
-            let second = TokenTree::Token(DUMMY_SP, Token::Dot);
-
-            // Append a dot onto a token stream containing a dot, but do not
-            // join them.
-            let mut stream = TokenStream::from(first);
-            stream.extend(vec![TokenStream::from(second)]);
-
-            let expected = string_to_ts(". .");
-            assert!(stream.eq_unspanned(&expected));
-
-            let unexpected = string_to_ts("..");
-            assert!(!stream.eq_unspanned(&unexpected));
-        });
-    }
-
-    #[test]
-    fn test_extend_join() {
-        with_globals(|| {
-            let first = TokenTree::Token(DUMMY_SP, Token::Dot).joint();
-            let second = TokenTree::Token(DUMMY_SP, Token::Dot);
-
-            // Append a dot onto a token stream containing a dot, forming a
-            // dotdot.
-            let mut stream = first;
-            stream.extend(vec![TokenStream::from(second)]);
-
-            let expected = string_to_ts("..");
-            assert!(stream.eq_unspanned(&expected));
-
-            let unexpected = string_to_ts(". .");
-            assert!(!stream.eq_unspanned(&unexpected));
-        });
-    }
 }
index afd86a4f7465cd0b72455beed5afbbc1bfed8a1f..158cbc791ef504f30cf0d5ff6802f84769480874 100644 (file)
@@ -11,7 +11,7 @@
 use syntax::ext::base::ExtCtxt;
 use syntax::parse::lexer::comments;
 use syntax::parse::{self, token, ParseSess};
-use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream};
+use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
 use syntax_pos::hygiene::{SyntaxContext, Transparency};
 use syntax_pos::symbol::{keywords, Symbol};
 use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
@@ -46,13 +46,14 @@ fn to_internal(self) -> token::DelimToken {
     }
 }
 
-impl FromInternal<(TokenStream, &'_ ParseSess, &'_ mut Vec<Self>)>
+impl FromInternal<(TreeAndJoint, &'_ ParseSess, &'_ mut Vec<Self>)>
     for TokenTree<Group, Punct, Ident, Literal>
 {
-    fn from_internal((stream, sess, stack): (TokenStream, &ParseSess, &mut Vec<Self>)) -> Self {
+    fn from_internal(((tree, is_joint), sess, stack): (TreeAndJoint, &ParseSess, &mut Vec<Self>))
+                    -> Self {
         use syntax::parse::token::*;
 
-        let (tree, joint) = stream.as_tree();
+        let joint = is_joint == Joint;
         let (span, token) = match tree {
             tokenstream::TokenTree::Delimited(span, delim, tts) => {
                 let delimiter = Delimiter::from_internal(delim);
@@ -450,7 +451,7 @@ fn next(
     ) -> Option<TokenTree<Self::Group, Self::Punct, Self::Ident, Self::Literal>> {
         loop {
             let tree = iter.stack.pop().or_else(|| {
-                let next = iter.cursor.next_as_stream()?;
+                let next = iter.cursor.next_with_joint()?;
                 Some(TokenTree::from_internal((next, self.sess, &mut iter.stack)))
             })?;
             // HACK: The condition "dummy span + group with empty delimiter" represents an AST
@@ -461,7 +462,7 @@ fn next(
             // and not doing the roundtrip through AST.
             if let TokenTree::Group(ref group) = tree {
                 if group.delimiter == Delimiter::None && group.span.entire().is_dummy() {
-                    iter.cursor.insert(group.stream.clone());
+                    iter.cursor.append(group.stream.clone());
                     continue;
                 }
             }