Rollup merge of #65174 - SimonSapin:zero-box, r=alexcrichton
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index bef12ed4fadafc22d88d48a3670b3020651b1107..ac155556cdae25d541ca79c82b947409d0c3b58f 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
@@ -14,7 +14,6 @@
 //! ownership of the original.
 
 use crate::parse::token::{self, DelimToken, Token, TokenKind};
-use crate::print::pprust;
 
 use syntax_pos::{BytePos, Span, DUMMY_SP};
 #[cfg(target_arch = "x86_64")]
@@ -23,7 +22,7 @@
 use rustc_serialize::{Decoder, Decodable, Encoder, Encodable};
 use smallvec::{SmallVec, smallvec};
 
-use std::{fmt, iter, mem};
+use std::{iter, mem};
 
 #[cfg(test)]
 mod tests;
@@ -137,13 +136,8 @@ pub fn close_tt(span: Span, delim: DelimToken) -> TokenTree {
 /// The goal is for procedural macros to work with `TokenStream`s and `TokenTree`s
 /// instead of a representation of the abstract syntax tree.
 /// Today's `TokenTree`s can still contain AST via `token::Interpolated` for back-compat.
-///
-/// The use of `Option` is an optimization that avoids the need for an
-/// allocation when the stream is empty. However, it is not guaranteed that an
-/// empty stream is represented with `None`; it may be represented as a `Some`
-/// around an empty `Vec`.
-#[derive(Clone, Debug)]
-pub struct TokenStream(pub Option<Lrc<Vec<TreeAndJoint>>>);
+#[derive(Clone, Debug, Default)]
+pub struct TokenStream(pub Lrc<Vec<TreeAndJoint>>);
 
 pub type TreeAndJoint = (TokenTree, IsJoint);
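With the `Option` layer gone, the empty stream is simply the `Default` value: a shared, empty `Vec` behind an `Lrc`. A minimal sketch of what that means for callers of the removed `TokenStream::empty()`; the crate path is an assumption, the methods come from this diff:

```rust
// Sketch only: the `Option`-free representation in use. `TokenStream::empty()`
// callers migrate to `TokenStream::default()`, and emptiness is queried on the
// backing vector rather than via a `None` check.
use syntax::tokenstream::TokenStream;

fn demo_empty() {
    let empty = TokenStream::default();
    assert!(empty.is_empty());
    assert_eq!(empty.len(), 0);
}
```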
 
@@ -162,38 +156,36 @@ pub enum IsJoint {
 impl TokenStream {
     /// Given a `TokenStream` with a `Stream` of only two arguments, return a new `TokenStream`
     /// separating the two arguments with a comma for diagnostic suggestions.
-    pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
+    pub fn add_comma(&self) -> Option<(TokenStream, Span)> {
         // Used to suggest if a user writes `foo!(a b);`
-        if let Some(ref stream) = self.0 {
-            let mut suggestion = None;
-            let mut iter = stream.iter().enumerate().peekable();
-            while let Some((pos, ts)) = iter.next() {
-                if let Some((_, next)) = iter.peek() {
-                    let sp = match (&ts, &next) {
-                        (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
-                        ((TokenTree::Token(token_left), NonJoint),
-                         (TokenTree::Token(token_right), _))
-                        if ((token_left.is_ident() && !token_left.is_reserved_ident())
-                            || token_left.is_lit()) &&
-                            ((token_right.is_ident() && !token_right.is_reserved_ident())
-                            || token_right.is_lit()) => token_left.span,
-                        ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
-                        _ => continue,
-                    };
-                    let sp = sp.shrink_to_hi();
-                    let comma = (TokenTree::token(token::Comma, sp), NonJoint);
-                    suggestion = Some((pos, comma, sp));
-                }
-            }
-            if let Some((pos, comma, sp)) = suggestion {
-                let mut new_stream = vec![];
-                let parts = stream.split_at(pos + 1);
-                new_stream.extend_from_slice(parts.0);
-                new_stream.push(comma);
-                new_stream.extend_from_slice(parts.1);
-                return Some((TokenStream::new(new_stream), sp));
+        let mut suggestion = None;
+        let mut iter = self.0.iter().enumerate().peekable();
+        while let Some((pos, ts)) = iter.next() {
+            if let Some((_, next)) = iter.peek() {
+                let sp = match (&ts, &next) {
+                    (_, (TokenTree::Token(Token { kind: token::Comma, .. }), _)) => continue,
+                    ((TokenTree::Token(token_left), NonJoint),
+                     (TokenTree::Token(token_right), _))
+                    if ((token_left.is_ident() && !token_left.is_reserved_ident())
+                        || token_left.is_lit()) &&
+                        ((token_right.is_ident() && !token_right.is_reserved_ident())
+                        || token_right.is_lit()) => token_left.span,
+                    ((TokenTree::Delimited(sp, ..), NonJoint), _) => sp.entire(),
+                    _ => continue,
+                };
+                let sp = sp.shrink_to_hi();
+                let comma = (TokenTree::token(token::Comma, sp), NonJoint);
+                suggestion = Some((pos, comma, sp));
             }
         }
+        if let Some((pos, comma, sp)) = suggestion {
+            let mut new_stream = vec![];
+            let parts = self.0.split_at(pos + 1);
+            new_stream.extend_from_slice(parts.0);
+            new_stream.push(comma);
+            new_stream.extend_from_slice(parts.1);
+            return Some((TokenStream::new(new_stream), sp));
+        }
         None
     }
 }
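To make the suggestion path concrete, here is a hedged sketch of `add_comma` (now `pub`) applied to the stream a user produces with `foo!(a b)`. The crate paths, `Symbol::intern`, and the `token::Ident` constructor are assumptions; `add_comma`, `TokenTree::token`, and `TokenStream::new` come from the hunks above.

```rust
// Sketch: two adjacent non-reserved idents `a b` get a comma suggested between
// them. `add_comma` returns the rewritten stream plus the span at which the
// comma was inserted, for use in a diagnostic suggestion.
use syntax::parse::token;
use syntax::tokenstream::{IsJoint, TokenStream, TokenTree};
use syntax_pos::symbol::Symbol;
use syntax_pos::DUMMY_SP;

fn demo_add_comma() {
    let ident = |name: &str| {
        (TokenTree::token(token::Ident(Symbol::intern(name), false), DUMMY_SP),
         IsJoint::NonJoint)
    };
    let stream = TokenStream::new(vec![ident("a"), ident("b")]);
    if let Some((suggested, comma_span)) = stream.add_comma() {
        assert_eq!(suggested.len(), 3); // `a`, the inserted `,`, then `b`
        let _ = comma_span;             // points just after `a`
    }
}
```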
@@ -210,9 +202,9 @@ fn from(tree: TokenTree) -> TreeAndJoint {
     }
 }
 
-impl<T: Into<TokenStream>> iter::FromIterator<T> for TokenStream {
-    fn from_iter<I: IntoIterator<Item = T>>(iter: I) -> Self {
-        TokenStream::from_streams(iter.into_iter().map(Into::into).collect::<SmallVec<_>>())
+impl iter::FromIterator<TokenTree> for TokenStream {
+    fn from_iter<I: IntoIterator<Item = TokenTree>>(iter: I) -> Self {
+        TokenStream::new(iter.into_iter().map(Into::into).collect::<Vec<TreeAndJoint>>())
     }
 }
 
@@ -225,28 +217,21 @@ fn eq(&self, other: &TokenStream) -> bool {
 }
 
 impl TokenStream {
-    pub fn len(&self) -> usize {
-        if let Some(ref slice) = self.0 {
-            slice.len()
-        } else {
-            0
-        }
+    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
+        TokenStream(Lrc::new(streams))
     }
 
-    pub fn empty() -> TokenStream {
-        TokenStream(None)
+    pub fn is_empty(&self) -> bool {
+        self.0.is_empty()
     }
 
-    pub fn is_empty(&self) -> bool {
-        match self.0 {
-            None => true,
-            Some(ref stream) => stream.is_empty(),
-        }
+    pub fn len(&self) -> usize {
+        self.0.len()
     }
 
     pub(crate) fn from_streams(mut streams: SmallVec<[TokenStream; 2]>) -> TokenStream {
         match streams.len() {
-            0 => TokenStream::empty(),
+            0 => TokenStream::default(),
             1 => streams.pop().unwrap(),
             _ => {
                 // We are going to extend the first stream in `streams` with
@@ -270,43 +255,22 @@ pub fn is_empty(&self) -> bool {
                 // Get the first stream. If it's `None`, create an empty
                 // stream.
                 let mut iter = streams.drain();
-                let mut first_stream_lrc = match iter.next().unwrap().0 {
-                    Some(first_stream_lrc) => first_stream_lrc,
-                    None => Lrc::new(vec![]),
-                };
+                let mut first_stream_lrc = iter.next().unwrap().0;
 
                 // Append the elements to the first stream, after reserving
                 // space for them.
                 let first_vec_mut = Lrc::make_mut(&mut first_stream_lrc);
                 first_vec_mut.reserve(num_appends);
                 for stream in iter {
-                    if let Some(stream) = stream.0 {
-                        first_vec_mut.extend(stream.iter().cloned());
-                    }
+                    first_vec_mut.extend(stream.0.iter().cloned());
                 }
 
                 // Create the final `TokenStream`.
-                match first_vec_mut.len() {
-                    0 => TokenStream(None),
-                    _ => TokenStream(Some(first_stream_lrc)),
-                }
+                TokenStream(first_stream_lrc)
             }
         }
     }
 
-    pub fn new(streams: Vec<TreeAndJoint>) -> TokenStream {
-        match streams.len() {
-            0 => TokenStream(None),
-            _ => TokenStream(Some(Lrc::new(streams))),
-        }
-    }
-
-    pub fn append_to_tree_and_joint_vec(self, vec: &mut Vec<TreeAndJoint>) {
-        if let Some(stream) = self.0 {
-            vec.extend(stream.iter().cloned());
-        }
-    }
-
     pub fn trees(&self) -> Cursor {
         self.clone().into_trees()
     }
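Since `from_streams` is `pub(crate)`, a concatenation sketch would live inside libsyntax itself (for instance in the `tests` module declared near the top of the file). The zero-stream case now falls out of `Default`, and the general case appends into the first stream's vector via `Lrc::make_mut` after a single `reserve`. The helper below is hypothetical:

```rust
// In-crate sketch (hypothetical helper): joining two streams. When the first
// stream's `Lrc` is uniquely owned, `Lrc::make_mut` mutates it in place, so
// the backing `Vec` is not cloned in the common case.
use crate::tokenstream::TokenStream;
use smallvec::smallvec;

fn concat(a: TokenStream, b: TokenStream) -> TokenStream {
    TokenStream::from_streams(smallvec![a, b])
}
```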
@@ -371,24 +335,22 @@ fn semantic_tree(tree: &TokenTree) -> bool {
     }
 
     pub fn map_enumerated<F: FnMut(usize, TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(self.0.map(|stream| {
-            Lrc::new(
-                stream
-                    .iter()
-                    .enumerate()
-                    .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
-                    .collect())
-        }))
+        TokenStream(Lrc::new(
+            self.0
+                .iter()
+                .enumerate()
+                .map(|(i, (tree, is_joint))| (f(i, tree.clone()), *is_joint))
+                .collect()
+        ))
     }
 
     pub fn map<F: FnMut(TokenTree) -> TokenTree>(self, mut f: F) -> TokenStream {
-        TokenStream(self.0.map(|stream| {
-            Lrc::new(
-                stream
-                    .iter()
-                    .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
-                    .collect())
-        }))
+        TokenStream(Lrc::new(
+            self.0
+                .iter()
+                .map(|(tree, is_joint)| (f(tree.clone()), *is_joint))
+                .collect()
+        ))
     }
 }
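Both `map` variants now always rebuild the backing vector (there is no `None` fast path left), cloning each tree and preserving its `IsJoint` flag. A hedged usage sketch, with crate paths assumed:

```rust
// Sketch: respan every plain token to `DUMMY_SP`, leaving joint-ness and
// delimited groups untouched.
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax_pos::DUMMY_SP;

fn strip_token_spans(stream: TokenStream) -> TokenStream {
    stream.map(|tree| match tree {
        TokenTree::Token(mut token) => {
            token.span = DUMMY_SP;
            TokenTree::Token(token)
        }
        delimited => delimited,
    })
}
```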
 
@@ -406,44 +368,43 @@ pub fn push<T: Into<TokenStream>>(&mut self, stream: T) {
 
         // If `self` is not empty and the last tree within the last stream is a
         // token tree marked with `Joint`...
-        if let Some(TokenStream(Some(ref mut last_stream_lrc))) = self.0.last_mut() {
+        if let Some(TokenStream(ref mut last_stream_lrc)) = self.0.last_mut() {
             if let Some((TokenTree::Token(last_token), Joint)) = last_stream_lrc.last() {
 
                 // ...and `stream` is not empty and the first tree within it is
                 // a token tree...
-                if let TokenStream(Some(ref mut stream_lrc)) = stream {
-                    if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
-
-                        // ...and the two tokens can be glued together...
-                        if let Some(glued_tok) = last_token.glue(&token) {
-
-                            // ...then do so, by overwriting the last token
-                            // tree in `self` and removing the first token tree
-                            // from `stream`. This requires using `make_mut()`
-                            // on the last stream in `self` and on `stream`,
-                            // and in practice this doesn't cause cloning 99.9%
-                            // of the time.
-
-                            // Overwrite the last token tree with the merged
-                            // token.
-                            let last_vec_mut = Lrc::make_mut(last_stream_lrc);
-                            *last_vec_mut.last_mut().unwrap() =
-                                (TokenTree::Token(glued_tok), *is_joint);
-
-                            // Remove the first token tree from `stream`. (This
-                            // is almost always the only tree in `stream`.)
-                            let stream_vec_mut = Lrc::make_mut(stream_lrc);
-                            stream_vec_mut.remove(0);
-
-                            // Don't push `stream` if it's empty -- that could
-                            // block subsequent token gluing, by getting
-                            // between two token trees that should be glued
-                            // together.
-                            if !stream.is_empty() {
-                                self.0.push(stream);
-                            }
-                            return;
+                let TokenStream(ref mut stream_lrc) = stream;
+                if let Some((TokenTree::Token(token), is_joint)) = stream_lrc.first() {
+
+                    // ...and the two tokens can be glued together...
+                    if let Some(glued_tok) = last_token.glue(&token) {
+
+                        // ...then do so, by overwriting the last token
+                        // tree in `self` and removing the first token tree
+                        // from `stream`. This requires using `make_mut()`
+                        // on the last stream in `self` and on `stream`,
+                        // and in practice this doesn't cause cloning 99.9%
+                        // of the time.
+
+                        // Overwrite the last token tree with the merged
+                        // token.
+                        let last_vec_mut = Lrc::make_mut(last_stream_lrc);
+                        *last_vec_mut.last_mut().unwrap() =
+                            (TokenTree::Token(glued_tok), *is_joint);
+
+                        // Remove the first token tree from `stream`. (This
+                        // is almost always the only tree in `stream`.)
+                        let stream_vec_mut = Lrc::make_mut(stream_lrc);
+                        stream_vec_mut.remove(0);
+
+                        // Don't push `stream` if it's empty -- that could
+                        // block subsequent token gluing, by getting
+                        // between two token trees that should be glued
+                        // together.
+                        if !stream.is_empty() {
+                            self.0.push(stream);
                         }
+                        return;
                     }
                 }
             }
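The comments above describe how `TokenStreamBuilder::push` glues a trailing `Joint` token onto the next stream's leading token. A hypothetical sketch; the builder's `new`/`build` constructors and the crate paths are assumed from the surrounding file, while `push`, `TokenTree::token`, and `Token::glue` appear in this diff:

```rust
// Sketch: pushing `=` marked `Joint` and then another `=` yields a single
// `==` token instead of two adjacent `=` tokens.
use syntax::parse::token;
use syntax::tokenstream::{IsJoint, TokenStream, TokenStreamBuilder, TokenTree};
use syntax_pos::DUMMY_SP;

fn demo_glue() {
    let eq = |is_joint| {
        TokenStream::new(vec![(TokenTree::token(token::Eq, DUMMY_SP), is_joint)])
    };
    let mut builder = TokenStreamBuilder::new();
    builder.push(eq(IsJoint::Joint));    // trailing token is glue-able
    builder.push(eq(IsJoint::NonJoint)); // leading `=` gets merged into `==`
    assert_eq!(builder.build().len(), 1);
}
```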
@@ -476,16 +437,11 @@ fn new(stream: TokenStream) -> Self {
     }
 
     pub fn next_with_joint(&mut self) -> Option<TreeAndJoint> {
-        match self.stream.0 {
-            None => None,
-            Some(ref stream) => {
-                if self.index < stream.len() {
-                    self.index += 1;
-                    Some(stream[self.index - 1].clone())
-                } else {
-                    None
-                }
-            }
+        if self.index < self.stream.len() {
+            self.index += 1;
+            Some(self.stream.0[self.index - 1].clone())
+        } else {
+            None
         }
     }
 
@@ -494,22 +450,13 @@ pub fn append(&mut self, new_stream: TokenStream) {
             return;
         }
         let index = self.index;
-        let stream = mem::replace(&mut self.stream, TokenStream(None));
+        let stream = mem::take(&mut self.stream);
         *self = TokenStream::from_streams(smallvec![stream, new_stream]).into_trees();
         self.index = index;
     }
 
     pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
-        match self.stream.0 {
-            None => None,
-            Some(ref stream) => stream[self.index ..].get(n).map(|(tree, _)| tree.clone()),
-        }
-    }
-}
-
-impl fmt::Display for TokenStream {
-    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
-        f.write_str(&pprust::tts_to_string(self.clone()))
+        self.stream.0[self.index ..].get(n).map(|(tree, _)| tree.clone())
     }
 }
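Finally, `Cursor` no longer special-cases an empty stream, and the `Display` impl (which delegated to `pprust::tts_to_string`) is removed along with the `pprust` import, so pretty-printing presumably moves to the call sites. A hedged sketch of cursor iteration over the new representation; paths are assumptions, the methods come from the hunks above:

```rust
// Sketch: walking a stream through its cursor. `next_with_joint` yields
// `(TokenTree, IsJoint)` pairs in order; `look_ahead` peeks without advancing.
use syntax::tokenstream::TokenStream;

fn dump(stream: &TokenStream) {
    let mut cursor = stream.trees();
    if let Some(peeked) = cursor.look_ahead(0) {
        let _ = peeked; // first tree, cursor not advanced
    }
    while let Some((tree, is_joint)) = cursor.next_with_joint() {
        println!("{:?} (joint: {:?})", tree, is_joint);
    }
}
```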