git.lizzy.rs Git - rust.git/commitdiff
Clean up `tokenstream::Cursor` and `proc_macro`.
author: Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
Fri, 17 Mar 2017 23:23:12 +0000 (23:23 +0000)
committer: Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
Mon, 26 Jun 2017 02:05:46 +0000 (02:05 +0000)
src/libproc_macro/lib.rs
src/libsyntax/tokenstream.rs

index 4744baf1b42feb1da217212e17146b80440c5091..b9f4fa63e6006435a9ee3c4d036f1290cf67b8db 100644 (file)
@@ -48,7 +48,7 @@
 
 use syntax::errors::DiagnosticBuilder;
 use syntax::parse;
-use syntax::tokenstream::TokenStream as TokenStream_;
+use syntax::tokenstream;
 
 /// The main type provided by this crate, representing an abstract stream of
 /// tokens.
@@ -60,9 +60,7 @@
 /// The API of this type is intentionally bare-bones, but it'll be expanded over
 /// time!
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
-pub struct TokenStream {
-    inner: TokenStream_,
-}
+pub struct TokenStream(tokenstream::TokenStream);
 
 /// Error returned from `TokenStream::from_str`.
 #[derive(Debug)]
@@ -91,26 +89,22 @@ pub mod __internal {
     use syntax::ext::hygiene::Mark;
     use syntax::ptr::P;
     use syntax::parse::{self, token, ParseSess};
-    use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_};
+    use syntax::tokenstream;
 
     use super::{TokenStream, LexError};
 
     pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
-        TokenStream {
-            inner: TokenTree::Token(item.span, token::Interpolated(Rc::new(token::NtItem(item))))
-                .into()
-        }
+        let (span, token) = (item.span, token::Interpolated(Rc::new(token::NtItem(item))));
+        TokenStream(tokenstream::TokenTree::Token(span, token).into())
     }
 
-    pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {
-        TokenStream {
-            inner: inner
-        }
+    pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {
+        TokenStream(inner)
     }
 
     pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
         with_sess(move |(sess, _)| {
-            let mut parser = parse::stream_to_parser(sess, stream.inner);
+            let mut parser = parse::stream_to_parser(sess, stream.0);
             let mut items = Vec::new();
 
             while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
@@ -121,8 +115,8 @@ pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>
         })
     }
 
-    pub fn token_stream_inner(stream: TokenStream) -> TokenStream_ {
-        stream.inner
+    pub fn token_stream_inner(stream: TokenStream) -> tokenstream::TokenStream {
+        stream.0
     }
 
     pub trait Registry {
@@ -197,6 +191,6 @@ fn from_str(src: &str) -> Result<TokenStream, LexError> {
 #[stable(feature = "proc_macro_lib", since = "1.15.0")]
 impl fmt::Display for TokenStream {
     fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
-        self.inner.fmt(f)
+        self.0.fmt(f)
     }
 }
index 963482fc223f1befa6c0a518db4f9c8c4084efd4..ab4f697071477dca9f567d0a9b5fee0f2be950d2 100644 (file)
@@ -199,7 +199,7 @@ pub fn is_empty(&self) -> bool {
     pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream {
         match streams.len() {
             0 => TokenStream::empty(),
-            1 => TokenStream::from(streams.pop().unwrap()),
+            1 => streams.pop().unwrap(),
             _ => TokenStream::concat_rc_slice(RcSlice::new(streams)),
         }
     }
@@ -244,37 +244,22 @@ struct StreamCursor {
     stack: Vec<(RcSlice<TokenStream>, usize)>,
 }
 
-impl Iterator for Cursor {
-    type Item = TokenTree;
-
+impl StreamCursor {
     fn next(&mut self) -> Option<TokenTree> {
-        let cursor = match self.0 {
-            CursorKind::Stream(ref mut cursor) => cursor,
-            CursorKind::Tree(ref tree, ref mut consumed @ false) => {
-                *consumed = true;
-                return Some(tree.clone());
-            }
-            _ => return None,
-        };
-
         loop {
-            if cursor.index < cursor.stream.len() {
-                match cursor.stream[cursor.index].kind.clone() {
-                    TokenStreamKind::Tree(tree) => {
-                        cursor.index += 1;
-                        return Some(tree);
-                    }
+            if self.index < self.stream.len() {
+                self.index += 1;
+                match self.stream[self.index - 1].kind.clone() {
+                    TokenStreamKind::Tree(tree) => return Some(tree),
                     TokenStreamKind::Stream(stream) => {
-                        cursor.stack.push((mem::replace(&mut cursor.stream, stream),
-                                           mem::replace(&mut cursor.index, 0) + 1));
-                    }
-                    TokenStreamKind::Empty => {
-                        cursor.index += 1;
+                        self.stack.push((mem::replace(&mut self.stream, stream),
+                                         mem::replace(&mut self.index, 0)));
                     }
+                    TokenStreamKind::Empty => {}
                 }
-            } else if let Some((stream, index)) = cursor.stack.pop() {
-                cursor.stream = stream;
-                cursor.index = index;
+            } else if let Some((stream, index)) = self.stack.pop() {
+                self.stream = stream;
+                self.index = index;
             } else {
                 return None;
             }
@@ -282,6 +267,21 @@ fn next(&mut self) -> Option<TokenTree> {
     }
 }
 
+impl Iterator for Cursor {
+    type Item = TokenTree;
+
+    fn next(&mut self) -> Option<TokenTree> {
+        let (tree, consumed) = match self.0 {
+            CursorKind::Tree(ref tree, ref mut consumed @ false) => (tree, consumed),
+            CursorKind::Stream(ref mut cursor) => return cursor.next(),
+            _ => return None,
+        };
+
+        *consumed = true;
+        Some(tree.clone())
+    }
+}
+
 impl Cursor {
     fn new(stream: TokenStream) -> Self {
         Cursor(match stream.kind {