use syntax::errors::DiagnosticBuilder;
use syntax::parse;
-use syntax::tokenstream::TokenStream as TokenStream_;
+use syntax::tokenstream;
/// The main type provided by this crate, representing an abstract stream of
/// tokens.
/// The API of this type is intentionally bare-bones, but it'll be expanded over
/// time!
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
-pub struct TokenStream {
- inner: TokenStream_,
-}
+// NOTE(review): refactored from a named `inner` field to a tuple newtype over
+// the compiler-internal `tokenstream::TokenStream`; all accessors in this
+// patch switch from `.inner` to `.0` accordingly.
+pub struct TokenStream(tokenstream::TokenStream);
/// Error returned from `TokenStream::from_str`.
#[derive(Debug)]
use syntax::ext::hygiene::Mark;
use syntax::ptr::P;
use syntax::parse::{self, token, ParseSess};
- use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_};
+ use syntax::tokenstream;
use super::{TokenStream, LexError};
pub fn new_token_stream(item: P<ast::Item>) -> TokenStream {
- TokenStream {
- inner: TokenTree::Token(item.span, token::Interpolated(Rc::new(token::NtItem(item))))
- .into()
- }
+ // Read `item.span` before `item` is moved into the interpolated token;
+ // the tuple destructuring makes that ordering explicit.
+ let (span, token) = (item.span, token::Interpolated(Rc::new(token::NtItem(item))));
+ TokenStream(tokenstream::TokenTree::Token(span, token).into())
}
- pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {
- TokenStream {
- inner: inner
- }
+ // Wrap a compiler-internal stream in the public newtype (no conversion).
+ pub fn token_stream_wrap(inner: tokenstream::TokenStream) -> TokenStream {
+ TokenStream(inner)
}
pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
with_sess(move |(sess, _)| {
- let mut parser = parse::stream_to_parser(sess, stream.inner);
+ // `.0` replaces the removed `inner` field on the `TokenStream` newtype.
+ let mut parser = parse::stream_to_parser(sess, stream.0);
let mut items = Vec::new();
while let Some(item) = try!(parser.parse_item().map_err(super::parse_to_lex_err)) {
})
}
- pub fn token_stream_inner(stream: TokenStream) -> TokenStream_ {
- stream.inner
+ // Unwrap the newtype back to the compiler-internal stream (inverse of
+ // `token_stream_wrap`).
+ pub fn token_stream_inner(stream: TokenStream) -> tokenstream::TokenStream {
+ stream.0
}
pub trait Registry {
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- self.inner.fmt(f)
+ // Delegate formatting to the wrapped internal stream.
+ self.0.fmt(f)
}
}
pub fn concat(mut streams: Vec<TokenStream>) -> TokenStream {
match streams.len() {
0 => TokenStream::empty(),
- 1 => TokenStream::from(streams.pop().unwrap()),
+ // The popped element is already a `TokenStream`; the old
+ // `TokenStream::from` call was an identity conversion.
+ 1 => streams.pop().unwrap(),
_ => TokenStream::concat_rc_slice(RcSlice::new(streams)),
}
}
stack: Vec<(RcSlice<TokenStream>, usize)>,
}
-impl Iterator for Cursor {
-    type Item = TokenTree;
-
+// NOTE(review): flat-stream iteration moves from the public `Cursor`
+// iterator into an inherent method on `StreamCursor`; `Cursor`'s
+// `Iterator` impl (added below) now delegates to this.
+impl StreamCursor {
fn next(&mut self) -> Option<TokenTree> {
- let cursor = match self.0 {
- CursorKind::Stream(ref mut cursor) => cursor,
- CursorKind::Tree(ref tree, ref mut consumed @ false) => {
- *consumed = true;
- return Some(tree.clone());
- }
- _ => return None,
- };
-
loop {
- if cursor.index < cursor.stream.len() {
- match cursor.stream[cursor.index].kind.clone() {
- TokenStreamKind::Tree(tree) => {
- cursor.index += 1;
- return Some(tree);
- }
+ if self.index < self.stream.len() {
+ // Advance first, then dispatch on the element just passed; a
+ // frame saved on `stack` therefore already points *past* the
+ // nested stream (the old code added the `+ 1` at push time).
+ self.index += 1;
+ match self.stream[self.index - 1].kind.clone() {
+ TokenStreamKind::Tree(tree) => return Some(tree),
TokenStreamKind::Stream(stream) => {
- cursor.stack.push((mem::replace(&mut cursor.stream, stream),
- mem::replace(&mut cursor.index, 0) + 1));
- }
- TokenStreamKind::Empty => {
- cursor.index += 1;
+ // Descend: save the current (stream, resume-index)
+ // frame and restart at the nested stream's front.
+ self.stack.push((mem::replace(&mut self.stream, stream),
+ mem::replace(&mut self.index, 0)));
}
+ // Empty sub-streams are skipped by looping again.
+ TokenStreamKind::Empty => {}
}
- } else if let Some((stream, index)) = cursor.stack.pop() {
- cursor.stream = stream;
- cursor.index = index;
+ } else if let Some((stream, index)) = self.stack.pop() {
+ // Current stream exhausted: pop back to the parent frame.
+ self.stream = stream;
+ self.index = index;
} else {
return None;
}
}
}
+impl Iterator for Cursor {
+ type Item = TokenTree;
+
+ fn next(&mut self) -> Option<TokenTree> {
+ // A single-tree cursor yields its tree exactly once (`consumed`
+ // flips to true below); a stream cursor delegates to the new
+ // `StreamCursor::next`; anything else is exhausted.
+ let (tree, consumed) = match self.0 {
+ CursorKind::Tree(ref tree, ref mut consumed @ false) => (tree, consumed),
+ CursorKind::Stream(ref mut cursor) => return cursor.next(),
+ _ => return None,
+ };
+
+ *consumed = true;
+ Some(tree.clone())
+ }
+}
+
impl Cursor {
fn new(stream: TokenStream) -> Self {
Cursor(match stream.kind {