tokenstream::TokenTree::Delimited(span, delim, ref tts) => {
span.hash_stable(hcx, hasher);
std_hash::Hash::hash(&delim, hasher);
- for sub_tt in tts.stream().trees() {
+ for sub_tt in tts.trees() {
sub_tt.hash_stable(hcx, hasher);
}
}
_ => {},
}
TokenTree::Delimited(_, _, tts) => {
- self.check_tokens(cx, tts.stream())
+ self.check_tokens(cx, tts)
},
}
}
use source_map::{dummy_spanned, respan, Spanned};
use symbol::{keywords, Symbol};
use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{ThinTokenStream, TokenStream};
+use tokenstream::TokenStream;
use ThinVec;
use rustc_data_structures::fx::FxHashSet;
pub struct Mac_ {
pub path: Path,
pub delim: MacDelimiter,
- pub tts: ThinTokenStream,
+ pub tts: TokenStream,
}
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Debug)]
impl Mac_ {
pub fn stream(&self) -> TokenStream {
- self.tts.stream()
+ self.tts.clone()
}
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct MacroDef {
- pub tokens: ThinTokenStream,
+ pub tokens: TokenStream,
pub legacy: bool,
}
}
Some(TokenTree::Delimited(_, delim, ref tts)) if delim == token::Paren => {
tokens.next();
- tts.stream()
+ tts.clone()
}
_ => return Some(MetaItemKind::Word),
};
},
TokenTree::Delimited(span, delim, ref tts) => {
let mut stmts = statements_mk_tt(cx, &TokenTree::open_tt(span.open, delim), false);
- stmts.extend(statements_mk_tts(cx, tts.stream()));
+ stmts.extend(statements_mk_tts(cx, tts.clone()));
stmts.extend(statements_mk_tt(cx, &TokenTree::close_tt(span.close, delim), false));
stmts
}
TokenTree::Delimited(span, delim, tts) => TokenTree::Delimited(
DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
delim,
- fld.fold_tts(tts.stream()).into(),
+ fld.fold_tts(tts).into(),
),
}
}
)
if name_macro_rules.name == "macro_rules"
&& name_zip.name == "zip" => {
- let tts = &macro_tts.stream().trees().collect::<Vec<_>>();
+ let tts = &macro_tts.trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
(
3,
Some(&TokenTree::Delimited(_, second_delim, ref second_tts)),
)
if macro_delim == token::Paren => {
- let tts = &first_tts.stream().trees().collect::<Vec<_>>();
+ let tts = &first_tts.trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
if first_delim == token::Paren && ident.name == "a" => {},
_ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
}
- let tts = &second_tts.stream().trees().collect::<Vec<_>>();
+ let tts = &second_tts.trees().collect::<Vec<_>>();
match (tts.len(), tts.get(0), tts.get(1)) {
(
2,
use ptr::P;
use parse::PResult;
use ThinVec;
-use tokenstream::{self, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, DelimSpan, TokenTree, TokenStream};
use symbol::{Symbol, keywords};
use std::borrow::Cow;
}
impl TokenCursorFrame {
- fn new(sp: DelimSpan, delim: DelimToken, tts: &ThinTokenStream) -> Self {
+ fn new(sp: DelimSpan, delim: DelimToken, tts: &TokenStream) -> Self {
TokenCursorFrame {
delim: delim,
span: sp,
open_delim: delim == token::NoDelim,
- tree_cursor: tts.stream().into_trees(),
+ tree_cursor: tts.clone().into_trees(),
close_delim: delim == token::NoDelim,
last_token: LastToken::Was(None),
}
})
}
- fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, ThinTokenStream)> {
+ fn expect_delimited_token_tree(&mut self) -> PResult<'a, (MacDelimiter, TokenStream)> {
let delim = match self.token {
token::OpenDelim(delim) => delim,
_ => {
token::Brace => MacDelimiter::Brace,
token::NoDelim => self.bug("unexpected no delimiter"),
};
- Ok((delim, tts.stream().into()))
+ Ok((delim, tts.into()))
}
/// At the bottom (top?) of the precedence hierarchy,
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
match self.parse_token_tree() {
- TokenTree::Delimited(_, _, tts) => tts.stream(),
+ TokenTree::Delimited(_, _, tts) => tts,
_ => unreachable!(),
}
} else if self.check(&token::OpenDelim(token::Paren)) {
TokenTree::Delimited(_, delim, tts) => {
self.writer().word(token_to_string(&token::OpenDelim(delim)))?;
self.writer().space()?;
- self.print_tts(tts.stream())?;
+ self.print_tts(tts)?;
self.writer().space()?;
self.writer().word(token_to_string(&token::CloseDelim(delim)))
},
/// A single token
Token(Span, token::Token),
/// A delimited sequence of token trees
- Delimited(DelimSpan, DelimToken, ThinTokenStream),
+ Delimited(DelimSpan, DelimToken, TokenStream),
}
impl TokenTree {
(&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
(&TokenTree::Delimited(_, delim, ref tts),
&TokenTree::Delimited(_, delim2, ref tts2)) => {
- delim == delim2 &&
- tts.stream().eq_unspanned(&tts2.stream())
+ delim == delim2 && tts.eq_unspanned(&tts2)
}
(_, _) => false,
}
}
(&TokenTree::Delimited(_, delim, ref tts),
&TokenTree::Delimited(_, delim2, ref tts2)) => {
- delim == delim2 &&
- tts.stream().probably_equal_for_proc_macro(&tts2.stream())
+ delim == delim2 && tts.probably_equal_for_proc_macro(&tts2)
}
(_, _) => false,
}
}
}
-/// The `TokenStream` type is large enough to represent a single `TokenTree` without allocation.
-/// `ThinTokenStream` is smaller, but needs to allocate to represent a single `TokenTree`.
-/// We must use `ThinTokenStream` in `TokenTree::Delimited` to avoid infinite size due to recursion.
-#[derive(Debug, Clone)]
-pub struct ThinTokenStream(Option<Lrc<Vec<TreeAndJoint>>>);
-
-impl ThinTokenStream {
- pub fn stream(&self) -> TokenStream {
- self.clone().into()
- }
-}
-
-impl From<TokenStream> for ThinTokenStream {
- fn from(stream: TokenStream) -> ThinTokenStream {
- ThinTokenStream(match stream {
- TokenStream::Empty => None,
- TokenStream::Stream(stream) => Some(stream),
- })
- }
-}
-
-impl From<ThinTokenStream> for TokenStream {
- fn from(stream: ThinTokenStream) -> TokenStream {
- stream.0.map(TokenStream::Stream).unwrap_or_else(TokenStream::empty)
- }
-}
-
-impl Eq for ThinTokenStream {}
-
-impl PartialEq<ThinTokenStream> for ThinTokenStream {
- fn eq(&self, other: &ThinTokenStream) -> bool {
- TokenStream::from(self.clone()) == TokenStream::from(other.clone())
- }
-}
-
impl fmt::Display for TokenStream {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
f.write_str(&pprust::tokens_to_string(self.clone()))
}
}
-impl Encodable for ThinTokenStream {
- fn encode<E: Encoder>(&self, encoder: &mut E) -> Result<(), E::Error> {
- TokenStream::from(self.clone()).encode(encoder)
- }
-}
-
-impl Decodable for ThinTokenStream {
- fn decode<D: Decoder>(decoder: &mut D) -> Result<ThinTokenStream, D::Error> {
- TokenStream::decode(decoder).map(Into::into)
- }
-}
-
#[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub struct DelimSpan {
pub open: Span,
pub fn walk_tt<'a, V: Visitor<'a>>(visitor: &mut V, tt: TokenTree) {
match tt {
TokenTree::Token(_, tok) => visitor.visit_token(tok),
- TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts.stream()),
+ TokenTree::Delimited(_, _, tts) => visitor.visit_tts(tts),
}
}