use syntax::errors::DiagnosticBuilder;
use syntax::parse::{self, token};
use syntax::symbol::Symbol;
-use syntax::tokenstream;
-use syntax_pos::{BytePos, Pos, FileName};
+use syntax::tokenstream::{self, DelimSpan};
+use syntax_pos::{Pos, FileName};
/// The main type provided by this crate, representing an abstract stream of
/// tokens, or, more specifically, a sequence of token trees.
pub struct Group {
delimiter: Delimiter,
stream: TokenStream,
- span: Span,
+ span: DelimSpan,
}
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
Group {
delimiter: delimiter,
stream: stream,
- span: Span::call_site(),
+ span: DelimSpan::from_single(Span::call_site().0),
}
}
/// ```
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
pub fn span(&self) -> Span {
- self.span
+ Span(self.span.entire())
}
- /// Returns the span pointing to the opening delimiter of this group, or the
- /// span of the entire group if this is a None-delimited group.
+ /// Returns the span pointing to the opening delimiter of this group.
///
/// ```text
/// pub fn span_open(&self) -> Span {
/// ```
#[unstable(feature = "proc_macro_span", issue = "38356")]
pub fn span_open(&self) -> Span {
- if self.delimiter == Delimiter::None {
- self.span
- } else {
- let lo = self.span.0.lo();
- let new_hi = BytePos::from_usize(lo.to_usize() + 1);
- Span(self.span.0.with_hi(new_hi))
- }
+ Span(self.span.open)
}
- /// Returns the span pointing to the closing delimiter of this group, or the
- /// span of the entire group if this is a None-delimited group.
+ /// Returns the span pointing to the closing delimiter of this group.
///
/// ```text
/// pub fn span_close(&self) -> Span {
/// ```
#[unstable(feature = "proc_macro_span", issue = "38356")]
pub fn span_close(&self) -> Span {
- let hi = self.span.0.hi();
- if self.delimiter == Delimiter::None || hi.to_usize() == 0 {
- self.span
- } else {
- let new_lo = BytePos::from_usize(hi.to_usize() - 1);
- Span(self.span.0.with_lo(new_lo))
- }
+ Span(self.span.close)
}
/// Configures the span for this `Group`'s delimiters, but not its internal
/// tokens at the level of the `Group`.
#[stable(feature = "proc_macro_lib2", since = "1.29.0")]
pub fn set_span(&mut self, span: Span) {
- self.span = span;
+ self.span = DelimSpan::from_single(span.0);
}
}
tokenstream::TokenTree::Delimited(span, delimed) => {
let delimiter = Delimiter::from_internal(delimed.delim);
let mut g = Group::new(delimiter, ::TokenStream(delimed.tts.into()));
- g.set_span(Span(span));
+ g.span = span;
return g.into();
}
};
self::TokenTree::Punct(tt) => (tt.as_char(), tt.spacing(), tt.span()),
self::TokenTree::Group(tt) => {
return TokenTree::Delimited(
- tt.span.0,
+ tt.span,
Delimited {
delim: tt.delimiter.to_internal(),
tts: tt.stream.0.into(),
use syntax::source_map::SourceMap;
use syntax::ext::hygiene::SyntaxContext;
use syntax::symbol::Symbol;
+use syntax::tokenstream::DelimSpan;
use syntax_pos::{Span, DUMMY_SP};
use syntax_pos::hygiene;
}
}
+// NOTE(review): `DelimSpan` is new in this patch, so it needs a `HashStable`
+// impl for incremental compilation. Both component spans are hashed so the
+// stable fingerprint changes if either the opening or the closing delimiter
+// span changes.
+impl<'a> HashStable<StableHashingContext<'a>> for DelimSpan {
+    fn hash_stable<W: StableHasherResult>(
+        &self,
+        hcx: &mut StableHashingContext<'a>,
+        hasher: &mut StableHasher<W>,
+    ) {
+        self.open.hash_stable(hcx, hasher);
+        self.close.hash_stable(hcx, hasher);
+    }
+}
+
pub fn hash_stable_trait_impls<'a, 'gcx, W, R>(
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>,
use syntax::parse::token::{self, Token};
use syntax::ptr::P;
use syntax::symbol::{Symbol, keywords};
-use syntax::tokenstream::{TokenStream, TokenTree, Delimited};
+use syntax::tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::{Span, DUMMY_SP};
use errors::Applicability;
tokens.push(TokenTree::Token(path.span, tok).into());
}
}
- attrs[i].tokens = TokenTree::Delimited(attrs[i].span, Delimited {
+ let delim_span = DelimSpan::from_single(attrs[i].span);
+ attrs[i].tokens = TokenTree::Delimited(delim_span, Delimited {
delim: token::Paren,
tts: TokenStream::concat(tokens).into(),
}).into();
use ptr::P;
use symbol::Symbol;
use ThinVec;
-use tokenstream::{TokenStream, TokenTree, Delimited};
+use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
use GLOBALS;
use std::iter;
}
tokens.push(item.node.tokens());
}
- TokenTree::Delimited(span, Delimited {
+ TokenTree::Delimited(DelimSpan::from_single(span), Delimited {
delim: token::Paren,
tts: TokenStream::concat(tokens).into(),
}).into()
use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty};
use source_map::respan;
-use syntax_pos::Span;
+use syntax_pos::{Span, DUMMY_SP};
use ext::base::ExtCtxt;
use ext::base;
use ext::build::AstBuilder;
use parse::parser::{Parser, PathStyle};
use parse::token;
use ptr::P;
-use tokenstream::{TokenStream, TokenTree};
+use tokenstream::{DelimSpan, TokenStream, TokenTree};
/// Quasiquoting works via token trees.
///
use symbol::Symbol;
use ThinVec;
- use tokenstream::{self, TokenTree, TokenStream};
+ use tokenstream::{self, DelimSpan, TokenTree, TokenStream};
pub use parse::new_parser_from_tts;
pub use syntax_pos::{BytePos, Span, DUMMY_SP, FileName};
}
inner.push(self.tokens.clone());
- r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
+ let delim_span = DelimSpan::from_single(self.span);
+ r.push(TokenTree::Delimited(delim_span, tokenstream::Delimited {
delim: token::Bracket, tts: TokenStream::concat(inner).into()
}));
r
impl ToTokens for () {
fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
- vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
+ vec![TokenTree::Delimited(DelimSpan::dummy(), tokenstream::Delimited {
delim: token::Paren,
tts: TokenStream::empty().into(),
})]
let mut results = Vec::new();
let mut result = Vec::new();
+ let mut open_span = DUMMY_SP;
for tree in tts {
match tree {
- TokenTree::Token(_, token::OpenDelim(..)) => {
+ TokenTree::Token(span, token::OpenDelim(..)) => {
+ open_span = span;
results.push(::std::mem::replace(&mut result, Vec::new()));
}
TokenTree::Token(span, token::CloseDelim(delim)) => {
- let tree = TokenTree::Delimited(span, Delimited {
+ let delim_span = DelimSpan::from_pair(open_span, span);
+ let tree = TokenTree::Delimited(delim_span, Delimited {
delim,
tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
});
vec![cx.stmt_expr(e_push)]
},
TokenTree::Delimited(span, ref delimed) => {
- let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
+ let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span.open), false);
stmts.extend(statements_mk_tts(cx, delimed.stream()));
- stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
+ stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span.close), false));
stmts
}
}
use self::TokenTreeOrTokenTreeSlice::*;
use ast::Ident;
-use syntax_pos::{self, BytePos, Span};
+use syntax_pos::{self, Span};
use errors::FatalError;
use ext::tt::quoted::{self, TokenTree};
use parse::{Directory, ParseSess};
use print::pprust;
use OneVector;
use symbol::keywords;
-use tokenstream::TokenStream;
+use tokenstream::{DelimSpan, TokenStream};
use rustc_data_structures::fx::FxHashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
/// The beginning position in the source that the beginning of this matcher corresponds to. In
/// other words, the token in the source at `sp_lo` is matched against the first token of the
/// matcher.
- sp_lo: BytePos,
+ sp_lo: Span,
/// For each named metavar in the matcher, we keep track of token trees matched against the
/// metavar by the black box parser. In particular, there may be more than one match per
/// Generate the top-level matcher position in which the "dot" is before the first token of the
/// matcher `ms` and we are going to start matching at position `lo` in the source.
-fn initial_matcher_pos(ms: &[TokenTree], lo: BytePos) -> MatcherPos {
+fn initial_matcher_pos(ms: &[TokenTree], lo: Span) -> MatcherPos {
let match_idx_hi = count_names(ms);
let matches = create_matches(match_idx_hi);
MatcherPos {
/// token tree it was derived from.
#[derive(Debug, Clone)]
pub enum NamedMatch {
- MatchedSeq(Rc<Vec<NamedMatch>>, syntax_pos::Span),
+ MatchedSeq(Rc<Vec<NamedMatch>>, DelimSpan),
MatchedNonterminal(Rc<Nonterminal>),
}
// Add matches from this repetition to the `matches` of `up`
for idx in item.match_lo..item.match_hi {
let sub = item.matches[idx].clone();
- let span = span.with_lo(item.sp_lo);
+ let span = DelimSpan::from_pair(item.sp_lo, span);
new_pos.push_match(idx, MatchedSeq(sub, span));
}
match_cur: item.match_cur,
match_hi: item.match_cur + seq.num_captures,
up: Some(item),
- sp_lo: sp.lo(),
+ sp_lo: sp.open,
top_elts: Tt(TokenTree::Sequence(sp, seq)),
})));
}
//
// This MatcherPos instance is allocated on the stack. All others -- and
// there are frequently *no* others! -- are allocated on the heap.
- let mut initial = initial_matcher_pos(ms, parser.span.lo());
+ let mut initial = initial_matcher_pos(ms, parser.span);
let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
let mut next_items = Vec::new();
use parse::token::{self, NtTT};
use parse::token::Token::*;
use symbol::Symbol;
-use tokenstream::{TokenStream, TokenTree};
+use tokenstream::{DelimSpan, TokenStream, TokenTree};
use rustc_data_structures::fx::FxHashMap;
use std::borrow::Cow;
// ...quasiquoting this would be nice.
// These spans won't matter, anyways
let argument_gram = vec![
- quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
+ quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
tts: vec![
quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
num_captures: 2,
})),
// to phase into semicolon-termination instead of semicolon-separation
- quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
+ quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
separator: None,
op: quoted::KleeneOp::ZeroOrMore,
_ => false,
}
}) {
- sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+ let sp = span.entire();
+ sess.span_diagnostic.span_err(sp, "repetition matches empty token tree");
return false;
}
if !check_lhs_no_empty_seq(sess, &seq.tts) {
}
TokenTree::Delimited(span, ref delimited) => {
build_recur(sets, &delimited.tts[..]);
- first.replace_with(delimited.open_tt(span));
+ first.replace_with(delimited.open_tt(span.open));
}
TokenTree::Sequence(sp, ref seq_rep) => {
let subfirst = build_recur(sets, &seq_rep.tts[..]);
- match sets.first.entry(sp) {
+ match sets.first.entry(sp.entire()) {
Entry::Vacant(vac) => {
vac.insert(Some(subfirst.clone()));
}
if let (Some(ref sep), true) = (seq_rep.separator.clone(),
subfirst.maybe_empty) {
- first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
+ first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
}
// Reverse scan: Sequence comes before `first`.
return first;
}
TokenTree::Delimited(span, ref delimited) => {
- first.add_one(delimited.open_tt(span));
+ first.add_one(delimited.open_tt(span.open));
return first;
}
TokenTree::Sequence(sp, ref seq_rep) => {
- match self.first.get(&sp) {
+ match self.first.get(&sp.entire()) {
Some(&Some(ref subfirst)) => {
// If the sequence contents can be empty, then the first
if let (Some(ref sep), true) = (seq_rep.separator.clone(),
subfirst.maybe_empty) {
- first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
+ first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
}
assert!(first.maybe_empty);
}
}
TokenTree::Delimited(span, ref d) => {
- let my_suffix = TokenSet::singleton(d.close_tt(span));
+ let my_suffix = TokenSet::singleton(d.close_tt(span.close));
check_matcher_core(sess, features, attrs, first_sets, &d.tts, &my_suffix);
// don't track non NT tokens
last.replace_with_irrelevant();
let mut new;
let my_suffix = if let Some(ref u) = seq_rep.separator {
new = suffix_first.clone();
- new.add_one_maybe(TokenTree::Token(sp, u.clone()));
+ new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone()));
&new
} else {
&suffix_first
use print::pprust;
use symbol::keywords;
use syntax_pos::{edition::Edition, BytePos, Span};
-use tokenstream;
+use tokenstream::{self, DelimSpan};
use {ast, attr};
use rustc_data_structures::sync::Lrc;
#[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
pub enum TokenTree {
Token(Span, token::Token),
- Delimited(Span, Lrc<Delimited>),
+ Delimited(DelimSpan, Lrc<Delimited>),
/// A kleene-style repetition sequence
- Sequence(Span, Lrc<SequenceRepetition>),
+ Sequence(DelimSpan, Lrc<SequenceRepetition>),
/// E.g. `$var`
MetaVar(Span, ast::Ident),
/// E.g. `$var:expr`. This is only used in the left hand side of MBE macros.
}
(&TokenTree::Delimited(span, ref delimed), _) => {
if index == 0 {
- return delimed.open_tt(span);
+ return delimed.open_tt(span.open);
}
if index == delimed.tts.len() + 1 {
- return delimed.close_tt(span);
+ return delimed.close_tt(span.close);
}
delimed.tts[index - 1].clone()
}
match *self {
TokenTree::Token(sp, _)
| TokenTree::MetaVar(sp, _)
- | TokenTree::MetaVarDecl(sp, _, _)
- | TokenTree::Delimited(sp, _)
- | TokenTree::Sequence(sp, _) => sp,
+ | TokenTree::MetaVarDecl(sp, _, _) => sp,
+ TokenTree::Delimited(sp, _)
+ | TokenTree::Sequence(sp, _) => sp.entire(),
}
}
}
if delimited.delim != token::Paren {
let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
let msg = format!("expected `(`, found `{}`", tok);
- sess.span_diagnostic.span_err(span, &msg);
+ sess.span_diagnostic.span_err(span.entire(), &msg);
}
// Parse the contents of the sequence itself
let sequence = parse(
let (separator, op) =
parse_sep_and_kleene_op(
trees,
- span,
+ span.entire(),
sess,
features,
attrs,
use fold::noop_fold_tt;
use parse::token::{self, Token, NtTT};
use OneVector;
-use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{TokenStream, TokenTree, Delimited};
+use syntax_pos::DUMMY_SP;
+use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::sync::Lrc;
Delimited {
forest: Lrc<quoted::Delimited>,
idx: usize,
- span: Span,
+ span: DelimSpan,
},
Sequence {
forest: Lrc<quoted::SequenceRepetition>,
impl Frame {
fn new(tts: Vec<quoted::TokenTree>) -> Frame {
let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts: tts });
- Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP }
+ Frame::Delimited { forest: forest, idx: 0, span: DelimSpan::dummy() }
}
}
&interpolations,
&repeats) {
LockstepIterSize::Unconstrained => {
- cx.span_fatal(sp, /* blame macro writer */
+ cx.span_fatal(sp.entire(), /* blame macro writer */
"attempted to repeat an expression \
containing no syntax \
variables matched as repeating at this depth");
}
LockstepIterSize::Contradiction(ref msg) => {
// FIXME #2887 blame macro invoker instead
- cx.span_fatal(sp, &msg[..]);
+ cx.span_fatal(sp.entire(), &msg[..]);
}
LockstepIterSize::Constraint(len, _) => {
if len == 0 {
if seq.op == quoted::KleeneOp::OneOrMore {
// FIXME #2887 blame invoker
- cx.span_fatal(sp, "this must repeat at least once");
+ cx.span_fatal(sp.entire(), "this must repeat at least once");
}
} else {
repeats.push((0, len));
match tt {
TokenTree::Token(span, tok) =>
TokenTree::Token(fld.new_span(span), fld.fold_token(tok)),
- TokenTree::Delimited(span, delimed) => TokenTree::Delimited(fld.new_span(span), Delimited {
- tts: fld.fold_tts(delimed.stream()).into(),
- delim: delimed.delim,
- }),
+ TokenTree::Delimited(span, delimed) => TokenTree::Delimited(
+ DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
+ Delimited {
+ tts: fld.fold_tts(delimed.stream()).into(),
+ delim: delimed.delim,
+ }
+ ),
}
}
use print::pprust::token_to_string;
use parse::lexer::StringReader;
use parse::{token, PResult};
-use tokenstream::{Delimited, TokenStream, TokenTree};
+use tokenstream::{Delimited, DelimSpan, TokenStream, TokenTree};
impl<'a> StringReader<'a> {
// Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
let tts = self.parse_token_trees_until_close_delim();
// Expand to cover the entire delimited token tree
- let span = pre_span.with_hi(self.span.hi());
+ let delim_span = DelimSpan::from_pair(pre_span, self.span);
match self.token {
// Correct delimiter.
_ => {}
}
- Ok(TokenTree::Delimited(span, Delimited {
+ Ok(TokenTree::Delimited(delim_span, Delimited {
delim,
tts: tts.into(),
}).into())
use attr::first_attr_value_str_by_name;
use parse;
use print::pprust::item_to_string;
- use tokenstream::{self, TokenTree};
+ use tokenstream::{self, DelimSpan, TokenTree};
use util::parser_testing::string_to_stream;
use util::parser_testing::{string_to_expr, string_to_item};
use with_globals;
TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
TokenTree::Delimited(
- sp(5, 14),
+ DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
tokenstream::Delimited {
delim: token::DelimToken::Paren,
tts: TokenStream::concat(vec![
]).into(),
}).into(),
TokenTree::Delimited(
- sp(15, 21),
+ DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
tokenstream::Delimited {
delim: token::DelimToken::Brace,
tts: TokenStream::concat(vec![
use ptr::P;
use parse::PResult;
use ThinVec;
-use tokenstream::{self, Delimited, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, Delimited, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
use symbol::{Symbol, keywords};
use std::borrow::Cow;
#[derive(Clone)]
struct TokenCursorFrame {
delim: token::DelimToken,
- span: Span,
+ span: DelimSpan,
open_delim: bool,
tree_cursor: tokenstream::Cursor,
close_delim: bool,
}
impl TokenCursorFrame {
- fn new(sp: Span, delimited: &Delimited) -> Self {
+ fn new(sp: DelimSpan, delimited: &Delimited) -> Self {
TokenCursorFrame {
delim: delimited.delim,
span: sp,
let tree = if !self.frame.open_delim {
self.frame.open_delim = true;
Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
- .open_tt(self.frame.span)
+ .open_tt(self.frame.span.open)
} else if let Some(tree) = self.frame.tree_cursor.next() {
tree
} else if !self.frame.close_delim {
self.frame.close_delim = true;
Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
- .close_tt(self.frame.span)
+ .close_tt(self.frame.span.close)
} else if let Some(frame) = self.stack.pop() {
self.frame = frame;
continue
num_of_hashes = cmp::max(num_of_hashes, count);
}
- let body = TokenTree::Delimited(sp, Delimited {
+ let delim_span = DelimSpan::from_single(sp);
+ let body = TokenTree::Delimited(delim_span, Delimited {
delim: token::Bracket,
tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
TokenTree::Token(sp, token::Eq),
.iter().cloned().collect::<TokenStream>().into(),
});
- self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited {
+ self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(delim_span, &Delimited {
delim: token::NoDelim,
tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
[TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
root_module_name: None,
expected_tokens: Vec::new(),
token_cursor: TokenCursor {
- frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited {
+ frame: TokenCursorFrame::new(DelimSpan::dummy(), &Delimited {
delim: token::NoDelim,
tts: tokens.into(),
}),
}
match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
- Some(TokenTree::Token(span, _)) | Some(TokenTree::Delimited(span, _)) => span,
+ Some(TokenTree::Token(span, _)) => span,
+ Some(TokenTree::Delimited(span, _)) => span.entire(),
None => self.look_ahead_span(dist - 1),
}
}
token::OpenDelim(..) => {
let frame = mem::replace(&mut self.token_cursor.frame,
self.token_cursor.stack.pop().unwrap());
- self.span = frame.span;
+ self.span = frame.span.entire();
self.bump();
TokenTree::Delimited(frame.span, Delimited {
delim: frame.delim,
use syntax::parse::parse_stream_from_source_str;
use syntax_pos::{self, Span, FileName};
use syntax_pos::symbol::{self, Symbol};
-use tokenstream::{TokenStream, TokenTree};
-use tokenstream;
+use tokenstream::{self, DelimSpan, TokenStream, TokenTree};
use std::{cmp, fmt};
use std::mem;
// that it encompasses more than each token, but it hopefully is "good
// enough" for now at least.
builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
- builder.push(tokenstream::TokenTree::Delimited(attr.span, tokens));
+ let delim_span = DelimSpan::from_single(attr.span);
+ builder.push(tokenstream::TokenTree::Delimited(delim_span, tokens));
}
builder.push(tokens.clone());
Some(builder.build())
//! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
//! ownership of the original.
-use syntax_pos::{BytePos, Span, DUMMY_SP};
+use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
use ext::base;
use ext::tt::{macro_parser, quoted};
use parse::Directory;
/// A single token
Token(Span, token::Token),
/// A delimited sequence of token trees
- Delimited(Span, Delimited),
+ Delimited(DelimSpan, Delimited),
}
impl TokenTree {
/// Retrieve the TokenTree's span.
pub fn span(&self) -> Span {
match *self {
- TokenTree::Token(sp, _) | TokenTree::Delimited(sp, _) => sp,
+ TokenTree::Token(sp, _) => sp,
+ TokenTree::Delimited(sp, _) => sp.entire(),
}
}
/// Modify the `TokenTree`'s span inplace.
pub fn set_span(&mut self, span: Span) {
match *self {
- TokenTree::Token(ref mut sp, _) | TokenTree::Delimited(ref mut sp, _) => {
- *sp = span;
- }
+ TokenTree::Token(ref mut sp, _) => *sp = span,
+ TokenTree::Delimited(ref mut sp, _) => *sp = DelimSpan::from_single(span),
}
}
let mut iter = slice.iter().enumerate().peekable();
while let Some((pos, ts)) = iter.next() {
if let Some((_, next)) = iter.peek() {
- match (ts, next) {
- (TokenStream {
- kind: TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma))
- }, _) |
- (_, TokenStream {
- kind: TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma))
- }) => {}
- (TokenStream {
- kind: TokenStreamKind::Tree(TokenTree::Token(sp, _))
- }, _) |
- (TokenStream {
- kind: TokenStreamKind::Tree(TokenTree::Delimited(sp, _))
- }, _) => {
- let sp = sp.shrink_to_hi();
- let comma = TokenStream {
- kind: TokenStreamKind::Tree(TokenTree::Token(sp, token::Comma)),
- };
- suggestion = Some((pos, comma, sp));
+ let sp = match (&ts.kind, &next.kind) {
+ (TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma)), _) |
+ (_, TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma))) => {
+ continue;
}
- _ => {}
- }
+ (TokenStreamKind::Tree(TokenTree::Token(sp, _)), _) => *sp,
+ (TokenStreamKind::Tree(TokenTree::Delimited(sp, _)), _) => sp.entire(),
+ _ => continue,
+ };
+ let sp = sp.shrink_to_hi();
+ let comma = TokenStream {
+ kind: TokenStreamKind::Tree(TokenTree::Token(sp, token::Comma)),
+ };
+ suggestion = Some((pos, comma, sp));
}
}
if let Some((pos, comma, sp)) = suggestion {
}
}
+/// The span of a delimited token tree, stored as the pair of the opening
+/// and closing delimiter spans. Keeping both (rather than a single merged
+/// `Span`) lets diagnostics point precisely at one delimiter instead of
+/// reconstructing it with `BytePos` arithmetic as the old code did.
+#[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
+pub struct DelimSpan {
+    /// Span of the opening delimiter token.
+    pub open: Span,
+    /// Span of the closing delimiter token.
+    pub close: Span,
+}
+
+impl DelimSpan {
+    /// Builds a `DelimSpan` where both delimiters share the same span.
+    /// Used when no distinct open/close positions exist (e.g. a `Group`
+    /// created programmatically at a single call site).
+    pub fn from_single(sp: Span) -> Self {
+        DelimSpan {
+            open: sp,
+            close: sp,
+        }
+    }
+
+    /// Builds a `DelimSpan` from distinct opening and closing spans, as
+    /// produced by the tokenizer for a real delimited group.
+    pub fn from_pair(open: Span, close: Span) -> Self {
+        DelimSpan { open, close }
+    }
+
+    /// A placeholder span pair (both components `DUMMY_SP`); replaces the
+    /// former bare `DUMMY_SP` usages at the updated call sites.
+    pub fn dummy() -> Self {
+        Self::from_single(DUMMY_SP)
+    }
+
+    /// The span covering the whole group: from the low end of `open` to the
+    /// high end of `close`. Note this keeps `open`'s syntax context; it does
+    /// not attempt to merge hygiene information from `close`.
+    pub fn entire(self) -> Span {
+        self.open.with_hi(self.close.hi())
+    }
+
+    /// Applies a hygiene mark to both component spans, mirroring
+    /// `Span::apply_mark` for the single-span representation this replaces.
+    pub fn apply_mark(self, mark: Mark) -> Self {
+        DelimSpan {
+            open: self.open.apply_mark(mark),
+            close: self.close.apply_mark(mark),
+        }
+    }
+}
+
#[cfg(test)]
mod tests {
use super::*;
_ => unreachable!(),
}
}).collect();
- let arm = cx.arm(seq_sp, pats, cx.expr_bool(seq_sp, true));
+ let span = seq_sp.entire();
+ let arm = cx.arm(span, pats, cx.expr_bool(span, true));
quote_expr!(cx,
match $matched_expr {