git.lizzy.rs Git - rust.git/commitdiff
Track distinct spans for open and close delimiter
author David Tolnay <dtolnay@gmail.com>
Sun, 9 Sep 2018 01:07:02 +0000 (18:07 -0700)
committer David Tolnay <dtolnay@gmail.com>
Sun, 9 Sep 2018 02:01:48 +0000 (19:01 -0700)
17 files changed:
src/libproc_macro/lib.rs
src/libproc_macro/rustc.rs
src/librustc/ich/hcx.rs
src/librustc_resolve/macros.rs
src/libsyntax/attr/mod.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/quoted.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/fold.rs
src/libsyntax/parse/lexer/tokentrees.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/tokenstream.rs
src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs

index 8ed7b8f52babaceb603390b7a041d13e2b9929ea..d4737052875b6f75840743be10d44e93c1943b3e 100644 (file)
@@ -63,8 +63,8 @@
 use syntax::errors::DiagnosticBuilder;
 use syntax::parse::{self, token};
 use syntax::symbol::Symbol;
-use syntax::tokenstream;
-use syntax_pos::{BytePos, Pos, FileName};
+use syntax::tokenstream::{self, DelimSpan};
+use syntax_pos::{Pos, FileName};
 
 /// The main type provided by this crate, representing an abstract stream of
 /// tokens, or, more specifically, a sequence of token trees.
@@ -609,7 +609,7 @@ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
 pub struct Group {
     delimiter: Delimiter,
     stream: TokenStream,
-    span: Span,
+    span: DelimSpan,
 }
 
 #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
@@ -650,7 +650,7 @@ pub fn new(delimiter: Delimiter, stream: TokenStream) -> Group {
         Group {
             delimiter: delimiter,
             stream: stream,
-            span: Span::call_site(),
+            span: DelimSpan::from_single(Span::call_site().0),
         }
     }
 
@@ -678,11 +678,10 @@ pub fn stream(&self) -> TokenStream {
     /// ```
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn span(&self) -> Span {
-        self.span
+        Span(self.span.entire())
     }
 
-    /// Returns the span pointing to the opening delimiter of this group, or the
-    /// span of the entire group if this is a None-delimited group.
+    /// Returns the span pointing to the opening delimiter of this group.
     ///
     /// ```text
     /// pub fn span_open(&self) -> Span {
@@ -690,17 +689,10 @@ pub fn span(&self) -> Span {
     /// ```
     #[unstable(feature = "proc_macro_span", issue = "38356")]
     pub fn span_open(&self) -> Span {
-        if self.delimiter == Delimiter::None {
-            self.span
-        } else {
-            let lo = self.span.0.lo();
-            let new_hi = BytePos::from_usize(lo.to_usize() + 1);
-            Span(self.span.0.with_hi(new_hi))
-        }
+        Span(self.span.open)
     }
 
-    /// Returns the span pointing to the closing delimiter of this group, or the
-    /// span of the entire group if this is a None-delimited group.
+    /// Returns the span pointing to the closing delimiter of this group.
     ///
     /// ```text
     /// pub fn span_close(&self) -> Span {
@@ -708,13 +700,7 @@ pub fn span_open(&self) -> Span {
     /// ```
     #[unstable(feature = "proc_macro_span", issue = "38356")]
     pub fn span_close(&self) -> Span {
-        let hi = self.span.0.hi();
-        if self.delimiter == Delimiter::None || hi.to_usize() == 0 {
-            self.span
-        } else {
-            let new_lo = BytePos::from_usize(hi.to_usize() - 1);
-            Span(self.span.0.with_lo(new_lo))
-        }
+        Span(self.span.close)
     }
 
     /// Configures the span for this `Group`'s delimiters, but not its internal
@@ -725,7 +711,7 @@ pub fn span_close(&self) -> Span {
     /// tokens at the level of the `Group`.
     #[stable(feature = "proc_macro_lib2", since = "1.29.0")]
     pub fn set_span(&mut self, span: Span) {
-        self.span = span;
+        self.span = DelimSpan::from_single(span.0);
     }
 }
 
index 21229d3299d7e5a7251e7e52252aec82900539eb..3ce02d1afb10bf60089a65413463d4ac69930304 100644 (file)
@@ -64,7 +64,7 @@ pub(crate) fn from_internal(
             tokenstream::TokenTree::Delimited(span, delimed) => {
                 let delimiter = Delimiter::from_internal(delimed.delim);
                 let mut g = Group::new(delimiter, ::TokenStream(delimed.tts.into()));
-                g.set_span(Span(span));
+                g.span = span;
                 return g.into();
             }
         };
@@ -192,7 +192,7 @@ pub(crate) fn to_internal(self) -> tokenstream::TokenStream {
             self::TokenTree::Punct(tt) => (tt.as_char(), tt.spacing(), tt.span()),
             self::TokenTree::Group(tt) => {
                 return TokenTree::Delimited(
-                    tt.span.0,
+                    tt.span,
                     Delimited {
                         delim: tt.delimiter.to_internal(),
                         tts: tt.stream.0.into(),
index 371f631737c9871001536c5f4e964f2f0a012736..ed4a1e3d72f441eadae187912087882bb1a2a5ed 100644 (file)
@@ -28,6 +28,7 @@
 use syntax::source_map::SourceMap;
 use syntax::ext::hygiene::SyntaxContext;
 use syntax::symbol::Symbol;
+use syntax::tokenstream::DelimSpan;
 use syntax_pos::{Span, DUMMY_SP};
 use syntax_pos::hygiene;
 
@@ -396,6 +397,17 @@ fn hash_stable<W: StableHasherResult>(&self,
     }
 }
 
+impl<'a> HashStable<StableHashingContext<'a>> for DelimSpan {
+    fn hash_stable<W: StableHasherResult>(
+        &self,
+        hcx: &mut StableHashingContext<'a>,
+        hasher: &mut StableHasher<W>,
+    ) {
+        self.open.hash_stable(hcx, hasher);
+        self.close.hash_stable(hcx, hasher);
+    }
+}
+
 pub fn hash_stable_trait_impls<'a, 'gcx, W, R>(
     hcx: &mut StableHashingContext<'a>,
     hasher: &mut StableHasher<W>,
index 879b4ea3fe579f8568aeb1b685af9760e46e932e..032ec8de2b791ed46161c846b93fa6e8c712042a 100644 (file)
@@ -35,7 +35,7 @@
 use syntax::parse::token::{self, Token};
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, keywords};
-use syntax::tokenstream::{TokenStream, TokenTree, Delimited};
+use syntax::tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
 use syntax::util::lev_distance::find_best_match_for_name;
 use syntax_pos::{Span, DUMMY_SP};
 use errors::Applicability;
@@ -279,7 +279,8 @@ fn find_legacy_attr_invoc(&mut self, attrs: &mut Vec<ast::Attribute>, allow_deri
                                 tokens.push(TokenTree::Token(path.span, tok).into());
                             }
                         }
-                        attrs[i].tokens = TokenTree::Delimited(attrs[i].span, Delimited {
+                        let delim_span = DelimSpan::from_single(attrs[i].span);
+                        attrs[i].tokens = TokenTree::Delimited(delim_span, Delimited {
                             delim: token::Paren,
                             tts: TokenStream::concat(tokens).into(),
                         }).into();
index 19bbbceff5fc02f3d6ef9d8e5f7d8421efc3b0f5..a980f3ab51584d78f35ada6e74061b0ff850dc01 100644 (file)
@@ -34,7 +34,7 @@
 use ptr::P;
 use symbol::Symbol;
 use ThinVec;
-use tokenstream::{TokenStream, TokenTree, Delimited};
+use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
 use GLOBALS;
 
 use std::iter;
@@ -535,7 +535,7 @@ pub fn tokens(&self, span: Span) -> TokenStream {
                     }
                     tokens.push(item.node.tokens());
                 }
-                TokenTree::Delimited(span, Delimited {
+                TokenTree::Delimited(DelimSpan::from_single(span), Delimited {
                     delim: token::Paren,
                     tts: TokenStream::concat(tokens).into(),
                 }).into()
index 13a139deea4c31f7205cb183192bdf2164ae4d62..62bc9fae3b59af13855894295903953da83f643c 100644 (file)
 
 use ast::{self, Arg, Arm, Block, Expr, Item, Pat, Stmt, Ty};
 use source_map::respan;
-use syntax_pos::Span;
+use syntax_pos::{Span, DUMMY_SP};
 use ext::base::ExtCtxt;
 use ext::base;
 use ext::build::AstBuilder;
 use parse::parser::{Parser, PathStyle};
 use parse::token;
 use ptr::P;
-use tokenstream::{TokenStream, TokenTree};
+use tokenstream::{DelimSpan, TokenStream, TokenTree};
 
 /// Quasiquoting works via token trees.
 ///
@@ -36,7 +36,7 @@ pub mod rt {
     use symbol::Symbol;
     use ThinVec;
 
-    use tokenstream::{self, TokenTree, TokenStream};
+    use tokenstream::{self, DelimSpan, TokenTree, TokenStream};
 
     pub use parse::new_parser_from_tts;
     pub use syntax_pos::{BytePos, Span, DUMMY_SP, FileName};
@@ -245,7 +245,8 @@ fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
             }
             inner.push(self.tokens.clone());
 
-            r.push(TokenTree::Delimited(self.span, tokenstream::Delimited {
+            let delim_span = DelimSpan::from_single(self.span);
+            r.push(TokenTree::Delimited(delim_span, tokenstream::Delimited {
                 delim: token::Bracket, tts: TokenStream::concat(inner).into()
             }));
             r
@@ -261,7 +262,7 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
 
     impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
-            vec![TokenTree::Delimited(DUMMY_SP, tokenstream::Delimited {
+            vec![TokenTree::Delimited(DelimSpan::dummy(), tokenstream::Delimited {
                 delim: token::Paren,
                 tts: TokenStream::empty().into(),
             })]
@@ -385,13 +386,16 @@ pub fn unflatten(tts: Vec<TokenTree>) -> Vec<TokenTree> {
 
     let mut results = Vec::new();
     let mut result = Vec::new();
+    let mut open_span = DUMMY_SP;
     for tree in tts {
         match tree {
-            TokenTree::Token(_, token::OpenDelim(..)) => {
+            TokenTree::Token(span, token::OpenDelim(..)) => {
+                open_span = span;
                 results.push(::std::mem::replace(&mut result, Vec::new()));
             }
             TokenTree::Token(span, token::CloseDelim(delim)) => {
-                let tree = TokenTree::Delimited(span, Delimited {
+                let delim_span = DelimSpan::from_pair(open_span, span);
+                let tree = TokenTree::Delimited(delim_span, Delimited {
                     delim,
                     tts: result.into_iter().map(TokenStream::from).collect::<TokenStream>().into(),
                 });
@@ -756,9 +760,9 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, quoted: bool) -> Vec<ast::Stmt
             vec![cx.stmt_expr(e_push)]
         },
         TokenTree::Delimited(span, ref delimed) => {
-            let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span), false);
+            let mut stmts = statements_mk_tt(cx, &delimed.open_tt(span.open), false);
             stmts.extend(statements_mk_tts(cx, delimed.stream()));
-            stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span), false));
+            stmts.extend(statements_mk_tt(cx, &delimed.close_tt(span.close), false));
             stmts
         }
     }
index c962e7fcbb4cfce003da8e7f43c0b8eae534cfab..2ef8da3f6d80c3b55bf307d4bb1f11885cccac3d 100644 (file)
@@ -85,7 +85,7 @@
 use self::TokenTreeOrTokenTreeSlice::*;
 
 use ast::Ident;
-use syntax_pos::{self, BytePos, Span};
+use syntax_pos::{self, Span};
 use errors::FatalError;
 use ext::tt::quoted::{self, TokenTree};
 use parse::{Directory, ParseSess};
@@ -94,7 +94,7 @@
 use print::pprust;
 use OneVector;
 use symbol::keywords;
-use tokenstream::TokenStream;
+use tokenstream::{DelimSpan, TokenStream};
 
 use rustc_data_structures::fx::FxHashMap;
 use std::collections::hash_map::Entry::{Occupied, Vacant};
@@ -154,7 +154,7 @@ struct MatcherPos<'a> {
     /// The beginning position in the source that the beginning of this matcher corresponds to. In
     /// other words, the token in the source at `sp_lo` is matched against the first token of the
     /// matcher.
-    sp_lo: BytePos,
+    sp_lo: Span,
 
     /// For each named metavar in the matcher, we keep track of token trees matched against the
     /// metavar by the black box parser. In particular, there may be more than one match per
@@ -285,7 +285,7 @@ fn create_matches(len: usize) -> Vec<Rc<Vec<NamedMatch>>> {
 
 /// Generate the top-level matcher position in which the "dot" is before the first token of the
 /// matcher `ms` and we are going to start matching at position `lo` in the source.
-fn initial_matcher_pos(ms: &[TokenTree], lo: BytePos) -> MatcherPos {
+fn initial_matcher_pos(ms: &[TokenTree], lo: Span) -> MatcherPos {
     let match_idx_hi = count_names(ms);
     let matches = create_matches(match_idx_hi);
     MatcherPos {
@@ -332,7 +332,7 @@ fn initial_matcher_pos(ms: &[TokenTree], lo: BytePos) -> MatcherPos {
 /// token tree it was derived from.
 #[derive(Debug, Clone)]
 pub enum NamedMatch {
-    MatchedSeq(Rc<Vec<NamedMatch>>, syntax_pos::Span),
+    MatchedSeq(Rc<Vec<NamedMatch>>, DelimSpan),
     MatchedNonterminal(Rc<Nonterminal>),
 }
 
@@ -488,7 +488,7 @@ fn inner_parse_loop<'a>(
                     // Add matches from this repetition to the `matches` of `up`
                     for idx in item.match_lo..item.match_hi {
                         let sub = item.matches[idx].clone();
-                        let span = span.with_lo(item.sp_lo);
+                        let span = DelimSpan::from_pair(item.sp_lo, span);
                         new_pos.push_match(idx, MatchedSeq(sub, span));
                     }
 
@@ -556,7 +556,7 @@ fn inner_parse_loop<'a>(
                         match_cur: item.match_cur,
                         match_hi: item.match_cur + seq.num_captures,
                         up: Some(item),
-                        sp_lo: sp.lo(),
+                        sp_lo: sp.open,
                         top_elts: Tt(TokenTree::Sequence(sp, seq)),
                     })));
                 }
@@ -643,7 +643,7 @@ pub fn parse(
     //
     // This MatcherPos instance is allocated on the stack. All others -- and
     // there are frequently *no* others! -- are allocated on the heap.
-    let mut initial = initial_matcher_pos(ms, parser.span.lo());
+    let mut initial = initial_matcher_pos(ms, parser.span);
     let mut cur_items = smallvec![MatcherPosHandle::Ref(&mut initial)];
     let mut next_items = Vec::new();
 
index d09127d6b08b7a303ffd729c22727f21b798ea0f..86247745c4116b6eedb3d1cdda9f38aadf05fca0 100644 (file)
@@ -25,7 +25,7 @@
 use parse::token::{self, NtTT};
 use parse::token::Token::*;
 use symbol::Symbol;
-use tokenstream::{TokenStream, TokenTree};
+use tokenstream::{DelimSpan, TokenStream, TokenTree};
 
 use rustc_data_structures::fx::FxHashMap;
 use std::borrow::Cow;
@@ -226,7 +226,7 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
     // ...quasiquoting this would be nice.
     // These spans won't matter, anyways
     let argument_gram = vec![
-        quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
+        quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![
                 quoted::TokenTree::MetaVarDecl(DUMMY_SP, lhs_nm, ast::Ident::from_str("tt")),
                 quoted::TokenTree::Token(DUMMY_SP, token::FatArrow),
@@ -237,7 +237,7 @@ pub fn compile(sess: &ParseSess, features: &Features, def: &ast::Item, edition:
             num_captures: 2,
         })),
         // to phase into semicolon-termination instead of semicolon-separation
-        quoted::TokenTree::Sequence(DUMMY_SP, Lrc::new(quoted::SequenceRepetition {
+        quoted::TokenTree::Sequence(DelimSpan::dummy(), Lrc::new(quoted::SequenceRepetition {
             tts: vec![quoted::TokenTree::Token(DUMMY_SP, token::Semi)],
             separator: None,
             op: quoted::KleeneOp::ZeroOrMore,
@@ -400,7 +400,8 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
                         _ => false,
                     }
                 }) {
-                    sess.span_diagnostic.span_err(span, "repetition matches empty token tree");
+                    let sp = span.entire();
+                    sess.span_diagnostic.span_err(sp, "repetition matches empty token tree");
                     return false;
                 }
                 if !check_lhs_no_empty_seq(sess, &seq.tts) {
@@ -474,12 +475,12 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
                     }
                     TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts[..]);
-                        first.replace_with(delimited.open_tt(span));
+                        first.replace_with(delimited.open_tt(span.open));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
                         let subfirst = build_recur(sets, &seq_rep.tts[..]);
 
-                        match sets.first.entry(sp) {
+                        match sets.first.entry(sp.entire()) {
                             Entry::Vacant(vac) => {
                                 vac.insert(Some(subfirst.clone()));
                             }
@@ -499,7 +500,7 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
 
                         if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                         subfirst.maybe_empty) {
-                            first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
+                            first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
                         }
 
                         // Reverse scan: Sequence comes before `first`.
@@ -534,11 +535,11 @@ fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
                     return first;
                 }
                 TokenTree::Delimited(span, ref delimited) => {
-                    first.add_one(delimited.open_tt(span));
+                    first.add_one(delimited.open_tt(span.open));
                     return first;
                 }
                 TokenTree::Sequence(sp, ref seq_rep) => {
-                    match self.first.get(&sp) {
+                    match self.first.get(&sp.entire()) {
                         Some(&Some(ref subfirst)) => {
 
                             // If the sequence contents can be empty, then the first
@@ -546,7 +547,7 @@ fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
 
                             if let (Some(ref sep), true) = (seq_rep.separator.clone(),
                                                             subfirst.maybe_empty) {
-                                first.add_one_maybe(TokenTree::Token(sp, sep.clone()));
+                                first.add_one_maybe(TokenTree::Token(sp.entire(), sep.clone()));
                             }
 
                             assert!(first.maybe_empty);
@@ -727,7 +728,7 @@ fn check_matcher_core(sess: &ParseSess,
                 }
             }
             TokenTree::Delimited(span, ref d) => {
-                let my_suffix = TokenSet::singleton(d.close_tt(span));
+                let my_suffix = TokenSet::singleton(d.close_tt(span.close));
                 check_matcher_core(sess, features, attrs, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
@@ -751,7 +752,7 @@ fn check_matcher_core(sess: &ParseSess,
                 let mut new;
                 let my_suffix = if let Some(ref u) = seq_rep.separator {
                     new = suffix_first.clone();
-                    new.add_one_maybe(TokenTree::Token(sp, u.clone()));
+                    new.add_one_maybe(TokenTree::Token(sp.entire(), u.clone()));
                     &new
                 } else {
                     &suffix_first
index cc635a29275fabaae849e8b23bb0c44b469add54..74363f3e5f7d968931a36fccfcd8489fa1dd8c6c 100644 (file)
@@ -16,7 +16,7 @@
 use print::pprust;
 use symbol::keywords;
 use syntax_pos::{edition::Edition, BytePos, Span};
-use tokenstream;
+use tokenstream::{self, DelimSpan};
 use {ast, attr};
 
 use rustc_data_structures::sync::Lrc;
@@ -90,9 +90,9 @@ pub enum KleeneOp {
 #[derive(Debug, Clone, PartialEq, RustcEncodable, RustcDecodable)]
 pub enum TokenTree {
     Token(Span, token::Token),
-    Delimited(Span, Lrc<Delimited>),
+    Delimited(DelimSpan, Lrc<Delimited>),
     /// A kleene-style repetition sequence
-    Sequence(Span, Lrc<SequenceRepetition>),
+    Sequence(DelimSpan, Lrc<SequenceRepetition>),
     /// E.g. `$var`
     MetaVar(Span, ast::Ident),
     /// E.g. `$var:expr`. This is only used in the left hand side of MBE macros.
@@ -137,10 +137,10 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
             }
             (&TokenTree::Delimited(span, ref delimed), _) => {
                 if index == 0 {
-                    return delimed.open_tt(span);
+                    return delimed.open_tt(span.open);
                 }
                 if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt(span);
+                    return delimed.close_tt(span.close);
                 }
                 delimed.tts[index - 1].clone()
             }
@@ -154,9 +154,9 @@ pub fn span(&self) -> Span {
         match *self {
             TokenTree::Token(sp, _)
             | TokenTree::MetaVar(sp, _)
-            | TokenTree::MetaVarDecl(sp, _, _)
-            | TokenTree::Delimited(sp, _)
-            | TokenTree::Sequence(sp, _) => sp,
+            | TokenTree::MetaVarDecl(sp, _, _) => sp,
+            TokenTree::Delimited(sp, _)
+            | TokenTree::Sequence(sp, _) => sp.entire(),
         }
     }
 }
@@ -286,7 +286,7 @@ fn parse_tree<I>(
                 if delimited.delim != token::Paren {
                     let tok = pprust::token_to_string(&token::OpenDelim(delimited.delim));
                     let msg = format!("expected `(`, found `{}`", tok);
-                    sess.span_diagnostic.span_err(span, &msg);
+                    sess.span_diagnostic.span_err(span.entire(), &msg);
                 }
                 // Parse the contents of the sequence itself
                 let sequence = parse(
@@ -302,7 +302,7 @@ fn parse_tree<I>(
                 let (separator, op) =
                     parse_sep_and_kleene_op(
                         trees,
-                        span,
+                        span.entire(),
                         sess,
                         features,
                         attrs,
index 549e5f00dcec4cbe8cc7478f673140fadc4f0aa6..2ed469e8e77f855ce868b12a92786b0f67b09e54 100644 (file)
@@ -16,8 +16,8 @@
 use fold::noop_fold_tt;
 use parse::token::{self, Token, NtTT};
 use OneVector;
-use syntax_pos::{Span, DUMMY_SP};
-use tokenstream::{TokenStream, TokenTree, Delimited};
+use syntax_pos::DUMMY_SP;
+use tokenstream::{TokenStream, TokenTree, Delimited, DelimSpan};
 
 use rustc_data_structures::fx::FxHashMap;
 use rustc_data_structures::sync::Lrc;
@@ -30,7 +30,7 @@ enum Frame {
     Delimited {
         forest: Lrc<quoted::Delimited>,
         idx: usize,
-        span: Span,
+        span: DelimSpan,
     },
     Sequence {
         forest: Lrc<quoted::SequenceRepetition>,
@@ -42,7 +42,7 @@ enum Frame {
 impl Frame {
     fn new(tts: Vec<quoted::TokenTree>) -> Frame {
         let forest = Lrc::new(quoted::Delimited { delim: token::NoDelim, tts: tts });
-        Frame::Delimited { forest: forest, idx: 0, span: DUMMY_SP }
+        Frame::Delimited { forest: forest, idx: 0, span: DelimSpan::dummy() }
     }
 }
 
@@ -123,20 +123,20 @@ pub fn transcribe(cx: &ExtCtxt,
                                          &interpolations,
                                          &repeats) {
                     LockstepIterSize::Unconstrained => {
-                        cx.span_fatal(sp, /* blame macro writer */
+                        cx.span_fatal(sp.entire(), /* blame macro writer */
                             "attempted to repeat an expression \
                              containing no syntax \
                              variables matched as repeating at this depth");
                     }
                     LockstepIterSize::Contradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        cx.span_fatal(sp, &msg[..]);
+                        cx.span_fatal(sp.entire(), &msg[..]);
                     }
                     LockstepIterSize::Constraint(len, _) => {
                         if len == 0 {
                             if seq.op == quoted::KleeneOp::OneOrMore {
                                 // FIXME #2887 blame invoker
-                                cx.span_fatal(sp, "this must repeat at least once");
+                                cx.span_fatal(sp.entire(), "this must repeat at least once");
                             }
                         } else {
                             repeats.push((0, len));
index dff408d233977b559c99dce845735fba927c8956..032393b4f12534e3727808531de87e5bfd9a8999 100644 (file)
@@ -594,10 +594,13 @@ pub fn noop_fold_tt<T: Folder>(tt: TokenTree, fld: &mut T) -> TokenTree {
     match tt {
         TokenTree::Token(span, tok) =>
             TokenTree::Token(fld.new_span(span), fld.fold_token(tok)),
-        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(fld.new_span(span), Delimited {
-            tts: fld.fold_tts(delimed.stream()).into(),
-            delim: delimed.delim,
-        }),
+        TokenTree::Delimited(span, delimed) => TokenTree::Delimited(
+            DelimSpan::from_pair(fld.new_span(span.open), fld.new_span(span.close)),
+            Delimited {
+                tts: fld.fold_tts(delimed.stream()).into(),
+                delim: delimed.delim,
+            }
+        ),
     }
 }
 
index e2fd7faf90387e856007ccc7fe8368409ff15044..d19748937e108fc5ce4635098efd0e17b65234fa 100644 (file)
@@ -11,7 +11,7 @@
 use print::pprust::token_to_string;
 use parse::lexer::StringReader;
 use parse::{token, PResult};
-use tokenstream::{Delimited, TokenStream, TokenTree};
+use tokenstream::{Delimited, DelimSpan, TokenStream, TokenTree};
 
 impl<'a> StringReader<'a> {
     // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
@@ -68,7 +68,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
                 let tts = self.parse_token_trees_until_close_delim();
 
                 // Expand to cover the entire delimited token tree
-                let span = pre_span.with_hi(self.span.hi());
+                let delim_span = DelimSpan::from_pair(pre_span, self.span);
 
                 match self.token {
                     // Correct delimiter.
@@ -119,7 +119,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TokenStream> {
                     _ => {}
                 }
 
-                Ok(TokenTree::Delimited(span, Delimited {
+                Ok(TokenTree::Delimited(delim_span, Delimited {
                     delim,
                     tts: tts.into(),
                 }).into())
index 28d63399b4461f3c024a30820fb3a43ff2d3b757..5c6d5816a472b045acbdedef6395fbd5e94f065f 100644 (file)
@@ -722,7 +722,7 @@ mod tests {
     use attr::first_attr_value_str_by_name;
     use parse;
     use print::pprust::item_to_string;
-    use tokenstream::{self, TokenTree};
+    use tokenstream::{self, DelimSpan, TokenTree};
     use util::parser_testing::string_to_stream;
     use util::parser_testing::{string_to_expr, string_to_item};
     use with_globals;
@@ -805,7 +805,7 @@ fn string_to_tts_1() {
                 TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"), false)).into(),
                 TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"), false)).into(),
                 TokenTree::Delimited(
-                    sp(5, 14),
+                    DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
                     tokenstream::Delimited {
                         delim: token::DelimToken::Paren,
                         tts: TokenStream::concat(vec![
@@ -817,7 +817,7 @@ fn string_to_tts_1() {
                         ]).into(),
                     }).into(),
                 TokenTree::Delimited(
-                    sp(15, 21),
+                    DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
                     tokenstream::Delimited {
                         delim: token::DelimToken::Brace,
                         tts: TokenStream::concat(vec![
index c741bde7c5f24f7d1b4dee695d908ad3f028c4b4..b9d4e9fc268d849e4a8f3316cdd0bf2be80e1f9d 100644 (file)
@@ -54,7 +54,7 @@
 use ptr::P;
 use parse::PResult;
 use ThinVec;
-use tokenstream::{self, Delimited, ThinTokenStream, TokenTree, TokenStream};
+use tokenstream::{self, Delimited, DelimSpan, ThinTokenStream, TokenTree, TokenStream};
 use symbol::{Symbol, keywords};
 
 use std::borrow::Cow;
@@ -262,7 +262,7 @@ struct TokenCursor {
 #[derive(Clone)]
 struct TokenCursorFrame {
     delim: token::DelimToken,
-    span: Span,
+    span: DelimSpan,
     open_delim: bool,
     tree_cursor: tokenstream::Cursor,
     close_delim: bool,
@@ -293,7 +293,7 @@ enum LastToken {
 }
 
 impl TokenCursorFrame {
-    fn new(sp: Span, delimited: &Delimited) -> Self {
+    fn new(sp: DelimSpan, delimited: &Delimited) -> Self {
         TokenCursorFrame {
             delim: delimited.delim,
             span: sp,
@@ -311,13 +311,13 @@ fn next(&mut self) -> TokenAndSpan {
             let tree = if !self.frame.open_delim {
                 self.frame.open_delim = true;
                 Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
-                    .open_tt(self.frame.span)
+                    .open_tt(self.frame.span.open)
             } else if let Some(tree) = self.frame.tree_cursor.next() {
                 tree
             } else if !self.frame.close_delim {
                 self.frame.close_delim = true;
                 Delimited { delim: self.frame.delim, tts: TokenStream::empty().into() }
-                    .close_tt(self.frame.span)
+                    .close_tt(self.frame.span.close)
             } else if let Some(frame) = self.stack.pop() {
                 self.frame = frame;
                 continue
@@ -361,7 +361,8 @@ fn next_desugared(&mut self) -> TokenAndSpan {
             num_of_hashes = cmp::max(num_of_hashes, count);
         }
 
-        let body = TokenTree::Delimited(sp, Delimited {
+        let delim_span = DelimSpan::from_single(sp);
+        let body = TokenTree::Delimited(delim_span, Delimited {
             delim: token::Bracket,
             tts: [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
                   TokenTree::Token(sp, token::Eq),
@@ -370,7 +371,7 @@ fn next_desugared(&mut self) -> TokenAndSpan {
                 .iter().cloned().collect::<TokenStream>().into(),
         });
 
-        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited {
+        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(delim_span, &Delimited {
             delim: token::NoDelim,
             tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
                 [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
@@ -560,7 +561,7 @@ pub fn new(sess: &'a ParseSess,
             root_module_name: None,
             expected_tokens: Vec::new(),
             token_cursor: TokenCursor {
-                frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited {
+                frame: TokenCursorFrame::new(DelimSpan::dummy(), &Delimited {
                     delim: token::NoDelim,
                     tts: tokens.into(),
                 }),
@@ -1229,7 +1230,8 @@ fn look_ahead_span(&self, dist: usize) -> Span {
         }
 
         match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
-            Some(TokenTree::Token(span, _)) | Some(TokenTree::Delimited(span, _)) => span,
+            Some(TokenTree::Token(span, _)) => span,
+            Some(TokenTree::Delimited(span, _)) => span.entire(),
             None => self.look_ahead_span(dist - 1),
         }
     }
@@ -2796,7 +2798,7 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a,
             token::OpenDelim(..) => {
                 let frame = mem::replace(&mut self.token_cursor.frame,
                                          self.token_cursor.stack.pop().unwrap());
-                self.span = frame.span;
+                self.span = frame.span.entire();
                 self.bump();
                 TokenTree::Delimited(frame.span, Delimited {
                     delim: frame.delim,
index 1d0c6b5317a38d3484de1adbfd1ed0ef95eb9f5a..6e8014284ec401c2d2ec93a9650a8cae331cd6aa 100644 (file)
@@ -23,8 +23,7 @@
 use syntax::parse::parse_stream_from_source_str;
 use syntax_pos::{self, Span, FileName};
 use syntax_pos::symbol::{self, Symbol};
-use tokenstream::{TokenStream, TokenTree};
-use tokenstream;
+use tokenstream::{self, DelimSpan, TokenStream, TokenTree};
 
 use std::{cmp, fmt};
 use std::mem;
@@ -825,7 +824,8 @@ fn prepend_attrs(sess: &ParseSess,
         // that it encompasses more than each token, but it hopefully is "good
         // enough" for now at least.
         builder.push(tokenstream::TokenTree::Token(attr.span, Pound));
-        builder.push(tokenstream::TokenTree::Delimited(attr.span, tokens));
+        let delim_span = DelimSpan::from_single(attr.span);
+        builder.push(tokenstream::TokenTree::Delimited(delim_span, tokens));
     }
     builder.push(tokens.clone());
     Some(builder.build())
index 840ee299bf338dd302d268ad6fb4a1edb9641a3e..70867f9e42ff5f3cc9c9644f771397d2980559ac 100644 (file)
@@ -22,7 +22,7 @@
 //! and a borrowed `TokenStream` is sufficient to build an owned `TokenStream` without taking
 //! ownership of the original.
 
-use syntax_pos::{BytePos, Span, DUMMY_SP};
+use syntax_pos::{BytePos, Mark, Span, DUMMY_SP};
 use ext::base;
 use ext::tt::{macro_parser, quoted};
 use parse::Directory;
@@ -97,7 +97,7 @@ pub enum TokenTree {
     /// A single token
     Token(Span, token::Token),
     /// A delimited sequence of token trees
-    Delimited(Span, Delimited),
+    Delimited(DelimSpan, Delimited),
 }
 
 impl TokenTree {
@@ -145,16 +145,16 @@ pub fn probably_equal_for_proc_macro(&self, other: &TokenTree) -> bool {
     /// Retrieve the TokenTree's span.
     pub fn span(&self) -> Span {
         match *self {
-            TokenTree::Token(sp, _) | TokenTree::Delimited(sp, _) => sp,
+            TokenTree::Token(sp, _) => sp,
+            TokenTree::Delimited(sp, _) => sp.entire(),
         }
     }
 
     /// Modify the `TokenTree`'s span inplace.
     pub fn set_span(&mut self, span: Span) {
         match *self {
-            TokenTree::Token(ref mut sp, _) | TokenTree::Delimited(ref mut sp, _) => {
-                *sp = span;
-            }
+            TokenTree::Token(ref mut sp, _) => *sp = span,
+            TokenTree::Delimited(ref mut sp, _) => *sp = DelimSpan::from_single(span),
         }
     }
 
@@ -192,27 +192,20 @@ pub(crate) fn add_comma(&self) -> Option<(TokenStream, Span)> {
             let mut iter = slice.iter().enumerate().peekable();
             while let Some((pos, ts)) = iter.next() {
                 if let Some((_, next)) = iter.peek() {
-                    match (ts, next) {
-                        (TokenStream {
-                            kind: TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma))
-                        }, _) |
-                        (_, TokenStream {
-                            kind: TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma))
-                        }) => {}
-                        (TokenStream {
-                            kind: TokenStreamKind::Tree(TokenTree::Token(sp, _))
-                        }, _) |
-                        (TokenStream {
-                            kind: TokenStreamKind::Tree(TokenTree::Delimited(sp, _))
-                        }, _) => {
-                            let sp = sp.shrink_to_hi();
-                            let comma = TokenStream {
-                                kind: TokenStreamKind::Tree(TokenTree::Token(sp, token::Comma)),
-                            };
-                            suggestion = Some((pos, comma, sp));
+                    let sp = match (&ts.kind, &next.kind) {
+                        (TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma)), _) |
+                        (_, TokenStreamKind::Tree(TokenTree::Token(_, token::Token::Comma))) => {
+                            continue;
                         }
-                        _ => {}
-                    }
+                        (TokenStreamKind::Tree(TokenTree::Token(sp, _)), _) => *sp,
+                        (TokenStreamKind::Tree(TokenTree::Delimited(sp, _)), _) => sp.entire(),
+                        _ => continue,
+                    };
+                    let sp = sp.shrink_to_hi();
+                    let comma = TokenStream {
+                        kind: TokenStreamKind::Tree(TokenTree::Token(sp, token::Comma)),
+                    };
+                    suggestion = Some((pos, comma, sp));
                 }
             }
             if let Some((pos, comma, sp)) = suggestion {
@@ -718,6 +711,40 @@ fn decode<D: Decoder>(decoder: &mut D) -> Result<ThinTokenStream, D::Error> {
     }
 }
 
+#[derive(Debug, Copy, Clone, PartialEq, RustcEncodable, RustcDecodable)]
+pub struct DelimSpan {
+    pub open: Span,
+    pub close: Span,
+}
+
+impl DelimSpan {
+    pub fn from_single(sp: Span) -> Self {
+        DelimSpan {
+            open: sp,
+            close: sp,
+        }
+    }
+
+    pub fn from_pair(open: Span, close: Span) -> Self {
+        DelimSpan { open, close }
+    }
+
+    pub fn dummy() -> Self {
+        Self::from_single(DUMMY_SP)
+    }
+
+    pub fn entire(self) -> Span {
+        self.open.with_hi(self.close.hi())
+    }
+
+    pub fn apply_mark(self, mark: Mark) -> Self {
+        DelimSpan {
+            open: self.open.apply_mark(mark),
+            close: self.close.apply_mark(mark),
+        }
+    }
+}
+
 #[cfg(test)]
 mod tests {
     use super::*;
index f1777745e06c343ea61bb5366566d8ed8739572e..f2f14f84923c0c2aacb5600ef18c89a316724e0e 100644 (file)
@@ -70,7 +70,8 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
                     _ => unreachable!(),
                 }
             }).collect();
-            let arm = cx.arm(seq_sp, pats, cx.expr_bool(seq_sp, true));
+            let span = seq_sp.entire();
+            let arm = cx.arm(span, pats, cx.expr_bool(span, true));
 
             quote_expr!(cx,
                 match $matched_expr {