git.lizzy.rs Git - rust.git/commitdiff
Remove `open_span` and `close_span` from `Delimited`.
author    Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
          Mon, 23 Jan 2017 04:58:15 +0000 (04:58 +0000)
committer Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
          Mon, 23 Jan 2017 06:49:06 +0000 (06:49 +0000)
src/libproc_macro_plugin/qquote.rs
src/librustc_incremental/calculate_svh/svh_visitor.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/fold.rs
src/libsyntax/parse/lexer/tokentrees.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/tokenstream.rs
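
In short, this commit drops the stored `open_span`/`close_span` fields and instead derives both delimiter spans from the span of the enclosing `TokenTree::Delimited`, using the new `DelimToken::len()` (added in parse/token.rs below) as the delimiter width. A minimal standalone sketch of that derivation, with simplified stand-ins for the real `syntax_pos` and `token` types (this is not the libsyntax code itself):

// Sketch only: simplified stand-ins for syntax_pos::{BytePos, Span, DUMMY_SP}
// and token::DelimToken, showing how the delimiter spans are now computed.

#[derive(Copy, Clone, PartialEq, Debug)]
struct BytePos(u32);

#[derive(Copy, Clone, PartialEq, Debug)]
struct Span { lo: BytePos, hi: BytePos }

const DUMMY_SP: Span = Span { lo: BytePos(0), hi: BytePos(0) };

#[allow(dead_code)]
#[derive(Copy, Clone, PartialEq)]
enum DelimToken { Paren, Bracket, Brace, NoDelim }

impl DelimToken {
    // A `NoDelim` group has invisible (zero-width) delimiters; the rest are one byte wide.
    fn len(&self) -> u32 {
        if let DelimToken::NoDelim = *self { 0 } else { 1 }
    }
}

// The span of the whole delimited tree is the single source of truth:
// the opening delimiter is its first `delim.len()` bytes, the closing
// delimiter its last, and dummy spans stay dummy.
fn open_span(delim: DelimToken, span: Span) -> Span {
    if span == DUMMY_SP { DUMMY_SP }
    else { Span { hi: BytePos(span.lo.0 + delim.len()), ..span } }
}

fn close_span(delim: DelimToken, span: Span) -> Span {
    if span == DUMMY_SP { DUMMY_SP }
    else { Span { lo: BytePos(span.hi.0 - delim.len()), ..span } }
}

fn main() {
    // `(b: i32)` occupying bytes 5..14, as in the `string_to_tts_1` test below.
    let group = Span { lo: BytePos(5), hi: BytePos(14) };
    assert_eq!(open_span(DelimToken::Paren, group),
               Span { lo: BytePos(5), hi: BytePos(6) });    // the `(`
    assert_eq!(close_span(DelimToken::Paren, group),
               Span { lo: BytePos(13), hi: BytePos(14) });  // the `)`
}

The expectations in the `string_to_tts_1` test below follow the same arithmetic: the `(b: i32)` group spans bytes 5..14, so the opening `(` is 5..6 and the closing `)` is 13..14, spans that previously had to be stored in the struct itself.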

src/libproc_macro_plugin/qquote.rs
index 69c6eba6c0f89a5aa968f9e254933b5af127e1c1..300b4df89294354edcafa092abe5601a700245f6 100644 (file)
@@ -52,8 +52,6 @@ fn delimit(delim: token::DelimToken, stream: TokenStream) -> TokenStream {
     TokenTree::Delimited(DUMMY_SP, Rc::new(Delimited {
         delim: delim,
         tts: stream.trees().cloned().collect(),
-        open_span: DUMMY_SP,
-        close_span: DUMMY_SP,
     })).into()
 }
 
@@ -129,8 +127,6 @@ fn quote(&self) -> TokenStream {
 impl Quote for Rc<Delimited> {
     fn quote(&self) -> TokenStream {
         quote!(::std::rc::Rc::new(::syntax::tokenstream::Delimited {
-            open_span: ::syntax::ext::quote::rt::DUMMY_SP,
-            close_span: ::syntax::ext::quote::rt::DUMMY_SP,
             delim: (quote self.delim),
             tts: (quote self.tts),
         }))
src/librustc_incremental/calculate_svh/svh_visitor.rs
index f0e86e81c076d060e4c3a1a373949026f1d70259..3427a42526181c7205ade8b9f0e6549f29a0da2d 100644 (file)
@@ -1034,18 +1034,14 @@ fn hash_token_tree(&mut self, tt: &tokenstream::TokenTree) {
                 hash_span!(self, span);
                 let tokenstream::Delimited {
                     ref delim,
-                    open_span,
                     ref tts,
-                    close_span,
                 } = **delimited;
 
                 delim.hash(self.st);
-                hash_span!(self, open_span);
                 tts.len().hash(self.st);
                 for sub_tt in tts {
                     self.hash_token_tree(sub_tt);
                 }
-                hash_span!(self, close_span);
             }
             tokenstream::TokenTree::Sequence(span, ref sequence_repetition) => {
                 hash_span!(self, span);
src/libsyntax/ext/quote.rs
index c0cbda4ba12269d76e097f59205ccba24302c35d..8258a7427b62bbb5ba3ff5208e85b0d59c4996ca 100644 (file)
@@ -231,9 +231,7 @@ fn to_tokens(&self, cx: &ExtCtxt) -> Vec<TokenTree> {
             }
             r.push(TokenTree::Delimited(self.span, Rc::new(tokenstream::Delimited {
                 delim: token::Bracket,
-                open_span: self.span,
                 tts: self.value.to_tokens(cx),
-                close_span: self.span,
             })));
             r
         }
@@ -250,9 +248,7 @@ impl ToTokens for () {
         fn to_tokens(&self, _cx: &ExtCtxt) -> Vec<TokenTree> {
             vec![TokenTree::Delimited(DUMMY_SP, Rc::new(tokenstream::Delimited {
                 delim: token::Paren,
-                open_span: DUMMY_SP,
                 tts: vec![],
-                close_span: DUMMY_SP,
             }))]
         }
     }
@@ -757,11 +753,11 @@ fn statements_mk_tt(cx: &ExtCtxt, tt: &TokenTree, matcher: bool) -> Vec<ast::Stm
                                     vec![e_tok]);
             vec![cx.stmt_expr(e_push)]
         },
-        TokenTree::Delimited(_, ref delimed) => {
-            statements_mk_tt(cx, &delimed.open_tt(), matcher).into_iter()
+        TokenTree::Delimited(span, ref delimed) => {
+            statements_mk_tt(cx, &delimed.open_tt(span), matcher).into_iter()
                 .chain(delimed.tts.iter()
                                   .flat_map(|tt| statements_mk_tt(cx, tt, matcher)))
-                .chain(statements_mk_tt(cx, &delimed.close_tt(), matcher))
+                .chain(statements_mk_tt(cx, &delimed.close_tt(span), matcher))
                 .collect()
         },
         TokenTree::Sequence(sp, ref seq) => {
src/libsyntax/ext/tt/macro_rules.rs
index f6a25d4aceed7a7923b1e249cbeed92f87c7e7b6..d0c1c0efea7a399e4496e8b24c9e732d5a06e6dc 100644 (file)
@@ -350,9 +350,9 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
                     TokenTree::Token(sp, ref tok) => {
                         first.replace_with((sp, tok.clone()));
                     }
-                    TokenTree::Delimited(_, ref delimited) => {
+                    TokenTree::Delimited(span, ref delimited) => {
                         build_recur(sets, &delimited.tts[..]);
-                        first.replace_with((delimited.open_span,
+                        first.replace_with((delimited.open_tt(span).span(),
                                             Token::OpenDelim(delimited.delim)));
                     }
                     TokenTree::Sequence(sp, ref seq_rep) => {
@@ -410,8 +410,8 @@ fn first(&self, tts: &[TokenTree]) -> TokenSet {
                     first.add_one((sp, tok.clone()));
                     return first;
                 }
-                TokenTree::Delimited(_, ref delimited) => {
-                    first.add_one((delimited.open_span,
+                TokenTree::Delimited(span, ref delimited) => {
+                    first.add_one((delimited.open_tt(span).span(),
                                    Token::OpenDelim(delimited.delim)));
                     return first;
                 }
@@ -603,8 +603,9 @@ fn check_matcher_core(sess: &ParseSess,
                     suffix_first = build_suffix_first();
                 }
             }
-            TokenTree::Delimited(_, ref d) => {
-                let my_suffix = TokenSet::singleton((d.close_span, Token::CloseDelim(d.delim)));
+            TokenTree::Delimited(span, ref d) => {
+                let my_suffix = TokenSet::singleton((d.close_tt(span).span(),
+                                                     Token::CloseDelim(d.delim)));
                 check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
                 // don't track non NT tokens
                 last.replace_with_irrelevant();
src/libsyntax/fold.rs
index 2e5ce739fb34a7221755d986b256c9811711c891..c42bf24578f0a376a0feb7e1f939a71bd6fa4cda 100644 (file)
@@ -543,9 +543,7 @@ pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
             TokenTree::Delimited(fld.new_span(span), Rc::new(
                             Delimited {
                                 delim: delimed.delim,
-                                open_span: fld.new_span(delimed.open_span),
                                 tts: fld.fold_tts(&delimed.tts),
-                                close_span: fld.new_span(delimed.close_span),
                             }
                         ))
         },
src/libsyntax/parse/lexer/tokentrees.rs
index 7b6f00e0e8265c7415394698c3480609c55dd562..eafc3f77ab05227bbe40a6f20ab9eec74f559371 100644 (file)
@@ -59,7 +59,6 @@ fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
 
                 // Parse the open delimiter.
                 self.open_braces.push((delim, self.span));
-                let open_span = self.span;
                 self.real_token();
 
                 // Parse the token trees within the delimiters.
@@ -67,9 +66,8 @@ fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
                 // uses an incorrect delimiter.
                 let tts = self.parse_token_trees_until_close_delim();
 
-                let close_span = self.span;
                 // Expand to cover the entire delimited token tree
-                let span = Span { hi: close_span.hi, ..pre_span };
+                let span = Span { hi: self.span.hi, ..pre_span };
 
                 match self.token {
                     // Correct delimiter.
@@ -115,9 +113,7 @@ fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
 
                 Ok(TokenTree::Delimited(span, Rc::new(Delimited {
                     delim: delim,
-                    open_span: open_span,
                     tts: tts,
-                    close_span: close_span,
                 })))
             },
             token::CloseDelim(_) => {
src/libsyntax/parse/mod.rs
index 65e7ec0a34cf7775e458f2202ff78aeab88013b1..f1a3b523cfd93cef737bd75d0311c04f3c538ca8 100644 (file)
@@ -725,24 +725,20 @@ fn string_to_tts_1() {
                 sp(5, 14),
                 Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
-                    open_span: sp(5, 6),
                     tts: vec![
                         TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
                         TokenTree::Token(sp(8, 9), token::Colon),
                         TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
                     ],
-                    close_span: sp(13, 14),
                 })),
             TokenTree::Delimited(
                 sp(15, 21),
                 Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Brace,
-                    open_span: sp(15, 16),
                     tts: vec![
                         TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
                         TokenTree::Token(sp(18, 19), token::Semi),
                     ],
-                    close_span: sp(20, 21),
                 }))
         ];
 
src/libsyntax/parse/parser.rs
index 5dd772041e27106af1efa87d65172d1ce1aff007..f172f52104c3d6677c524e50d872ccf47f5ecea6 100644 (file)
@@ -254,9 +254,7 @@ pub fn new(sess: &'a ParseSess,
                -> Self {
         let tt = TokenTree::Delimited(syntax_pos::DUMMY_SP, Rc::new(Delimited {
             delim: token::NoDelim,
-            open_span: syntax_pos::DUMMY_SP,
             tts: tokens,
-            close_span: syntax_pos::DUMMY_SP,
         }));
         let mut parser = Parser {
             sess: sess,
@@ -2717,7 +2715,7 @@ pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
                 }
 
                 let parsing_token_tree = ::std::mem::replace(&mut self.parsing_token_tree, true);
-                let open_span = self.span;
+                let lo = self.span.lo;
                 self.bump();
                 let tts = self.parse_seq_to_before_tokens(&[&token::CloseDelim(token::Brace),
                                                             &token::CloseDelim(token::Paren),
@@ -2726,16 +2724,11 @@ pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
                                                           |p| p.parse_token_tree(),
                                                           |mut e| e.emit());
                 self.parsing_token_tree = parsing_token_tree;
-
-                let close_span = self.span;
                 self.bump();
 
-                let span = Span { lo: open_span.lo, ..close_span };
-                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
+                Ok(TokenTree::Delimited(Span { lo: lo, ..self.prev_span }, Rc::new(Delimited {
                     delim: delim,
-                    open_span: open_span,
                     tts: tts,
-                    close_span: close_span,
                 })))
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
src/libsyntax/parse/token.rs
index 8ac39dd462e7cbf254667b21f5e12ae21329d352..bf790b96e37f6c09270284840e549b9d379afc03 100644 (file)
@@ -49,6 +49,12 @@ pub enum DelimToken {
     NoDelim,
 }
 
+impl DelimToken {
+    pub fn len(&self) -> u32 {
+        if *self == NoDelim { 0 } else { 1 }
+    }
+}
+
 #[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Hash, Debug, Copy)]
 pub enum Lit {
     Byte(ast::Name),
src/libsyntax/tokenstream.rs
index 26e976dc0762fc5d7c41cb47e3ae409d4dd1f011..f29a168e6df196c7131a910074f5833a59dee564 100644 (file)
@@ -25,7 +25,7 @@
 //! ownership of the original.
 
 use ast::{self, AttrStyle, LitKind};
-use syntax_pos::Span;
+use syntax_pos::{BytePos, Span, DUMMY_SP};
 use codemap::Spanned;
 use ext::base;
 use ext::tt::macro_parser;
 pub struct Delimited {
     /// The type of delimiter
     pub delim: token::DelimToken,
-    /// The span covering the opening delimiter
-    pub open_span: Span,
     /// The delimited sequence of token trees
     pub tts: Vec<TokenTree>,
-    /// The span covering the closing delimiter
-    pub close_span: Span,
 }
 
 impl Delimited {
@@ -65,13 +61,21 @@ pub fn close_token(&self) -> token::Token {
     }
 
     /// Returns the opening delimiter as a token tree.
-    pub fn open_tt(&self) -> TokenTree {
-        TokenTree::Token(self.open_span, self.open_token())
+    pub fn open_tt(&self, span: Span) -> TokenTree {
+        let open_span = match span {
+            DUMMY_SP => DUMMY_SP,
+            _ => Span { hi: span.lo + BytePos(self.delim.len()), ..span },
+        };
+        TokenTree::Token(open_span, self.open_token())
     }
 
     /// Returns the closing delimiter as a token tree.
-    pub fn close_tt(&self) -> TokenTree {
-        TokenTree::Token(self.close_span, self.close_token())
+    pub fn close_tt(&self, span: Span) -> TokenTree {
+        let close_span = match span {
+            DUMMY_SP => DUMMY_SP,
+            _ => Span { lo: span.hi - BytePos(self.delim.len()), ..span },
+        };
+        TokenTree::Token(close_span, self.close_token())
     }
 
     /// Returns the token trees inside the delimiters.
@@ -175,23 +179,21 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
 
                 TokenTree::Delimited(sp, Rc::new(Delimited {
                     delim: token::Bracket,
-                    open_span: sp,
                     tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
                               TokenTree::Token(sp, token::Eq),
                               TokenTree::Token(sp, token::Literal(
                                   token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
-                    close_span: sp,
                 }))
             }
             (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
                 delimed.tts[index].clone()
             }
-            (&TokenTree::Delimited(_, ref delimed), _) => {
+            (&TokenTree::Delimited(span, ref delimed), _) => {
                 if index == 0 {
-                    return delimed.open_tt();
+                    return delimed.open_tt(span);
                 }
                 if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt();
+                    return delimed.close_tt(span);
                 }
                 delimed.tts[index - 1].clone()
             }