git.lizzy.rs Git - rust.git/commitdiff
Clean up `ext::tt::transcribe`.
author     Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
           Sat, 14 Jan 2017 12:15:26 +0000 (12:15 +0000)
committer  Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
           Tue, 17 Jan 2017 08:17:28 +0000 (08:17 +0000)
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/transcribe.rs
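In short, callers no longer construct a `TtReader` with `new_tt_reader` and pump it with `real_token` until `Eof`; they call `transcribe` once and get the expanded token trees back as a `Vec<TokenTree>`. The caller-side change in `generic_extension` (lifted from the macro_rules.rs hunk below, with surrounding context elided):

    // Before: drive the reader by hand and collect tokens until Eof.
    let mut trncbr =
        new_tt_reader(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
    let mut tts = Vec::new();
    loop {
        let tok = trncbr.real_token();
        if tok.tok == token::Eof {
            break;
        }
        tts.push(TokenTree::Token(tok.sp, tok.tok));
    }

    // After: a single call returns the transcribed token trees.
    let tts = transcribe(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);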

diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs
index 585232c5462b4a3cd64e4117493df03e00c7f5ee..f6a25d4aceed7a7923b1e249cbeed92f87c7e7b6 100644
--- a/src/libsyntax/ext/tt/macro_rules.rs
+++ b/src/libsyntax/ext/tt/macro_rules.rs
@@ -16,7 +16,7 @@
 use ext::tt::macro_parser::{Success, Error, Failure};
 use ext::tt::macro_parser::{MatchedSeq, MatchedNonterminal};
 use ext::tt::macro_parser::{parse, parse_failure_msg};
-use ext::tt::transcribe::new_tt_reader;
+use ext::tt::transcribe::transcribe;
 use parse::{Directory, ParseSess};
 use parse::parser::Parser;
 use parse::token::{self, NtTT, Token};
@@ -113,16 +113,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                     _ => cx.span_bug(sp, "malformed macro rhs"),
                 };
                 // rhs has holes ( `$id` and `$(...)` that need filled)
-                let mut trncbr =
-                    new_tt_reader(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
-                let mut tts = Vec::new();
-                loop {
-                    let tok = trncbr.real_token();
-                    if tok.tok == token::Eof {
-                        break
-                    }
-                    tts.push(TokenTree::Token(tok.sp, tok.tok));
-                }
+                let tts = transcribe(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
                 let directory = Directory {
                     path: cx.current_expansion.module.directory.clone(),
                     ownership: cx.current_expansion.directory_ownership,
diff --git a/src/libsyntax/ext/tt/transcribe.rs b/src/libsyntax/ext/tt/transcribe.rs
index 82f1e18389565f69631b45bb84e7af355edd5b96..bf6851ec1dc014b6f808fbce0377742cc0cb3972 100644
--- a/src/libsyntax/ext/tt/transcribe.rs
+++ b/src/libsyntax/ext/tt/transcribe.rs
@@ -13,7 +13,6 @@
 use errors::Handler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use parse::token::{self, MatchNt, SubstNt, Token, NtIdent};
-use parse::lexer::TokenAndSpan;
 use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{self, TokenTree};
 use util::small_vector::SmallVector;
@@ -32,8 +31,8 @@ struct TtFrame {
 }
 
 #[derive(Clone)]
-pub struct TtReader<'a> {
-    pub sp_diag: &'a Handler,
+struct TtReader<'a> {
+    sp_diag: &'a Handler,
     /// the unzipped tree:
     stack: SmallVector<TtFrame>,
     /* for MBE-style macro transcription */
@@ -41,24 +40,15 @@ pub struct TtReader<'a> {
 
     repeat_idx: Vec<usize>,
     repeat_len: Vec<usize>,
-    /* cached: */
-    pub cur_tok: Token,
-    pub cur_span: Span,
-}
-
-impl<'a> TtReader<'a> {
-    pub fn real_token(&mut self) -> TokenAndSpan {
-        tt_next_token(self)
-    }
 }
 
 /// This can do Macro-By-Example transcription. On the other hand, if
 /// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
 /// (and should) be None.
-pub fn new_tt_reader(sp_diag: &Handler,
-                     interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
-                     src: Vec<tokenstream::TokenTree>)
-                     -> TtReader {
+pub fn transcribe(sp_diag: &Handler,
+                  interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
+                  src: Vec<tokenstream::TokenTree>)
+                  -> Vec<TokenTree> {
     let mut r = TtReader {
         sp_diag: sp_diag,
         stack: SmallVector::one(TtFrame {
@@ -77,12 +67,15 @@ pub fn new_tt_reader(sp_diag: &Handler,
         },
         repeat_idx: Vec::new(),
         repeat_len: Vec::new(),
-        /* dummy values, never read: */
-        cur_tok: token::Eof,
-        cur_span: DUMMY_SP,
     };
-    tt_next_token(&mut r); /* get cur_tok and cur_span set up */
-    r
+
+    let mut tts = Vec::new();
+    let mut prev_span = DUMMY_SP;
+    while let Some(tt) = tt_next_token(&mut r, prev_span) {
+        prev_span = tt.span();
+        tts.push(tt);
+    }
+    tts
 }
 
 fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> {
@@ -156,38 +149,24 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
 
 /// Return the next token from the TtReader.
 /// EFFECT: advances the reader's token field
-pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
-    // FIXME(pcwalton): Bad copy?
-    let ret_val = TokenAndSpan {
-        tok: r.cur_tok.clone(),
-        sp: r.cur_span.clone(),
-    };
+fn tt_next_token(r: &mut TtReader, prev_span: Span) -> Option<TokenTree> {
     loop {
-        let should_pop = match r.stack.last() {
-            None => {
-                assert_eq!(ret_val.tok, token::Eof);
-                return ret_val;
-            }
-            Some(frame) => {
-                if frame.idx < frame.forest.len() {
-                    break;
-                }
-                !frame.dotdotdoted ||
-                    *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
+        let should_pop = if let Some(frame) = r.stack.last() {
+            if frame.idx < frame.forest.len() {
+                break;
             }
+            !frame.dotdotdoted || *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
+        } else {
+            return None;
         };
 
         /* done with this set; pop or repeat? */
         if should_pop {
             let prev = r.stack.pop().unwrap();
-            match r.stack.last_mut() {
-                None => {
-                    r.cur_tok = token::Eof;
-                    return ret_val;
-                }
-                Some(frame) => {
-                    frame.idx += 1;
-                }
+            if let Some(frame) = r.stack.last_mut() {
+                frame.idx += 1;
+            } else {
+                return None;
             }
             if prev.dotdotdoted {
                 r.repeat_idx.pop();
@@ -197,8 +176,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             *r.repeat_idx.last_mut().unwrap() += 1;
             r.stack.last_mut().unwrap().idx = 0;
             if let Some(tk) = r.stack.last().unwrap().sep.clone() {
-                r.cur_tok = tk; // repeat same span, I guess
-                return ret_val;
+                return Some(TokenTree::Token(prev_span, tk)); // repeat same span, I guess
             }
         }
     }
@@ -234,7 +212,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                             }
 
                             r.stack.last_mut().unwrap().idx += 1;
-                            return tt_next_token(r);
+                            return tt_next_token(r, prev_span);
                         }
                         r.repeat_len.push(len);
                         r.repeat_idx.push(0);
@@ -252,9 +230,7 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 r.stack.last_mut().unwrap().idx += 1;
                 match lookup_cur_matched(r, ident) {
                     None => {
-                        r.cur_span = sp;
-                        r.cur_tok = SubstNt(ident);
-                        return ret_val;
+                        return Some(TokenTree::Token(sp, SubstNt(ident)));
                         // this can't be 0 length, just like TokenTree::Delimited
                     }
                     Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
@@ -263,15 +239,11 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                             // (a) idents can be in lots of places, so it'd be a pain
                             // (b) we actually can, since it's a token.
                             NtIdent(ref sn) => {
-                                r.cur_span = sn.span;
-                                r.cur_tok = token::Ident(sn.node);
-                                return ret_val;
+                                return Some(TokenTree::Token(sn.span, token::Ident(sn.node)));
                             }
                             _ => {
-                                // FIXME(pcwalton): Bad copy.
-                                r.cur_span = sp;
-                                r.cur_tok = token::Interpolated(nt.clone());
-                                return ret_val;
+                                // FIXME(pcwalton): Bad copy
+                                return Some(TokenTree::Token(sp, token::Interpolated(nt.clone())));
                             }
                         }
                     } else {
@@ -292,11 +264,9 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 });
                 // if this could be 0-length, we'd need to potentially recur here
             }
-            TokenTree::Token(sp, tok) => {
-                r.cur_span = sp;
-                r.cur_tok = tok;
+            tt @ TokenTree::Token(..) => {
                 r.stack.last_mut().unwrap().idx += 1;
-                return ret_val;
+                return Some(tt);
             }
         }
     }
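
Per the doc comment on `transcribe`, callers only need to supply an interpolation map when the source contains `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s to substitute. A minimal sketch of the two call shapes, assuming a `sess: &ParseSess` and a pre-built `src: Vec<tokenstream::TokenTree>` are in scope (both hypothetical names, not part of this commit):

    // No sequences or metavariables in `src`: pass None for `interp`.
    let plain: Vec<TokenTree> = transcribe(&sess.span_diagnostic, None, src);

    // With bindings produced by the macro parser (as in `generic_extension`
    // above), pass the matched nonterminals instead:
    // let expanded = transcribe(&sess.span_diagnostic, Some(named_matches), rhs);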
     }