Introduce `syntax::parse::parser::TokenCursor`.
author    Jeffrey Seyfried <jeffrey.seyfried@gmail.com>  Mon, 20 Feb 2017 05:44:06 +0000
committer Jeffrey Seyfried <jeffrey.seyfried@gmail.com>  Fri, 3 Mar 2017 02:05:57 +0000
src/libsyntax/parse/parser.rs
src/libsyntax/tokenstream.rs

diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs
index 71274c4fdaa4ead88e9512baac8a1ec75d7b065c..b12b0c03267010e30b2050a101451934bc551f7c 100644
--- a/src/libsyntax/parse/parser.rs
+++ b/src/libsyntax/parse/parser.rs
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use abi::{self, Abi};
-use ast::BareFnTy;
+use ast::{AttrStyle, BareFnTy};
 use ast::{RegionTyParamBound, TraitTyParamBound, TraitBoundModifier};
 use ast::Unsafety;
 use ast::{Mod, Arg, Arm, Attribute, BindingMode, TraitItemKind};
 use parse::{self, classify, token};
 use parse::common::SeqSep;
 use parse::lexer::TokenAndSpan;
+use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::obsolete::ObsoleteSyntax;
 use parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
 use util::parser::{AssocOp, Fixity};
 use print::pprust;
 use ptr::P;
 use parse::PResult;
-use tokenstream::{Delimited, TokenTree};
+use tokenstream::{self, Delimited, TokenTree, TokenStream};
 use symbol::{Symbol, keywords};
 use util::ThinVec;
 
 use std::collections::HashSet;
-use std::mem;
+use std::{cmp, mem, slice};
 use std::path::{Path, PathBuf};
 use std::rc::Rc;
-use std::slice;
 
 bitflags! {
     flags Restrictions: u8 {
@@ -175,12 +175,108 @@ pub struct Parser<'a> {
     /// into modules, and sub-parsers have new values for this name.
     pub root_module_name: Option<String>,
     pub expected_tokens: Vec<TokenType>,
-    pub tts: Vec<(TokenTree, usize)>,
+    token_cursor: TokenCursor,
     pub desugar_doc_comments: bool,
     /// Whether we should configure out of line modules as we parse.
     pub cfg_mods: bool,
 }
 
+struct TokenCursor {
+    frame: TokenCursorFrame,
+    stack: Vec<TokenCursorFrame>,
+}
+
+struct TokenCursorFrame {
+    delim: token::DelimToken,
+    span: Span,
+    open_delim: bool,
+    tree_cursor: tokenstream::Cursor,
+    close_delim: bool,
+}
+
+impl TokenCursorFrame {
+    fn new(sp: Span, delimited: &Delimited) -> Self {
+        TokenCursorFrame {
+            delim: delimited.delim,
+            span: sp,
+            open_delim: delimited.delim == token::NoDelim,
+            tree_cursor: delimited.tts.iter().cloned().collect::<TokenStream>().into_trees(),
+            close_delim: delimited.delim == token::NoDelim,
+        }
+    }
+}
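
Note that for a `NoDelim` frame the constructor marks both delimiters as already emitted, so a stream that has no real delimiters (such as the parser's top-level token stream, seeded in `Parser::new` below) never produces synthetic open or close tokens.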
+
+impl TokenCursor {
+    fn next(&mut self) -> TokenAndSpan {
+        loop {
+            let tree = if !self.frame.open_delim {
+                self.frame.open_delim = true;
+                Delimited { delim: self.frame.delim, tts: Vec::new() }.open_tt(self.frame.span)
+            } else if let Some(tree) = self.frame.tree_cursor.next() {
+                tree
+            } else if !self.frame.close_delim {
+                self.frame.close_delim = true;
+                Delimited { delim: self.frame.delim, tts: Vec::new() }.close_tt(self.frame.span)
+            } else if let Some(frame) = self.stack.pop() {
+                self.frame = frame;
+                continue
+            } else {
+                return TokenAndSpan { tok: token::Eof, sp: self.frame.span }
+            };
+
+            match tree {
+                TokenTree::Token(sp, tok) => return TokenAndSpan { tok: tok, sp: sp },
+                TokenTree::Delimited(sp, ref delimited) => {
+                    let frame = TokenCursorFrame::new(sp, delimited);
+                    self.stack.push(mem::replace(&mut self.frame, frame));
+                }
+            }
+        }
+    }
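
`next()` flattens the nested token trees into a plain token sequence: each frame first yields its synthetic open delimiter, then its child trees (descending into a new frame for each `Delimited` tree), then its synthetic close delimiter; exhausted frames are popped until a token is produced or the stack runs out. A minimal self-contained sketch of the same traversal, using hypothetical stand-in types rather than the libsyntax ones:

    // Hypothetical stand-ins for TokenTree / TokenCursorFrame / TokenCursor.
    enum Tree {
        Tok(char),
        Group(char, char, Vec<Tree>), // (open delim, close delim, children)
    }

    struct Frame {
        open: Option<char>,  // pending synthetic open delimiter
        close: Option<char>, // pending synthetic close delimiter
        trees: std::vec::IntoIter<Tree>,
    }

    struct Cursor {
        frame: Frame,
        stack: Vec<Frame>,
    }

    impl Cursor {
        fn next(&mut self) -> Option<char> {
            loop {
                if let Some(open) = self.frame.open.take() {
                    return Some(open); // emit the open delimiter once
                }
                let tree = match self.frame.trees.next() {
                    Some(tree) => tree,
                    None => match self.frame.close.take() {
                        Some(close) => return Some(close), // emit the close delimiter once
                        None => match self.stack.pop() {
                            Some(parent) => { self.frame = parent; continue }
                            None => return None, // plays the role of token::Eof
                        },
                    },
                };
                match tree {
                    Tree::Tok(c) => return Some(c),
                    Tree::Group(open, close, children) => {
                        // Descend: save the current frame on the stack.
                        let frame = Frame {
                            open: Some(open),
                            close: Some(close),
                            trees: children.into_iter(),
                        };
                        self.stack.push(std::mem::replace(&mut self.frame, frame));
                    }
                }
            }
        }
    }

    fn main() {
        // The trees for `a (b)` flatten to the tokens: a ( b )
        let trees = vec![Tree::Tok('a'), Tree::Group('(', ')', vec![Tree::Tok('b')])];
        let mut cursor = Cursor {
            frame: Frame { open: None, close: None, trees: trees.into_iter() },
            stack: Vec::new(),
        };
        let mut out = String::new();
        while let Some(c) = cursor.next() { out.push(c); }
        assert_eq!(out, "a(b)");
    }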
+
+    fn next_desugared(&mut self) -> TokenAndSpan {
+        let (sp, name) = match self.next() {
+            TokenAndSpan { sp, tok: token::DocComment(name) } => (sp, name),
+            tok @ _ => return tok,
+        };
+
+        let stripped = strip_doc_comment_decoration(&name.as_str());
+
+        // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
+        // required to wrap the text.
+        let mut num_of_hashes = 0;
+        let mut count = 0;
+        for ch in stripped.chars() {
+            count = match ch {
+                '"' => 1,
+                '#' if count > 0 => count + 1,
+                _ => 0,
+            };
+            num_of_hashes = cmp::max(num_of_hashes, count);
+        }
+
+        let body = TokenTree::Delimited(sp, Rc::new(Delimited {
+            delim: token::Bracket,
+            tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
+                      TokenTree::Token(sp, token::Eq),
+                      TokenTree::Token(sp, token::Literal(
+                          token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
+        }));
+
+        self.stack.push(mem::replace(&mut self.frame, TokenCursorFrame::new(sp, &Delimited {
+            delim: token::NoDelim,
+            tts: if doc_comment_style(&name.as_str()) == AttrStyle::Inner {
+                [TokenTree::Token(sp, token::Pound), TokenTree::Token(sp, token::Not), body]
+                    .iter().cloned().collect()
+            } else {
+                [TokenTree::Token(sp, token::Pound), body].iter().cloned().collect()
+            },
+        })));
+
+        self.next()
+    }
+}
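
`next_desugared()` rewrites a doc comment into the attribute it is sugar for: `/// text` becomes the tokens of `#[doc = r"text"]`, and an inner `//!` comment additionally gets the `!` after the `#`. Because the stripped text is wrapped in a raw string literal, the scan above must find the longest `"` + `#`-run in the body and use at least that many `#`s in the delimiters. A standalone copy of the counting scan (the helper name is hypothetical):

    use std::cmp;

    // For each char: a `"` starts a run, each following `#` extends it,
    // anything else resets it; the answer is the longest run seen.
    fn hashes_needed(text: &str) -> usize {
        let (mut max, mut count) = (0, 0);
        for ch in text.chars() {
            count = match ch {
                '"' => 1,
                '#' if count > 0 => count + 1,
                _ => 0,
            };
            max = cmp::max(max, count);
        }
        max
    }

    fn main() {
        // `"#` inside the text would close an r#"..."# literal early, so two
        // hashes are needed: the comment body becomes r##"has a "#" here"##.
        assert_eq!(hashes_needed(r#"has a "quote""#), 1);
        assert_eq!(hashes_needed("has a \"#\" here"), 2);
        assert_eq!(hashes_needed("plain text"), 0);
    }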
+
 #[derive(PartialEq, Eq, Clone)]
 pub enum TokenType {
     Token(token::Token),
@@ -313,10 +409,6 @@ pub fn new(sess: &'a ParseSess,
                directory: Option<Directory>,
                desugar_doc_comments: bool)
                -> Self {
-        let tt = TokenTree::Delimited(syntax_pos::DUMMY_SP, Rc::new(Delimited {
-            delim: token::NoDelim,
-            tts: tokens,
-        }));
         let mut parser = Parser {
             sess: sess,
             token: token::Underscore,
@@ -328,7 +420,13 @@ pub fn new(sess: &'a ParseSess,
             directory: Directory { path: PathBuf::new(), ownership: DirectoryOwnership::Owned },
             root_module_name: None,
             expected_tokens: Vec::new(),
-            tts: if tt.len() > 0 { vec![(tt, 0)] } else { Vec::new() },
+            token_cursor: TokenCursor {
+                frame: TokenCursorFrame::new(syntax_pos::DUMMY_SP, &Delimited {
+                    delim: token::NoDelim,
+                    tts: tokens,
+                }),
+                stack: Vec::new(),
+            },
             desugar_doc_comments: desugar_doc_comments,
             cfg_mods: true,
         };
@@ -346,28 +444,9 @@ pub fn new(sess: &'a ParseSess,
     }
 
     fn next_tok(&mut self) -> TokenAndSpan {
-        loop {
-            let tok = if let Some((tts, i)) = self.tts.pop() {
-                let tt = tts.get_tt(i);
-                if i + 1 < tts.len() {
-                    self.tts.push((tts, i + 1));
-                }
-                if let TokenTree::Token(sp, tok) = tt {
-                    TokenAndSpan { tok: tok, sp: sp }
-                } else {
-                    self.tts.push((tt, 0));
-                    continue
-                }
-            } else {
-                TokenAndSpan { tok: token::Eof, sp: self.span }
-            };
-
-            match tok.tok {
-                token::DocComment(name) if self.desugar_doc_comments => {
-                    self.tts.push((TokenTree::Token(tok.sp, token::DocComment(name)), 0));
-                }
-                _ => return tok,
-            }
+        match self.desugar_doc_comments {
+            true => self.token_cursor.next_desugared(),
+            false => self.token_cursor.next(),
         }
     }
 
@@ -972,19 +1051,16 @@ pub fn look_ahead<R, F>(&mut self, dist: usize, f: F) -> R where
         F: FnOnce(&token::Token) -> R,
     {
         if dist == 0 {
-            return f(&self.token);
-        }
-        let mut tok = token::Eof;
-        if let Some(&(ref tts, mut i)) = self.tts.last() {
-            i += dist - 1;
-            if i < tts.len() {
-                tok = match tts.get_tt(i) {
-                    TokenTree::Token(_, tok) => tok,
-                    TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
-                };
-            }
+            return f(&self.token)
         }
-        f(&tok)
+
+        f(&match self.token_cursor.frame.tree_cursor.look_ahead(dist - 1) {
+            Some(tree) => match tree {
+                TokenTree::Token(_, tok) => tok,
+                TokenTree::Delimited(_, delimited) => token::OpenDelim(delimited.delim),
+            },
+            None => token::CloseDelim(self.token_cursor.frame.delim),
+        })
     }
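
With the cursor, lookahead no longer replays `(TokenTree, usize)` pairs: `dist - 1` indexes the current frame's tree cursor (which already sits past the tree that produced `self.token`), a `Delimited` tree at that distance is observed as its opening delimiter, and any distance past the end of the frame resolves to the frame's closing delimiter. A minimal sketch of that mapping over hypothetical simplified types:

    #[derive(Clone, Debug, PartialEq)]
    enum Tok { Ident(&'static str), Open(char), Close(char) }

    enum Tree { Tok(Tok), Group(char, Vec<Tree>) }

    fn peek(frame_close: char, trees: &[Tree], dist: usize) -> Tok {
        match trees.get(dist) {
            Some(Tree::Tok(t)) => t.clone(),
            // A delimited subtree counts as one token: its open delimiter.
            Some(Tree::Group(open, _)) => Tok::Open(*open),
            // Past the end of the frame, the next token must close it.
            None => Tok::Close(frame_close),
        }
    }

    fn main() {
        let trees = vec![Tree::Tok(Tok::Ident("x")), Tree::Group('(', vec![])];
        assert_eq!(peek(')', &trees, 0), Tok::Ident("x"));
        assert_eq!(peek(')', &trees, 1), Tok::Open('('));
        assert_eq!(peek(')', &trees, 2), Tok::Close(')'));
    }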
     pub fn fatal(&self, m: &str) -> DiagnosticBuilder<'a> {
         self.sess.span_diagnostic.struct_span_fatal(self.span, m)
@@ -2569,10 +2645,14 @@ pub fn check_unknown_macro_variable(&mut self) {
     pub fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
         match self.token {
             token::OpenDelim(..) => {
-                let tt = self.tts.pop().unwrap().0;
-                self.span = tt.span();
+                let frame = mem::replace(&mut self.token_cursor.frame,
+                                         self.token_cursor.stack.pop().unwrap());
+                self.span = frame.span;
                 self.bump();
-                return Ok(tt);
+                return Ok(TokenTree::Delimited(frame.span, Rc::new(Delimited {
+                    delim: frame.delim,
+                    tts: frame.tree_cursor.original_stream().trees().collect(),
+                })));
             },
             token::CloseDelim(_) | token::Eof => unreachable!(),
             _ => Ok(TokenTree::Token(self.span, self.bump_and_get())),
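
The `OpenDelim` arm relies on an invariant of the cursor: when the current token is an open delimiter, `next()` has already descended into that delimited sequence, so the sequence is the current frame and its parent sits on top of the stack. Popping the parent back in and calling `original_stream()` on the abandoned frame's tree cursor therefore yields the whole `Delimited` tree in one step, and the following `bump()` resumes after it.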
diff --git a/src/libsyntax/tokenstream.rs b/src/libsyntax/tokenstream.rs
index 552395945a11aa261764adc895418931d9e2e30c..083435a04336201542a00469119d5fde4846c3e6 100644
--- a/src/libsyntax/tokenstream.rs
+++ b/src/libsyntax/tokenstream.rs
 //! and a borrowed TokenStream is sufficient to build an owned TokenStream without taking
 //! ownership of the original.
 
-use ast::{self, AttrStyle, LitKind};
+use ast::{self, LitKind};
 use syntax_pos::{BytePos, Span, DUMMY_SP};
 use codemap::Spanned;
 use ext::base;
 use ext::tt::{macro_parser, quoted};
-use parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
 use parse::{self, Directory};
 use parse::token::{self, Token, Lit};
 use print::pprust;
@@ -103,72 +102,6 @@ pub enum TokenTree {
 }
 
 impl TokenTree {
-    pub fn len(&self) -> usize {
-        match *self {
-            TokenTree::Token(_, token::DocComment(name)) => {
-                match doc_comment_style(&name.as_str()) {
-                    AttrStyle::Outer => 2,
-                    AttrStyle::Inner => 3,
-                }
-            }
-            TokenTree::Delimited(_, ref delimed) => match delimed.delim {
-                token::NoDelim => delimed.tts.len(),
-                _ => delimed.tts.len() + 2,
-            },
-            TokenTree::Token(..) => 0,
-        }
-    }
-
-    pub fn get_tt(&self, index: usize) -> TokenTree {
-        match (self, index) {
-            (&TokenTree::Token(sp, token::DocComment(_)), 0) => TokenTree::Token(sp, token::Pound),
-            (&TokenTree::Token(sp, token::DocComment(name)), 1)
-                if doc_comment_style(&name.as_str()) == AttrStyle::Inner => {
-                TokenTree::Token(sp, token::Not)
-            }
-            (&TokenTree::Token(sp, token::DocComment(name)), _) => {
-                let stripped = strip_doc_comment_decoration(&name.as_str());
-
-                // Searches for the occurrences of `"#*` and returns the minimum number of `#`s
-                // required to wrap the text.
-                let num_of_hashes = stripped.chars()
-                    .scan(0, |cnt, x| {
-                        *cnt = if x == '"' {
-                            1
-                        } else if *cnt != 0 && x == '#' {
-                            *cnt + 1
-                        } else {
-                            0
-                        };
-                        Some(*cnt)
-                    })
-                    .max()
-                    .unwrap_or(0);
-
-                TokenTree::Delimited(sp, Rc::new(Delimited {
-                    delim: token::Bracket,
-                    tts: vec![TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"))),
-                              TokenTree::Token(sp, token::Eq),
-                              TokenTree::Token(sp, token::Literal(
-                                  token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))],
-                }))
-            }
-            (&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
-                delimed.tts[index].clone()
-            }
-            (&TokenTree::Delimited(span, ref delimed), _) => {
-                if index == 0 {
-                    return delimed.open_tt(span);
-                }
-                if index == delimed.tts.len() + 1 {
-                    return delimed.close_tt(span);
-                }
-                delimed.tts[index - 1].clone()
-            }
-            _ => panic!("Cannot expand a token tree"),
-        }
-    }
-
     /// Use this token tree as a matcher to parse given tts.
     pub fn parse(cx: &base::ExtCtxt, mtch: &[quoted::TokenTree], tts: &[TokenTree])
                  -> macro_parser::NamedParseResult {
@@ -416,6 +349,51 @@ fn new(stream: TokenStream) -> Self {
             }
         })
     }
+
+    pub fn original_stream(self) -> TokenStream {
+        match self.0 {
+            CursorKind::Empty => TokenStream::empty(),
+            CursorKind::Tree(tree, _) => tree.into(),
+            CursorKind::Stream(cursor) => TokenStream::concat_rc_slice({
+                cursor.stack.get(0).cloned().map(|(stream, _)| stream).unwrap_or(cursor.stream)
+            }),
+        }
+    }
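
`original_stream` recovers the stream the cursor was created from even after it has descended into nested streams: the bottom of the stack (`stack.get(0)`), when present, holds the outermost stream; otherwise the cursor never descended and its own stream is the original.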
+
+    pub fn look_ahead(&self, n: usize) -> Option<TokenTree> {
+        fn look_ahead(streams: &[TokenStream], mut n: usize) -> Result<TokenTree, usize> {
+            for stream in streams {
+                n = match stream.kind {
+                    TokenStreamKind::Tree(ref tree) if n == 0 => return Ok(tree.clone()),
+                    TokenStreamKind::Tree(..) => n - 1,
+                    TokenStreamKind::Stream(ref stream) => match look_ahead(stream, n) {
+                        Ok(tree) => return Ok(tree),
+                        Err(n) => n,
+                    },
+                    _ => n,
+                };
+            }
+
+            Err(n)
+        }
+
+        match self.0 {
+            CursorKind::Empty | CursorKind::Tree(_, true) => Err(n),
+            CursorKind::Tree(ref tree, false) => look_ahead(&[tree.clone().into()], n),
+            CursorKind::Stream(ref cursor) => {
+                look_ahead(&cursor.stream[cursor.index ..], n).or_else(|mut n| {
+                    for &(ref stream, index) in cursor.stack.iter().rev() {
+                        n = match look_ahead(&stream[index..], n) {
+                            Ok(tree) => return Ok(tree),
+                            Err(n) => n,
+                        }
+                    }
+
+                    Err(n)
+                })
+            }
+        }.ok()
+    }
 }
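
`look_ahead` threads the remaining distance through a `Result`: the inner helper short-circuits with `Ok(tree)` once the count reaches zero, and otherwise returns `Err(n)` telling the caller how many trees are still left to skip after a (possibly nested) stream is exhausted. A self-contained sketch of that error-carrying recursion, with hypothetical types:

    // Stand-in for a nested token stream.
    enum Node {
        Leaf(char),
        Seq(Vec<Node>),
    }

    // Ok(c): found the n-th leaf. Err(rest): this slice ran out with `rest`
    // leaves still to skip, so the caller continues from there.
    fn look_ahead(nodes: &[Node], mut n: usize) -> Result<char, usize> {
        for node in nodes {
            n = match *node {
                Node::Leaf(c) if n == 0 => return Ok(c),
                Node::Leaf(_) => n - 1,
                Node::Seq(ref children) => match look_ahead(children, n) {
                    Ok(c) => return Ok(c),
                    Err(rest) => rest,
                },
            };
        }
        Err(n)
    }

    fn main() {
        // a (b c) d  --  leaves in order: a b c d
        let nodes = vec![
            Node::Leaf('a'),
            Node::Seq(vec![Node::Leaf('b'), Node::Leaf('c')]),
            Node::Leaf('d'),
        ];
        assert_eq!(look_ahead(&nodes, 2), Ok('c'));
        assert_eq!(look_ahead(&nodes, 3), Ok('d'));
        assert_eq!(look_ahead(&nodes, 4), Err(0)); // one past the end
    }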
 
 impl fmt::Display for TokenStream {