use std::path::Path;
use syntax::ast;
-use syntax::parse::lexer::{self, Reader, StringReader};
+use syntax::parse::filemap_to_tts;
+use syntax::parse::lexer::{self, StringReader};
use syntax::parse::token::{self, Token};
-use syntax::parse::parser::Parser;
use syntax::symbol::keywords;
use syntax::tokenstream::TokenTree;
use syntax_pos::*;
}
fn span_to_tts(&self, span: Span) -> Vec<TokenTree> {
-    let srdr = self.retokenise_span(span);
-    let mut p = Parser::new(&self.sess.parse_sess, Box::new(srdr), None, false);
-    p.parse_all_token_trees().expect("Couldn't re-parse span")
+    // Re-lex the snippet for `span` into token trees through the
+    // lexer-based `filemap_to_tts`, instead of constructing a full `Parser`.
+    // The synthetic filename is only a label for the anonymous filemap.
+    let filename = String::from("<anon-dxr>");
+    let filemap = self.sess.codemap().new_filemap(filename, None, self.snippet(span));
+    filemap_to_tts(&self.sess.parse_sess, filemap)
}
// Re-parses a path and returns the span for the last identifier in the path
use feature_gate::{self, Features};
use fold;
use fold::*;
-use parse::{ParseSess, DirectoryOwnership, PResult, lexer};
+use parse::{ParseSess, DirectoryOwnership, PResult, filemap_to_tts};
use parse::parser::Parser;
use parse::token;
use print::pprust;
}
fn string_to_tts(text: String, parse_sess: &ParseSess) -> Vec<TokenTree> {
-    let filemap = parse_sess.codemap()
-        .new_filemap(String::from("<macro expansion>"), None, text);
-
-    let lexer = lexer::StringReader::new(parse_sess, filemap);
-    let mut parser = Parser::new(parse_sess, Box::new(lexer), None, false);
-    panictry!(parser.parse_all_token_trees())
+    // Wrap `text` in a fresh filemap and lex it straight to token trees;
+    // `filemap_to_tts` now does the work this function previously routed
+    // through a throwaway `Parser`.
+    let filename = String::from("<macro expansion>");
+    filemap_to_tts(parse_sess, parse_sess.codemap().new_filemap(filename, None, text))
}
impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
use ast;
use codemap::CodeMap;
use syntax_pos::{BytePos, CharPos, Pos};
-use parse::lexer::is_block_doc_comment;
-use parse::lexer::{StringReader, TokenAndSpan};
-use parse::lexer::{is_pattern_whitespace, Reader};
-use parse::{lexer, ParseSess};
+use parse::lexer::{is_block_doc_comment, is_pattern_whitespace};
+use parse::lexer::{self, ParseSess, StringReader, TokenAndSpan};
use print::pprust;
use str::char_at;
pub use ext::tt::transcribe::{TtReader, new_tt_reader};
pub mod comments;
+mod tokentrees;
mod unicode_chars;
pub trait Reader {
// cache a direct reference to the source text, so that we don't have to
// retrieve it via `self.filemap.src.as_ref().unwrap()` all the time.
source_text: Rc<String>,
+    /// Current token and its span, maintained by `try_real_token`.
+    token: token::Token,
+    span: Span,
+    /// Stack of open delimiters and their spans. Used for error messages.
+    open_braces: Vec<(token::DelimToken, Span)>,
}
-impl<'a> Reader for StringReader<'a> {
+impl<'a> StringReader<'a> {
+    // Fetch the next raw token, aborting via `unwrap_or_abort` on a lexing
+    // error (these helpers were previously behind the `Reader` trait).
+    fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
+        let res = self.try_next_token();
+        self.unwrap_or_abort(res)
+    }
+    // Turn a lexing failure into a fatal abort, emitting any buffered
+    // fatal errors first so the user sees what went wrong.
+    fn unwrap_or_abort(&mut self, res: Result<TokenAndSpan, ()>) -> TokenAndSpan {
+        match res {
+            Ok(tok) => tok,
+            Err(_) => {
+                self.emit_fatal_errors();
+                panic!(FatalError);
+            }
+        }
+    }
+    // Like `try_next_token`, but skips whitespace, comments and shebang
+    // lines, and records the resulting token and span in `self.token` /
+    // `self.span` for the token-tree parser to inspect.
+    fn try_real_token(&mut self) -> Result<TokenAndSpan, ()> {
+        let mut t = self.try_next_token()?;
+        loop {
+            match t.tok {
+                token::Whitespace | token::Comment | token::Shebang(_) => {
+                    t = self.try_next_token()?;
+                }
+                _ => break,
+            }
+        }
+        self.token = t.tok.clone();
+        self.span = t.sp;
+        Ok(t)
+    }
+    // Infallible wrapper around `try_real_token`; aborts on lexing errors.
+    pub fn real_token(&mut self) -> TokenAndSpan {
+        let res = self.try_real_token();
+        self.unwrap_or_abort(res)
+    }
fn is_eof(&self) -> bool {
if self.ch.is_none() {
return true;
fn fatal(&self, m: &str) -> FatalError {
self.fatal_span(self.peek_span, m)
}
- fn err(&self, m: &str) {
- self.err_span(self.peek_span, m)
- }
fn emit_fatal_errors(&mut self) {
for err in &mut self.fatal_errs {
err.emit();
peek_span: syntax_pos::DUMMY_SP,
source_text: source_text,
fatal_errs: Vec::new(),
+ token: token::Eof,
+ span: syntax_pos::DUMMY_SP,
+ open_braces: Vec::new(),
}
}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use print::pprust::token_to_string;
+use parse::lexer::StringReader;
+use parse::{token, PResult};
+use syntax_pos::Span;
+use tokenstream::{Delimited, TokenTree};
+
+use std::rc::Rc;
+
+impl<'a> StringReader<'a> {
+    // Parse a stream of tokens into a list of `TokenTree`s, up to an `Eof`.
+    //
+    // NOTE(review): assumes the caller has already primed `self.token` /
+    // `self.span` with one call to `real_token()` (as `filemap_to_tts`
+    // does); otherwise the loop would see the initial `token::Eof` and
+    // return an empty list.
+    pub fn parse_all_token_trees(&mut self) -> PResult<'a, Vec<TokenTree>> {
+        let mut tts = Vec::new();
+        while self.token != token::Eof {
+            tts.push(self.parse_token_tree()?);
+        }
+        Ok(tts)
+    }
+
+    // Parse a stream of tokens into a list of `TokenTree`s, up to a `CloseDelim`.
+    // The close delimiter itself is left in `self.token` for the caller to
+    // inspect; errors inside the group are emitted here and end the group.
+    fn parse_token_trees_until_close_delim(&mut self) -> Vec<TokenTree> {
+        let mut tts = vec![];
+        loop {
+            if let token::CloseDelim(..) = self.token {
+                return tts;
+            }
+            match self.parse_token_tree() {
+                Ok(tt) => tts.push(tt),
+                Err(mut e) => {
+                    // Recover: report the error and treat it as the end of
+                    // this delimited group.
+                    e.emit();
+                    return tts;
+                }
+            }
+        }
+    }
+
+    // Parse a single `TokenTree`: either one ordinary token, or an entire
+    // delimited group (whose contents are parsed recursively), with
+    // recovery for unmatched and mismatched delimiters.
+    fn parse_token_tree(&mut self) -> PResult<'a, TokenTree> {
+        match self.token {
+            token::Eof => {
+                // EOF while delimiters are still open: point at each
+                // unclosed opener recorded in `open_braces`.
+                let msg = "this file contains an un-closed delimiter";
+                let mut err = self.sess.span_diagnostic.struct_span_err(self.span, msg);
+                for &(_, sp) in &self.open_braces {
+                    err.span_help(sp, "did you mean to close this delimiter?");
+                }
+                Err(err)
+            },
+            token::OpenDelim(delim) => {
+                // The span for beginning of the delimited section
+                let pre_span = self.span;
+
+                // Parse the open delimiter.
+                self.open_braces.push((delim, self.span));
+                let open_span = self.span;
+                self.real_token();
+
+                // Parse the token trees within the delimiters.
+                // We stop at any delimiter so we can try to recover if the user
+                // uses an incorrect delimiter.
+                let tts = self.parse_token_trees_until_close_delim();
+
+                let close_span = self.span;
+                // Expand to cover the entire delimited token tree
+                let span = Span { hi: close_span.hi, ..pre_span };
+
+                match self.token {
+                    // Correct delimiter.
+                    token::CloseDelim(d) if d == delim => {
+                        self.open_braces.pop().unwrap();
+
+                        // Parse the close delimiter.
+                        self.real_token();
+                    }
+                    // Incorrect delimiter.
+                    token::CloseDelim(other) => {
+                        let token_str = token_to_string(&self.token);
+                        let msg = format!("incorrect close delimiter: `{}`", token_str);
+                        let mut err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
+                        // This is a conservative error: only report the last unclosed delimiter.
+                        // The previous unclosed delimiters could actually be closed! The parser
+                        // just hasn't gotten to them yet.
+                        if let Some(&(_, sp)) = self.open_braces.last() {
+                            err.span_note(sp, "unclosed delimiter");
+                        };
+                        err.emit();
+
+                        self.open_braces.pop().unwrap();
+
+                        // If the incorrect delimiter matches an earlier opening
+                        // delimiter, then don't consume it (it can be used to
+                        // close the earlier one). Otherwise, consume it.
+                        // E.g., we try to recover from:
+                        // fn foo() {
+                        //     bar(baz(
+                        // } // Incorrect delimiter but matches the earlier `{`
+                        if !self.open_braces.iter().any(|&(b, _)| b == other) {
+                            self.real_token();
+                        }
+                    }
+                    token::Eof => {
+                        // Silently recover, the EOF token will be seen again
+                        // and an error emitted then. Thus we don't pop from
+                        // self.open_braces here.
+                    },
+                    _ => {}
+                }
+
+                Ok(TokenTree::Delimited(span, Rc::new(Delimited {
+                    delim: delim,
+                    open_span: open_span,
+                    tts: tts,
+                    close_span: close_span,
+                })))
+            },
+            token::CloseDelim(_) => {
+                // An unexpected closing delimiter (i.e., there is no
+                // matching opening delimiter).
+                let token_str = token_to_string(&self.token);
+                let msg = format!("unexpected close delimiter: `{}`", token_str);
+                let err = self.sess.span_diagnostic.struct_span_err(self.span, &msg);
+                Err(err)
+            },
+            _ => {
+                // Any other single token becomes a leaf tree; advance past it.
+                let tt = TokenTree::Token(self.span, self.token.clone());
+                self.real_token();
+                Ok(tt)
+            }
+        }
+    }
+}
}
/// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>)
-                      -> Vec<tokenstream::TokenTree> {
-    // it appears to me that the cfg doesn't matter here... indeed,
-    // parsing tt's probably shouldn't require a parser at all.
-    let srdr = lexer::StringReader::new(sess, filemap);
-    let mut p1 = Parser::new(sess, Box::new(srdr), None, false);
-    panictry!(p1.parse_all_token_trees())
+pub fn filemap_to_tts(sess: &ParseSess, filemap: Rc<FileMap>) -> Vec<tokenstream::TokenTree> {
+    let mut srdr = lexer::StringReader::new(sess, filemap);
+    // Prime `srdr.token`/`srdr.span` with the first non-trivia token;
+    // `parse_all_token_trees` inspects them before fetching more tokens.
+    srdr.real_token();
+    panictry!(srdr.parse_all_token_trees())
}
/// Given tts and the ParseSess, produce a parser