//! The main parser interface.

#![feature(crate_visibility_modifier)]
#![feature(bindings_after_at)]
#![feature(iter_order_by)]
#![feature(or_patterns)]
#![feature(box_syntax)]
#![feature(box_patterns)]
use rustc_ast as ast;
use rustc_ast::token::{self, Nonterminal};
use rustc_ast::tokenstream::{self, CanSynthesizeMissingTokens, LazyTokenStream, TokenStream};
use rustc_ast::AstLike;
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
use rustc_session::parse::ParseSess;
use rustc_span::{FileName, SourceFile, Span};

use std::path::Path;

use tracing::debug;
pub const MACRO_ARGUMENTS: Option<&str> = Some("macro arguments");
#[macro_use]
pub mod parser;
use parser::{emit_unclosed_delims, make_unclosed_delims_error, Parser};
pub mod lexer;
pub mod validate_attr;
// A bunch of utility functions of the form `parse_<thing>_from_<source>`
// where <thing> includes crate, expr, item, stmt, tts, and one that
// uses a HOF to parse anything, and <source> includes file and
// `source_str`.
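
// Illustration (hypothetical usage, not part of the original file; assumes a
// caller that already owns a `ParseSess`): the same <thing> parsed from both
// <source>s.
//
//     // parse_<crate>_from_<source_str>:
//     let krate = parse_crate_from_source_str(
//         FileName::Custom("demo".into()),
//         "fn main() {}".to_string(),
//         &sess,
//     )?;
//     // parse_<crate>_from_<file>:
//     let krate = parse_crate_from_file(Path::new("src/main.rs"), &sess)?;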

/// A variant of `panictry!` that works on a `Vec<Diagnostic>` instead of a single `DiagnosticBuilder`.
macro_rules! panictry_buffer {
    ($handler:expr, $e:expr) => {{
        use rustc_errors::FatalError;
        use std::result::Result::{Err, Ok};
        match $e {
            Ok(e) => e,
            Err(errs) => {
                for e in errs {
                    $handler.emit_diagnostic(&e);
                }
                FatalError.raise()
            }
        }
    }};
}
pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> {
    let mut parser = new_parser_from_file(sess, input, None);
    parser.parse_crate_mod()
}

pub fn parse_crate_attrs_from_file<'a>(
    input: &Path,
    sess: &'a ParseSess,
) -> PResult<'a, Vec<ast::Attribute>> {
    let mut parser = new_parser_from_file(sess, input, None);
    parser.parse_inner_attributes()
}

pub fn parse_crate_from_source_str(
    name: FileName,
    source: String,
    sess: &ParseSess,
) -> PResult<'_, ast::Crate> {
    new_parser_from_source_str(sess, name, source).parse_crate_mod()
}

pub fn parse_crate_attrs_from_source_str(
    name: FileName,
    source: String,
    sess: &ParseSess,
) -> PResult<'_, Vec<ast::Attribute>> {
    new_parser_from_source_str(sess, name, source).parse_inner_attributes()
}

pub fn parse_stream_from_source_str(
    name: FileName,
    source: String,
    sess: &ParseSess,
    override_span: Option<Span>,
) -> TokenStream {
    let (stream, mut errors) =
        source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span);
    emit_unclosed_delims(&mut errors, &sess);
    stream
}

/// Creates a new parser from a source string.
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
    panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
}
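
// Sketch (hypothetical usage, not in the original source): parsing a single
// expression out of an in-memory string with the parser this function returns.
//
//     let mut parser = new_parser_from_source_str(
//         &sess,
//         FileName::anon_source_code("1 + 2"),
//         "1 + 2".to_string(),
//     );
//     let expr = parser.parse_expr()?; // `P<ast::Expr>` for `1 + 2`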

/// Creates a new parser from a source string. Returns any buffered errors from lexing the
/// initial token stream.
pub fn maybe_new_parser_from_source_str(
    sess: &ParseSess,
    name: FileName,
    source: String,
) -> Result<Parser<'_>, Vec<Diagnostic>> {
    maybe_source_file_to_parser(sess, sess.source_map().new_source_file(name, source))
}

/// Creates a new parser, handling errors as appropriate if the file doesn't exist.
/// If a span is given, that is used on an error as the source of the problem.
pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path, sp: Option<Span>) -> Parser<'a> {
    source_file_to_parser(sess, file_to_source_file(sess, path, sp))
}
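
// Sketch (hypothetical usage): loading an out-of-line module the way expansion
// does, pointing any read error at the `mod foo;` item's span. `path` and
// `mod_span` are assumed to come from the caller.
//
//     let mut parser = new_parser_from_file(sess, &path, Some(mod_span));
//     let (attrs, items, span) = parser.parse_mod(&token::Eof)?;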

/// Given a `source_file` and config, returns a parser.
fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
    panictry_buffer!(&sess.span_diagnostic, maybe_source_file_to_parser(sess, source_file))
}

/// Given a `source_file` and config, returns a parser. Returns any buffered errors from lexing
/// the initial token stream.
fn maybe_source_file_to_parser(
    sess: &ParseSess,
    source_file: Lrc<SourceFile>,
) -> Result<Parser<'_>, Vec<Diagnostic>> {
    let end_pos = source_file.end_pos;
    let (stream, unclosed_delims) = maybe_file_to_stream(sess, source_file, None)?;
    let mut parser = stream_to_parser(sess, stream, None);
    parser.unclosed_delims = unclosed_delims;
    if parser.token == token::Eof {
        // Point the EOF token at the end of the file so end-of-input
        // diagnostics have a sensible location.
        parser.token.span = Span::new(end_pos, end_pos, parser.token.span.ctxt());
    }

    Ok(parser)
}

/// Given a session, a path, and an optional span (for error reporting),
/// adds the path to the session's `source_map` and returns the new `source_file`,
/// or an error when the file can't be read.
fn try_file_to_source_file(
    sess: &ParseSess,
    path: &Path,
    spanopt: Option<Span>,
) -> Result<Lrc<SourceFile>, Diagnostic> {
    sess.source_map().load_file(path).map_err(|e| {
        let msg = format!("couldn't read {}: {}", path.display(), e);
        let mut diag = Diagnostic::new(Level::Fatal, &msg);
        if let Some(sp) = spanopt {
            diag.set_span(sp);
        }
        diag
    })
}

/// Given a session, a path, and an optional span (for error reporting),
/// adds the path to the session's `source_map` and returns the new `source_file`.
fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>) -> Lrc<SourceFile> {
    match try_file_to_source_file(sess, path, spanopt) {
        Ok(source_file) => source_file,
        Err(d) => {
            sess.span_diagnostic.emit_diagnostic(&d);
            FatalError.raise();
        }
    }
}

/// Given a `source_file`, produces a sequence of token trees.
pub fn source_file_to_stream(
    sess: &ParseSess,
    source_file: Lrc<SourceFile>,
    override_span: Option<Span>,
) -> (TokenStream, Vec<lexer::UnmatchedBrace>) {
    panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
}

/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
/// parsing the token stream.
pub fn maybe_file_to_stream(
    sess: &ParseSess,
    source_file: Lrc<SourceFile>,
    override_span: Option<Span>,
) -> Result<(TokenStream, Vec<lexer::UnmatchedBrace>), Vec<Diagnostic>> {
    let src = source_file.src.as_ref().unwrap_or_else(|| {
        sess.span_diagnostic
            .bug(&format!("cannot lex `source_file` without source: {}", source_file.name));
    });

    let (token_trees, unmatched_braces) =
        lexer::parse_token_trees(sess, src.as_str(), source_file.start_pos, override_span);

    match token_trees {
        Ok(stream) => Ok((stream, unmatched_braces)),
        Err(err) => {
            let mut buffer = Vec::with_capacity(1);
            err.buffer(&mut buffer);
            // Not using `emit_unclosed_delims` here, so that these errors can be
            // buffered with `db.buffer` instead of being emitted eagerly.
            for unmatched in unmatched_braces {
                if let Some(err) = make_unclosed_delims_error(unmatched, &sess) {
                    err.buffer(&mut buffer);
                }
            }
            Err(buffer)
        }
    }
}

/// Given a stream and the `ParseSess`, produces a parser.
pub fn stream_to_parser<'a>(
    sess: &'a ParseSess,
    stream: TokenStream,
    subparser_name: Option<&'static str>,
) -> Parser<'a> {
    Parser::new(sess, stream, false, subparser_name)
}
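
// Sketch (hypothetical usage): re-parsing a captured token stream, e.g. macro
// arguments, with a subparser name that improves "while parsing ..." diagnostics.
//
//     let mut parser = stream_to_parser(sess, stream, MACRO_ARGUMENTS);
//     let expr = parser.parse_expr()?;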

/// Runs the given subparser `f` on the tokens of the given `attr`'s item.
pub fn parse_in<'a, T>(
    sess: &'a ParseSess,
    tts: TokenStream,
    name: &'static str,
    mut f: impl FnMut(&mut Parser<'a>) -> PResult<'a, T>,
) -> PResult<'a, T> {
    let mut parser = Parser::new(sess, tts, false, Some(name));
    let result = f(&mut parser)?;
    if parser.token != token::Eof {
        parser.unexpected()?;
    }
    Ok(result)
}
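
// Sketch (hypothetical usage): parsing the contents of a delimited attribute,
// where the `token::Eof` check above rejects leftovers such as the `extra` in
// `#[demo(foo) extra]`. `parse_meta_item` is a real `Parser` method; the
// surrounding call is illustrative.
//
//     let meta = parse_in(sess, inner_tokens, "attribute", |p| p.parse_meta_item())?;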

// NOTE(Centril): The following probably shouldn't be here but it acknowledges the
// fact that architecturally, we are using parsing (read on below to understand why).

pub fn nt_to_tokenstream(
    nt: &Nonterminal,
    sess: &ParseSess,
    synthesize_tokens: CanSynthesizeMissingTokens,
) -> TokenStream {
    // A `Nonterminal` is often a parsed AST item. At this point we now
    // need to convert the parsed AST to an actual token stream, i.e.
    // un-parse it, basically.
    //
    // Unfortunately there's not really a great way to do that in a
    // guaranteed lossless fashion right now. The fallback here is to just
    // stringify the AST node and reparse it, but this loses all span
    // information.
    //
    // As a result, some AST nodes are annotated with the token stream they
    // came from. Here we attempt to extract these lossless token streams
    // before we fall back to the stringification.

    let convert_tokens =
        |tokens: Option<&LazyTokenStream>| tokens.as_ref().map(|t| t.create_token_stream());

    let tokens = match *nt {
        Nonterminal::NtItem(ref item) => prepend_attrs(sess, &item.attrs, nt, item.tokens.as_ref()),
        Nonterminal::NtBlock(ref block) => convert_tokens(block.tokens.as_ref()),
        Nonterminal::NtStmt(ref stmt) => {
            let do_prepend = |tokens| prepend_attrs(sess, stmt.attrs(), nt, tokens);
            if let ast::StmtKind::Empty = stmt.kind {
                // An empty statement has no recorded tokens of its own, so
                // synthesize the lone `;`.
                let tokens: TokenStream =
                    tokenstream::TokenTree::token(token::Semi, stmt.span).into();
                do_prepend(Some(&LazyTokenStream::new(tokens)))
            } else {
                do_prepend(stmt.tokens())
            }
        }
        Nonterminal::NtPat(ref pat) => convert_tokens(pat.tokens.as_ref()),
        Nonterminal::NtTy(ref ty) => convert_tokens(ty.tokens.as_ref()),
        Nonterminal::NtIdent(ident, is_raw) => {
            Some(tokenstream::TokenTree::token(token::Ident(ident.name, is_raw), ident.span).into())
        }
        Nonterminal::NtLifetime(ident) => {
            Some(tokenstream::TokenTree::token(token::Lifetime(ident.name), ident.span).into())
        }
        Nonterminal::NtMeta(ref attr) => convert_tokens(attr.tokens.as_ref()),
        Nonterminal::NtPath(ref path) => convert_tokens(path.tokens.as_ref()),
        Nonterminal::NtVis(ref vis) => convert_tokens(vis.tokens.as_ref()),
        Nonterminal::NtTT(ref tt) => Some(tt.clone().into()),
        Nonterminal::NtExpr(ref expr) | Nonterminal::NtLiteral(ref expr) => {
            if expr.tokens.is_none() {
                debug!("missing tokens for expr {:?}", expr);
            }
            prepend_attrs(sess, &expr.attrs, nt, expr.tokens.as_ref())
        }
    };

    if let Some(tokens) = tokens {
        tokens
    } else if matches!(synthesize_tokens, CanSynthesizeMissingTokens::Yes) {
        fake_token_stream(sess, nt)
    } else {
        panic!("Missing tokens for nt at {:?}: {:?}", nt.span(), pprust::nonterminal_to_string(nt));
    }
}
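
// Sketch (hypothetical usage): converting a captured `$e:expr` nonterminal back
// into tokens, refusing lossy synthesis so missing tokens surface as a bug
// instead of silently degrading spans.
//
//     let tts = nt_to_tokenstream(&nt, sess, CanSynthesizeMissingTokens::No);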

pub fn fake_token_stream(sess: &ParseSess, nt: &Nonterminal) -> TokenStream {
    let source = pprust::nonterminal_to_string(nt);
    let filename = FileName::macro_expansion_source_code(&source);
    parse_stream_from_source_str(filename, source, sess, Some(nt.span()))
}

fn prepend_attrs(
    sess: &ParseSess,
    attrs: &[ast::Attribute],
    nt: &Nonterminal,
    tokens: Option<&tokenstream::LazyTokenStream>,
) -> Option<tokenstream::TokenStream> {
    if attrs.is_empty() {
        return Some(tokens?.create_token_stream());
    }

    let mut builder = tokenstream::TokenStreamBuilder::new();
    for attr in attrs {
        // FIXME: Correctly handle tokens for inner attributes.
        // For now, we fall back to reparsing the original AST node.
        if attr.style == ast::AttrStyle::Inner {
            return Some(fake_token_stream(sess, nt));
        }
        builder.push(attr.tokens());
    }
    builder.push(tokens?.create_token_stream());
    Some(builder.build())
}
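
// Illustration (not part of the original file): for an item like
// `#[inline] fn f() {}`, the builder pushes the `#[inline]` attribute's tokens
// first and then the item's own recorded tokens, so the rebuilt stream matches
// what the parser originally consumed.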