X-Git-Url: https://git.lizzy.rs/?a=blobdiff_plain;f=src%2Flibsyntax%2Fparse%2Fmod.rs;h=9088f929372c999737844fbc147189a6dc34c9ef;hb=299d696b91e833f01f37e97b69767fcf6f5cccf0;hp=1aac8bbb7aa46589d9d07a3af0017a1757f838ef;hpb=a3cae5740cca893204c4d6d9fe43506f0eec8441;p=rust.git

diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 1aac8bbb7aa..6a8a64f369d 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -8,21 +8,22 @@
 use crate::parse::parser::emit_unclosed_delims;
 use crate::parse::token::TokenKind;
 use crate::tokenstream::{TokenStream, TokenTree};
-use crate::diagnostics::plugin::ErrorMap;
 use crate::print::pprust;
 use crate::symbol::Symbol;
 
 use errors::{Applicability, FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
+use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use rustc_data_structures::sync::{Lrc, Lock, Once};
 use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
 use syntax_pos::edition::Edition;
+use syntax_pos::hygiene::ExpnId;
 
-use rustc_data_structures::fx::{FxHashSet, FxHashMap};
 use std::borrow::Cow;
 use std::path::{Path, PathBuf};
 use std::str;
 
-pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;
+#[cfg(test)]
+mod tests;
 
 #[macro_use]
 pub mod parser;
@@ -35,6 +36,22 @@ crate mod literal;
 crate mod unescape_error_reporting;
 
+pub type PResult<'a, T> = Result<T, DiagnosticBuilder<'a>>;
+
+/// Collected spans during parsing for places where a certain feature was
+/// used and should be feature gated accordingly in `check_crate`.
+#[derive(Default)]
+pub struct GatedSpans {
+    /// Spans collected for gating `let_chains`, e.g. `if a && let b = c {}`.
+    pub let_chains: Lock<Vec<Span>>,
+    /// Spans collected for gating `async_closure`, e.g. `async || ..`.
+    pub async_closure: Lock<Vec<Span>>,
+    /// Spans collected for gating `yield e?` expressions (`generators` gate).
+    pub yields: Lock<Vec<Span>>,
+    /// Spans collected for gating `or_patterns`, e.g. `Some(Foo | Bar)`.
+    pub or_patterns: Lock<Vec<Span>>,
+}
+
 /// Info about a parsing session.
 pub struct ParseSess {
     pub span_diagnostic: Handler,
@@ -44,8 +61,6 @@ pub struct ParseSess {
     pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
     /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
     pub raw_identifier_spans: Lock<Vec<Span>>,
-    /// The registered diagnostics codes.
-    crate registered_diagnostics: Lock<ErrorMap>,
     /// Used to determine and report recursive module inclusions.
     included_mod_stack: Lock<Vec<PathBuf>>,
     source_map: Lrc<SourceMap>,
@@ -54,41 +69,36 @@ pub struct ParseSess {
     /// operation token that followed it, but that the parser cannot identify without further
     /// analysis.
     pub ambiguous_block_expr_parse: Lock<FxHashMap<Span, Span>>,
-    pub param_attr_spans: Lock<Vec<Span>>,
-    // Places where `let` exprs were used and should be feature gated according to `let_chains`.
-    pub let_chains_spans: Lock<Vec<Span>>,
-    // Places where `async || ..` exprs were used and should be feature gated.
-    pub async_closure_spans: Lock<Vec<Span>>,
     pub injected_crate_name: Once<Symbol>,
+    pub gated_spans: GatedSpans,
 }
 
 impl ParseSess {
     pub fn new(file_path_mapping: FilePathMapping) -> Self {
         let cm = Lrc::new(SourceMap::new(file_path_mapping));
-        let handler = Handler::with_tty_emitter(ColorConfig::Auto,
-                                                true,
-                                                None,
-                                                Some(cm.clone()));
+        let handler = Handler::with_tty_emitter(
+            ColorConfig::Auto,
+            true,
+            None,
+            Some(cm.clone()),
+        );
         ParseSess::with_span_handler(handler, cm)
     }
 
-    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> ParseSess {
-        ParseSess {
+    pub fn with_span_handler(handler: Handler, source_map: Lrc<SourceMap>) -> Self {
+        Self {
             span_diagnostic: handler,
             unstable_features: UnstableFeatures::from_environment(),
             config: FxHashSet::default(),
+            edition: ExpnId::root().expn_data().edition,
             missing_fragment_specifiers: Lock::new(FxHashSet::default()),
             raw_identifier_spans: Lock::new(Vec::new()),
-            registered_diagnostics: Lock::new(ErrorMap::new()),
             included_mod_stack: Lock::new(vec![]),
             source_map,
             buffered_lints: Lock::new(vec![]),
-            edition: Edition::from_session(),
             ambiguous_block_expr_parse: Lock::new(FxHashMap::default()),
-            param_attr_spans: Lock::new(Vec::new()),
-            let_chains_spans: Lock::new(Vec::new()),
-            async_closure_spans: Lock::new(Vec::new()),
             injected_crate_name: Once::new(),
+            gated_spans: GatedSpans::default(),
         }
     }
@@ -141,17 +151,17 @@ pub struct Directory<'a> {
 #[derive(Copy, Clone)]
 pub enum DirectoryOwnership {
     Owned {
-        // None if `mod.rs`, `Some("foo")` if we're in `foo.rs`
+        // None if `mod.rs`, `Some("foo")` if we're in `foo.rs`.
         relative: Option<ast::Ident>,
     },
     UnownedViaBlock,
     UnownedViaMod(bool /* legacy warnings? */),
 }
 
-// a bunch of utility functions of the form parse_<thing>_from_<source>
+// A bunch of utility functions of the form `parse_<thing>_from_<source>`
 // where <thing> includes crate, expr, item, stmt, tts, and one that
 // uses a HOF to parse anything, and includes file and
-// source_str.
+// `source_str`.
 
 pub fn parse_crate_from_file<'a>(input: &Path, sess: &'a ParseSess) -> PResult<'a, ast::Crate> {
     let mut parser = new_parser_from_file(sess, input);
@@ -205,14 +215,13 @@ pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source
     Ok(parser)
 }
 
-/// Creates a new parser, handling errors as appropriate
-/// if the file doesn't exist
+/// Creates a new parser, handling errors as appropriate if the file doesn't exist.
 pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
     source_file_to_parser(sess, file_to_source_file(sess, path, None))
 }
 
-/// Creates a new parser, returning buffered diagnostics if the file doesn't
-/// exist or from lexing the initial token stream.
+/// Creates a new parser, returning buffered diagnostics if the file doesn't exist,
+/// or from lexing the initial token stream.
 pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path)
     -> Result<Parser<'a>, Vec<Diagnostic>> {
     let file = try_file_to_source_file(sess, path, None).map_err(|db| vec![db])?;
@@ -220,8 +229,8 @@ pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path)
 }
 
 /// Given a session, a crate config, a path, and a span, add
-/// the file at the given path to the source_map, and return a parser.
-/// On an error, use the given span as the source of the problem.
+/// the file at the given path to the `source_map`, and returns a parser.
+/// On an error, uses the given span as the source of the problem.
 pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
                                     path: &Path,
                                     directory_ownership: DirectoryOwnership,
@@ -233,13 +242,13 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
     p
 }
 
-/// Given a source_file and config, return a parser
+/// Given a `source_file` and config, returns a parser.
 fn source_file_to_parser(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Parser<'_> {
     panictry_buffer!(&sess.span_diagnostic,
                      maybe_source_file_to_parser(sess, source_file))
 }
 
-/// Given a source_file and config, return a parser. Returns any buffered errors from lexing the
+/// Given a `source_file` and config, return a parser. Returns any buffered errors from lexing the
 /// initial token stream.
 fn maybe_source_file_to_parser(
     sess: &ParseSess,
@@ -256,14 +265,14 @@ fn maybe_source_file_to_parser(
     Ok(parser)
 }
 
-// must preserve old name for now, because quote! from the *existing*
-// compiler expands into it
+// Must preserve old name for now, because `quote!` from the *existing*
+// compiler expands into it.
 pub fn new_parser_from_tts(sess: &ParseSess, tts: Vec<TokenTree>) -> Parser<'_> {
     stream_to_parser(sess, tts.into_iter().collect(), crate::MACRO_ARGUMENTS)
 }
 
-// base abstractions
+// Base abstractions
 
 /// Given a session and a path and an optional span (for error reporting),
 /// add the path to the session's source_map and return the new source_file or
@@ -282,7 +291,7 @@ fn try_file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a session and a path and an optional span (for error reporting),
-/// add the path to the session's `source_map` and return the new `source_file`.
+/// adds the path to the session's `source_map` and returns the new `source_file`.
 fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
                        -> Lrc<SourceFile> {
     match try_file_to_source_file(sess, path, spanopt) {
@@ -294,7 +303,7 @@ fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
     }
 }
 
-/// Given a source_file, produces a sequence of token trees.
+/// Given a `source_file`, produces a sequence of token trees.
 pub fn source_file_to_stream(
     sess: &ParseSess,
     source_file: Lrc<SourceFile>,
@@ -338,7 +347,7 @@ pub fn maybe_file_to_stream(
     }
 }
 
-/// Given stream and the `ParseSess`, produces a parser.
+/// Given a stream and the `ParseSess`, produces a parser.
 pub fn stream_to_parser<'a>(
     sess: &'a ParseSess,
     stream: TokenStream,
@@ -347,7 +356,7 @@ pub fn stream_to_parser<'a>(
     Parser::new(sess, stream, None, true, false, subparser_name)
 }
 
-/// Given stream, the `ParseSess` and the base directory, produces a parser.
+/// Given a stream, the `ParseSess` and the base directory, produces a parser.
 ///
 /// Use this function when you are creating a parser from the token stream
 /// and also care about the current working directory of the parser (e.g.,
@@ -368,7 +377,7 @@ pub fn stream_to_parser_with_base_dir<'a>(
 
 /// A sequence separator.
 pub struct SeqSep {
-    /// The seperator token.
+    /// The separator token.
     pub sep: Option<TokenKind>,
     /// `true` if a trailing separator is allowed.
     pub trailing_sep_allowed: bool,
@@ -389,294 +398,3 @@ pub fn none() -> SeqSep {
         }
     }
 }
-
-#[cfg(test)]
-mod tests {
-    use super::*;
-    use crate::ast::{self, Name, PatKind};
-    use crate::attr::first_attr_value_str_by_name;
-    use crate::ptr::P;
-    use crate::parse::token::Token;
-    use crate::print::pprust::item_to_string;
-    use crate::symbol::{kw, sym};
-    use crate::tokenstream::{DelimSpan, TokenTree};
-    use crate::util::parser_testing::string_to_stream;
-    use crate::util::parser_testing::{string_to_expr, string_to_item};
-    use crate::with_default_globals;
-    use syntax_pos::{Span, BytePos, Pos, NO_EXPANSION};
-
-    /// Parses an item.
-    ///
-    /// Returns `Ok(Some(item))` when successful, `Ok(None)` when no item was found, and `Err`
-    /// when a syntax error occurred.
-    fn parse_item_from_source_str(name: FileName, source: String, sess: &ParseSess)
-                                        -> PResult<'_, Option<P<ast::Item>>> {
-        new_parser_from_source_str(sess, name, source).parse_item()
-    }
-
-    // produce a syntax_pos::span
-    fn sp(a: u32, b: u32) -> Span {
-        Span::new(BytePos(a), BytePos(b), NO_EXPANSION)
-    }
-
-    #[should_panic]
-    #[test] fn bad_path_expr_1() {
-        with_default_globals(|| {
-            string_to_expr("::abc::def::return".to_string());
-        })
-    }
-
-    // check the token-tree-ization of macros
-    #[test]
-    fn string_to_tts_macro () {
-        with_default_globals(|| {
-            let tts: Vec<_> =
-                string_to_stream("macro_rules! zip (($a)=>($a))".to_string()).trees().collect();
-            let tts: &[TokenTree] = &tts[..];
-
-            match tts {
-                [
-                    TokenTree::Token(Token { kind: token::Ident(name_macro_rules, false), .. }),
-                    TokenTree::Token(Token { kind: token::Not, .. }),
-                    TokenTree::Token(Token { kind: token::Ident(name_zip, false), .. }),
-                    TokenTree::Delimited(_, macro_delim, macro_tts)
-                ]
-                if name_macro_rules == &sym::macro_rules && name_zip.as_str() == "zip" => {
-                    let tts = &macro_tts.trees().collect::<Vec<_>>();
-                    match &tts[..] {
-                        [
-                            TokenTree::Delimited(_, first_delim, first_tts),
-                            TokenTree::Token(Token { kind: token::FatArrow, .. }),
-                            TokenTree::Delimited(_, second_delim, second_tts),
-                        ]
-                        if macro_delim == &token::Paren => {
-                            let tts = &first_tts.trees().collect::<Vec<_>>();
-                            match &tts[..] {
-                                [
-                                    TokenTree::Token(Token { kind: token::Dollar, .. }),
-                                    TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
-                                ]
-                                if first_delim == &token::Paren && name.as_str() == "a" => {},
-                                _ => panic!("value 3: {:?} {:?}", first_delim, first_tts),
-                            }
-                            let tts = &second_tts.trees().collect::<Vec<_>>();
-                            match &tts[..] {
-                                [
-                                    TokenTree::Token(Token { kind: token::Dollar, .. }),
-                                    TokenTree::Token(Token { kind: token::Ident(name, false), .. }),
-                                ]
-                                if second_delim == &token::Paren && name.as_str() == "a" => {},
-                                _ => panic!("value 4: {:?} {:?}", second_delim, second_tts),
-                            }
-                        },
-                        _ => panic!("value 2: {:?} {:?}", macro_delim, macro_tts),
-                    }
-                },
-                _ => panic!("value: {:?}",tts),
-            }
-        })
-    }
-
-    #[test]
-    fn string_to_tts_1() {
-        with_default_globals(|| {
-            let tts = string_to_stream("fn a (b : i32) { b; }".to_string());
-
-            let expected = TokenStream::new(vec![
-                TokenTree::token(token::Ident(kw::Fn, false), sp(0, 2)).into(),
-                TokenTree::token(token::Ident(Name::intern("a"), false), sp(3, 4)).into(),
-                TokenTree::Delimited(
-                    DelimSpan::from_pair(sp(5, 6), sp(13, 14)),
-                    token::DelimToken::Paren,
-                    TokenStream::new(vec![
-                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(6, 7)).into(),
-                        TokenTree::token(token::Colon, sp(8, 9)).into(),
-                        TokenTree::token(token::Ident(sym::i32, false), sp(10, 13)).into(),
-                    ]).into(),
-                ).into(),
-                TokenTree::Delimited(
-                    DelimSpan::from_pair(sp(15, 16), sp(20, 21)),
-                    token::DelimToken::Brace,
-                    TokenStream::new(vec![
-                        TokenTree::token(token::Ident(Name::intern("b"), false), sp(17, 18)).into(),
-                        TokenTree::token(token::Semi, sp(18, 19)).into(),
-                    ]).into(),
-                ).into()
-            ]);
-
-            assert_eq!(tts, expected);
-        })
-    }
-
-    #[test] fn parse_use() {
-        with_default_globals(|| {
-            let use_s = "use foo::bar::baz;";
-            let vitem = string_to_item(use_s.to_string()).unwrap();
-            let vitem_s = item_to_string(&vitem);
-            assert_eq!(&vitem_s[..], use_s);
-
-            let use_s = "use foo::bar as baz;";
-            let vitem = string_to_item(use_s.to_string()).unwrap();
-            let vitem_s = item_to_string(&vitem);
-            assert_eq!(&vitem_s[..], use_s);
-        })
-    }
-
-    #[test] fn parse_extern_crate() {
-        with_default_globals(|| {
-            let ex_s = "extern crate foo;";
-            let vitem = string_to_item(ex_s.to_string()).unwrap();
-            let vitem_s = item_to_string(&vitem);
-            assert_eq!(&vitem_s[..], ex_s);
-
-            let ex_s = "extern crate foo as bar;";
-            let vitem = string_to_item(ex_s.to_string()).unwrap();
-            let vitem_s = item_to_string(&vitem);
-            assert_eq!(&vitem_s[..], ex_s);
-        })
-    }
-
-    fn get_spans_of_pat_idents(src: &str) -> Vec<Span> {
-        let item = string_to_item(src.to_string()).unwrap();
-
-        struct PatIdentVisitor {
-            spans: Vec<Span>
-        }
-        impl<'a> crate::visit::Visitor<'a> for PatIdentVisitor {
-            fn visit_pat(&mut self, p: &'a ast::Pat) {
-                match p.node {
-                    PatKind::Ident(_ , ref spannedident, _) => {
-                        self.spans.push(spannedident.span.clone());
-                    }
-                    _ => {
-                        crate::visit::walk_pat(self, p);
-                    }
-                }
-            }
-        }
-        let mut v = PatIdentVisitor { spans: Vec::new() };
-        crate::visit::walk_item(&mut v, &item);
-        return v.spans;
-    }
-
-    #[test] fn span_of_self_arg_pat_idents_are_correct() {
-        with_default_globals(|| {
-
-            let srcs = ["impl z { fn a (&self, &myarg: i32) {} }",
-                        "impl z { fn a (&mut self, &myarg: i32) {} }",
-                        "impl z { fn a (&'a self, &myarg: i32) {} }",
-                        "impl z { fn a (self, &myarg: i32) {} }",
-                        "impl z { fn a (self: Foo, &myarg: i32) {} }",
-                        ];
-
-            for &src in &srcs {
-                let spans = get_spans_of_pat_idents(src);
-                let (lo, hi) = (spans[0].lo(), spans[0].hi());
-                assert!("self" == &src[lo.to_usize()..hi.to_usize()],
-                        "\"{}\" != \"self\". src=\"{}\"",
-                        &src[lo.to_usize()..hi.to_usize()], src)
-            }
-        })
-    }
-
-    #[test] fn parse_exprs () {
-        with_default_globals(|| {
-            // just make sure that they parse....
-            string_to_expr("3 + 4".to_string());
-            string_to_expr("a::z.froob(b,&(987+3))".to_string());
-        })
-    }
-
-    #[test] fn attrs_fix_bug () {
-        with_default_globals(|| {
-            string_to_item("pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
-                   -> Result<Box<Writer>, String> {
-    #[cfg(windows)]
-    fn wb() -> c_int {
-      (O_WRONLY | libc::consts::os::extra::O_BINARY) as c_int
-    }
-
-    #[cfg(unix)]
-    fn wb() -> c_int { O_WRONLY as c_int }
-
-    let mut fflags: c_int = wb();
-}".to_string());
-        })
-    }
-
-    #[test] fn crlf_doc_comments() {
-        with_default_globals(|| {
-            let sess = ParseSess::new(FilePathMapping::empty());
-
-            let name_1 = FileName::Custom("crlf_source_1".to_string());
-            let source = "/// doc comment\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name_1, source, &sess)
-                .unwrap().unwrap();
-            let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
-            assert_eq!(doc.as_str(), "/// doc comment");
-
-            let name_2 = FileName::Custom("crlf_source_2".to_string());
-            let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name_2, source, &sess)
-                .unwrap().unwrap();
-            let docs = item.attrs.iter().filter(|a| a.path == sym::doc)
-                .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
-            let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
-            assert_eq!(&docs[..], b);
-
-            let name_3 = FileName::Custom("clrf_source_3".to_string());
-            let source = "/** doc comment\r\n * with CRLF */\r\nfn foo() {}".to_string();
-            let item = parse_item_from_source_str(name_3, source, &sess).unwrap().unwrap();
-            let doc = first_attr_value_str_by_name(&item.attrs, sym::doc).unwrap();
-            assert_eq!(doc.as_str(), "/** doc comment\n * with CRLF */");
-        });
-    }
-
-    #[test]
-    fn ttdelim_span() {
-        fn parse_expr_from_source_str(
-            name: FileName, source: String, sess: &ParseSess
-        ) -> PResult<'_, P<ast::Expr>> {
-            new_parser_from_source_str(sess, name, source).parse_expr()
-        }
-
-        with_default_globals(|| {
-            let sess = ParseSess::new(FilePathMapping::empty());
-            let expr = parse_expr_from_source_str(PathBuf::from("foo").into(),
-                "foo!( fn main() { body } )".to_string(), &sess).unwrap();
-
-            let tts: Vec<_> = match expr.node {
-                ast::ExprKind::Mac(ref mac) => mac.node.stream().trees().collect(),
-                _ => panic!("not a macro"),
-            };
-
-            let span = tts.iter().rev().next().unwrap().span();
-
-            match sess.source_map().span_to_snippet(span) {
-                Ok(s) => assert_eq!(&s[..], "{ body }"),
-                Err(_) => panic!("could not get snippet"),
-            }
-        });
-    }
-
-    // This tests that when parsing a string (rather than a file) we don't try
-    // and read in a file for a module declaration and just parse a stub.
-    // See `recurse_into_file_modules` in the parser.
-    #[test]
-    fn out_of_line_mod() {
-        with_default_globals(|| {
-            let sess = ParseSess::new(FilePathMapping::empty());
-            let item = parse_item_from_source_str(
-                PathBuf::from("foo").into(),
-                "mod foo { struct S; mod this_does_not_exist; }".to_owned(),
-                &sess,
-            ).unwrap().unwrap();
-
-            if let ast::ItemKind::Mod(ref m) = item.node {
-                assert!(m.items.len() == 2);
-            } else {
-                panic!();
-            }
-        });
-    }
-}
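
The heart of this patch is the `GatedSpans` refactor: instead of growing `ParseSess` by one ad-hoc `Lock<Vec<Span>>` field per unstable feature (`param_attr_spans`, `let_chains_spans`, `async_closure_spans`, ...), the parser records every use of a gated construct into a single grouped struct, and the feature-gate pass walks those spans after parsing. The sketch below shows that collect-then-check pattern outside the compiler. It is a minimal stand-in, not rustc's API: `std::sync::Mutex` and `Range<usize>` substitute for the compiler's internal `Lock` and `Span`, and the names `record_let_chain`, `record_or_pattern`, and the free function `check_crate` are invented for illustration.

    use std::ops::Range;
    use std::sync::Mutex;

    // Simplified stand-in for `GatedSpans`: one lock-protected list of
    // source ranges per feature used during parsing. (All names here are
    // illustrative, not the compiler's real API.)
    #[derive(Default)]
    struct GatedSpans {
        let_chains: Mutex<Vec<Range<usize>>>,
        or_patterns: Mutex<Vec<Range<usize>>>,
    }

    impl GatedSpans {
        // The parser would call this on seeing `if a && let b = c {}`.
        fn record_let_chain(&self, span: Range<usize>) {
            self.let_chains.lock().unwrap().push(span);
        }
        // ...and this on seeing an or-pattern such as `Some(Foo | Bar)`.
        fn record_or_pattern(&self, span: Range<usize>) {
            self.or_patterns.lock().unwrap().push(span);
        }
    }

    // Stand-in for the post-parse gate walk (`check_crate` in the doc
    // comment above): every recorded span of a disabled feature is reported.
    fn check_crate(gated: &GatedSpans, let_chains_on: bool, or_patterns_on: bool) {
        if !let_chains_on {
            for span in gated.let_chains.lock().unwrap().iter() {
                eprintln!("error: `let` chains are unstable (bytes {:?})", span);
            }
        }
        if !or_patterns_on {
            for span in gated.or_patterns.lock().unwrap().iter() {
                eprintln!("error: or-patterns are unstable (bytes {:?})", span);
            }
        }
    }

    fn main() {
        let gated = GatedSpans::default();
        gated.record_let_chain(3..20);   // as if from `if a && let b = c {}`
        gated.record_or_pattern(28..37); // as if from `Some(Foo | Bar)`
        check_crate(&gated, false, false);
    }

Deriving `Default` on the grouped struct is what lets the `ParseSess` constructor shrink to the single `gated_spans: GatedSpans::default()` line in the hunk above, and it makes adding a new gate a one-field change rather than a new `ParseSess` field plus initializer.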