use ext::tt::macro_parser::{parse, parse_failure_msg};
use ext::tt::quoted;
use ext::tt::transcribe::transcribe;
+use feature_gate::{self, emit_feature_err, Features, GateIssue};
use parse::{Directory, ParseSess};
use parse::parser::Parser;
use parse::token::{self, NtTT};
use symbol::Symbol;
use tokenstream::{TokenStream, TokenTree};
+use std::cell::RefCell;
use std::collections::{HashMap};
use std::collections::hash_map::{Entry};
use std::rc::Rc;
// Holy self-referential!
/// Converts a `macro_rules!` invocation into a syntax extension.
-pub fn compile(sess: &ParseSess, def: &ast::Item) -> SyntaxExtension {
+pub fn compile(sess: &ParseSess, features: &RefCell<Features>, def: &ast::Item) -> SyntaxExtension {
let lhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("lhs"));
let rhs_nm = ast::Ident::with_empty_ctxt(Symbol::gensym("rhs"));
if let MatchedNonterminal(ref nt) = **m {
if let NtTT(ref tt) = **nt {
let tt = quoted::parse(tt.clone().into(), true, sess).pop().unwrap();
- valid &= check_lhs_nt_follows(sess, &tt);
+ valid &= check_lhs_nt_follows(sess, features, &tt);
return tt;
}
}
NormalTT(exp, Some(def.span), attr::contains_name(&def.attrs, "allow_internal_unstable"))
}
-fn check_lhs_nt_follows(sess: &ParseSess, lhs: &quoted::TokenTree) -> bool {
+fn check_lhs_nt_follows(sess: &ParseSess,
+                        features: &RefCell<Features>,
+                        lhs: &quoted::TokenTree) -> bool {
// lhs is going to be like TokenTree::Delimited(...), where the
// entire lhs is those tts. Or, it can be a "bare sequence", not wrapped in parens.
match lhs {
-        &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, &tts.tts),
+        &quoted::TokenTree::Delimited(_, ref tts) => check_matcher(sess, features, &tts.tts),
_ => {
let msg = "invalid macro matcher; matchers must be contained in balanced delimiters";
sess.span_diagnostic.span_err(lhs.span(), msg);
false
}
-fn check_matcher(sess: &ParseSess, matcher: &[quoted::TokenTree]) -> bool {
+fn check_matcher(sess: &ParseSess,
+ features: &RefCell<Features>,
+ matcher: &[quoted::TokenTree]) -> bool {
let first_sets = FirstSets::new(matcher);
let empty_suffix = TokenSet::empty();
let err = sess.span_diagnostic.err_count();
- check_matcher_core(sess, &first_sets, matcher, &empty_suffix);
+ check_matcher_core(sess, features, &first_sets, matcher, &empty_suffix);
err == sess.span_diagnostic.err_count()
}
// Requires that `first_sets` is pre-computed for `matcher`;
// see `FirstSets::new`.
fn check_matcher_core(sess: &ParseSess,
+ features: &RefCell<Features>,
first_sets: &FirstSets,
matcher: &[quoted::TokenTree],
follow: &TokenSet) -> TokenSet {
match *token {
TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
let can_be_followed_by_any;
- if let Err(bad_frag) = has_legal_fragment_specifier(token) {
+ if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, token) {
let msg = format!("invalid fragment specifier `{}`", bad_frag);
sess.span_diagnostic.struct_span_err(token.span(), &msg)
- .help("valid fragment specifiers are `ident`, `block`, \
- `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
- and `item`")
+ .help("valid fragment specifiers are `ident`, `block`, `stmt`, `expr`, \
+ `pat`, `ty`, `path`, `meta`, `tt`, `item` and `vis`")
.emit();
// (This eliminates false positives and duplicates
// from error messages.)
}
TokenTree::Delimited(span, ref d) => {
let my_suffix = TokenSet::singleton(d.close_tt(span));
- check_matcher_core(sess, first_sets, &d.tts, &my_suffix);
+ check_matcher_core(sess, features, first_sets, &d.tts, &my_suffix);
// don't track non NT tokens
last.replace_with_irrelevant();
// At this point, `suffix_first` is built, and
// `my_suffix` is some TokenSet that we can use
// for checking the interior of `seq_rep`.
- let next = check_matcher_core(sess, first_sets, &seq_rep.tts, my_suffix);
+ let next = check_matcher_core(sess, features, first_sets, &seq_rep.tts, my_suffix);
if next.maybe_empty {
last.add_all(&next);
} else {
"" => Ok(true), // keywords::Invalid
_ => Err((format!("invalid fragment specifier `{}`", frag),
"valid fragment specifiers are `ident`, `block`, \
- `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt` \
- and `item`"))
+ `stmt`, `expr`, `pat`, `ty`, `path`, `meta`, `tt`, \
+ `item` and `vis`"))
}
}
}
-fn has_legal_fragment_specifier(tok: &quoted::TokenTree) -> Result<(), String> {
+fn has_legal_fragment_specifier(sess: &ParseSess,
+                                features: &RefCell<Features>,
+                                tok: &quoted::TokenTree) -> Result<(), String> {
debug!("has_legal_fragment_specifier({:?})", tok);
- if let quoted::TokenTree::MetaVarDecl(_, _, frag_spec) = *tok {
- let s = &frag_spec.name.as_str();
- if !is_legal_fragment_specifier(s) {
- return Err(s.to_string());
+ if let quoted::TokenTree::MetaVarDecl(_, _, ref frag_spec) = *tok {
+ let frag_name = frag_spec.name.as_str();
+ let frag_span = tok.span();
+ if !is_legal_fragment_specifier(sess, features, &frag_name, frag_span) {
+ return Err(frag_name.to_string());
}
}
Ok(())
}
-fn is_legal_fragment_specifier(frag: &str) -> bool {
- match frag {
+fn is_legal_fragment_specifier(sess: &ParseSess,
+ features: &RefCell<Features>,
+ frag_name: &str,
+ frag_span: Span) -> bool {
+ match frag_name {
"item" | "block" | "stmt" | "expr" | "pat" |
- "path" | "ty" | "ident" | "meta" | "tt" | "vis" | "" => true,
+ "path" | "ty" | "ident" | "meta" | "tt" | "" => true,
+ "vis" => {
+ if !features.borrow().macro_vis_matcher {
+ let explain = feature_gate::EXPLAIN_VIS_MATCHER;
+ emit_feature_err(sess,
+ "macro_vis_matcher",
+ frag_span,
+ GateIssue::Language,
+ explain);
+ }
+ true
+ },
_ => false,
}
}