git.lizzy.rs Git - rust.git/commitdiff
Simplify `hygiene::Mark` application, and
authorJeffrey Seyfried <jeffrey.seyfried@gmail.com>
Tue, 28 Mar 2017 05:32:43 +0000 (05:32 +0000)
committerJeffrey Seyfried <jeffrey.seyfried@gmail.com>
Mon, 26 Jun 2017 02:05:45 +0000 (02:05 +0000)
remove variant `Token::SubstNt` in favor of `quoted::TokenTree::MetaVar`.

26 files changed:
src/libproc_macro/lib.rs
src/librustc/ich/impls_syntax.rs
src/librustc_metadata/cstore_impl.rs
src/librustdoc/html/highlight.rs
src/libsyntax/ext/base.rs
src/libsyntax/ext/expand.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/quoted.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/fold.rs
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax/print/pprust.rs
src/libsyntax_ext/concat_idents.rs
src/libsyntax_ext/deriving/custom.rs
src/libsyntax_ext/format.rs
src/libsyntax_ext/proc_macro_impl.rs
src/libsyntax_pos/hygiene.rs
src/libsyntax_pos/lib.rs
src/test/compile-fail/asm-out-assign-imm.rs
src/test/compile-fail/macro-context.rs
src/test/ui/token/macro-incomplete-parse.rs

index f3d0521a2af6cc3811fc4c026faa37ceaf457c44..4744baf1b42feb1da217212e17146b80440c5091 100644 (file)
@@ -87,6 +87,8 @@ pub mod __internal {
     use std::rc::Rc;
 
     use syntax::ast;
+    use syntax::ext::base::ExtCtxt;
+    use syntax::ext::hygiene::Mark;
     use syntax::ptr::P;
     use syntax::parse::{self, token, ParseSess};
     use syntax::tokenstream::{TokenTree, TokenStream as TokenStream_};
@@ -107,7 +109,7 @@ pub fn token_stream_wrap(inner: TokenStream_) -> TokenStream {
     }
 
     pub fn token_stream_parse_items(stream: TokenStream) -> Result<Vec<P<ast::Item>>, LexError> {
-        with_parse_sess(move |sess| {
+        with_sess(move |(sess, _)| {
             let mut parser = parse::stream_to_parser(sess, stream.inner);
             let mut items = Vec::new();
 
@@ -140,13 +142,14 @@ fn register_bang_proc_macro(&mut self,
 
     // Emulate scoped_thread_local!() here essentially
     thread_local! {
-        static CURRENT_SESS: Cell<*const ParseSess> = Cell::new(0 as *const _);
+        static CURRENT_SESS: Cell<(*const ParseSess, Mark)> =
+            Cell::new((0 as *const _, Mark::root()));
     }
 
-    pub fn set_parse_sess<F, R>(sess: &ParseSess, f: F) -> R
+    pub fn set_sess<F, R>(cx: &ExtCtxt, f: F) -> R
         where F: FnOnce() -> R
     {
-        struct Reset { prev: *const ParseSess }
+        struct Reset { prev: (*const ParseSess, Mark) }
 
         impl Drop for Reset {
             fn drop(&mut self) {
@@ -156,18 +159,18 @@ fn drop(&mut self) {
 
         CURRENT_SESS.with(|p| {
             let _reset = Reset { prev: p.get() };
-            p.set(sess);
+            p.set((cx.parse_sess, cx.current_expansion.mark));
             f()
         })
     }
 
-    pub fn with_parse_sess<F, R>(f: F) -> R
-        where F: FnOnce(&ParseSess) -> R
+    pub fn with_sess<F, R>(f: F) -> R
+        where F: FnOnce((&ParseSess, Mark)) -> R
     {
         let p = CURRENT_SESS.with(|p| p.get());
-        assert!(!p.is_null(), "proc_macro::__internal::with_parse_sess() called \
-                               before set_parse_sess()!");
-        f(unsafe { &*p })
+        assert!(!p.0.is_null(), "proc_macro::__internal::with_sess() called \
+                                 before set_parse_sess()!");
+        f(unsafe { (&*p.0, p.1) })
     }
 }
 
@@ -181,10 +184,11 @@ impl FromStr for TokenStream {
     type Err = LexError;
 
     fn from_str(src: &str) -> Result<TokenStream, LexError> {
-        __internal::with_parse_sess(|sess| {
+        __internal::with_sess(|(sess, mark)| {
             let src = src.to_string();
             let name = "<proc-macro source code>".to_string();
-            let stream = parse::parse_stream_from_source_str(name, src, sess);
+            let call_site = mark.expn_info().unwrap().call_site;
+            let stream = parse::parse_stream_from_source_str(name, src, sess, Some(call_site));
             Ok(__internal::token_stream_wrap(stream))
         })
     }
index b9cc3b5fb937fe1927ecba066a1e2d5f4940b3dd..b827284271ed2235925313eadd31a1b662332fd8 100644 (file)
@@ -283,8 +283,7 @@ fn hash_token<'a, 'gcx, 'tcx, W: StableHasherResult>(token: &token::Token,
         }
 
         token::Token::Ident(ident) |
-        token::Token::Lifetime(ident) |
-        token::Token::SubstNt(ident) => ident.name.hash_stable(hcx, hasher),
+        token::Token::Lifetime(ident) => ident.name.hash_stable(hcx, hasher),
 
         token::Token::Interpolated(ref non_terminal) => {
             // FIXME(mw): This could be implemented properly. It's just a
index c49712086d52c2aff46e5483d7629f275535cbce..0649553e382e344621cc699773634ff111a98aac 100644 (file)
@@ -372,7 +372,7 @@ fn load_macro(&self, id: DefId, sess: &Session) -> LoadedMacro {
 
         let filemap = sess.parse_sess.codemap().new_filemap(source_name, def.body);
         let local_span = Span { lo: filemap.start_pos, hi: filemap.end_pos, ctxt: NO_EXPANSION };
-        let body = filemap_to_stream(&sess.parse_sess, filemap);
+        let body = filemap_to_stream(&sess.parse_sess, filemap, None);
 
         // Mark the attrs as used
         let attrs = data.get_item_attrs(id.index, &self.dep_graph);
index a40d1e6bdc91760c5062f5defbf6b2bf42c7b1e3..1f8c88d8ecf96d972623bcc721e533e59ceadada 100644 (file)
@@ -319,7 +319,7 @@ fn write_token<W: Writer>(&mut self,
             token::Lifetime(..) => Class::Lifetime,
 
             token::Underscore | token::Eof | token::Interpolated(..) |
-            token::SubstNt(..) | token::Tilde | token::At => Class::None,
+            token::Tilde | token::At => Class::None,
         };
 
         // Anything that didn't return above is the simple case where we the
index 8089fad5f36d880ecfb5b6d6f0a020e0f2af0b29..af5eabf06f87b99211b5f1d63a16d5ef5c08c3d3 100644 (file)
@@ -903,17 +903,3 @@ pub fn get_exprs_from_tts(cx: &mut ExtCtxt,
     }
     Some(es)
 }
-
-pub struct ChangeSpan {
-    pub span: Span
-}
-
-impl Folder for ChangeSpan {
-    fn new_span(&mut self, _sp: Span) -> Span {
-        self.span
-    }
-
-    fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
-        fold::noop_fold_mac(mac, self)
-    }
-}
index f8a26287bd47bf2051a7c716470b6f4f9feea30e..11efef45499766d4c4262f9cc7f44326218244d4 100644 (file)
@@ -16,7 +16,7 @@
 use errors::FatalError;
 use ext::base::*;
 use ext::derive::{add_derived_markers, collect_derives};
-use ext::hygiene::Mark;
+use ext::hygiene::{Mark, SyntaxContext};
 use ext::placeholders::{placeholder, PlaceholderExpander};
 use feature_gate::{self, Features, is_builtin_attr};
 use fold;
@@ -470,7 +470,6 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
             Ok(())
         };
 
-        let marked_tts = noop_fold_tts(mac.node.stream(), &mut Marker(mark));
         let opt_expanded = match *ext {
             SyntaxExtension::DeclMacro(ref expand, def_span) => {
                 if let Err(msg) = validate_and_set_expn_info(def_span.map(|(_, s)| s),
@@ -478,7 +477,7 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
                     self.cx.span_err(path.span, &msg);
                     return kind.dummy(span);
                 }
-                kind.make_from(expand.expand(self.cx, span, marked_tts))
+                kind.make_from(expand.expand(self.cx, span, mac.node.stream()))
             }
 
             NormalTT(ref expandfun, def_info, allow_internal_unstable) => {
@@ -487,7 +486,7 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
                     self.cx.span_err(path.span, &msg);
                     return kind.dummy(span);
                 }
-                kind.make_from(expandfun.expand(self.cx, span, marked_tts))
+                kind.make_from(expandfun.expand(self.cx, span, mac.node.stream()))
             }
 
             IdentTT(ref expander, tt_span, allow_internal_unstable) => {
@@ -506,7 +505,7 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
                     }
                 });
 
-                let input: Vec<_> = marked_tts.into_trees().collect();
+                let input: Vec<_> = mac.node.stream().into_trees().collect();
                 kind.make_from(expander.expand(self.cx, span, ident, input))
             }
 
@@ -541,21 +540,17 @@ fn expand_bang_invoc(&mut self, invoc: Invocation, ext: Rc<SyntaxExtension>) ->
                     },
                 });
 
-                let tok_result = expandfun.expand(self.cx, span, marked_tts);
+                let tok_result = expandfun.expand(self.cx, span, mac.node.stream());
                 Some(self.parse_expansion(tok_result, kind, path, span))
             }
         };
 
-        let expanded = if let Some(expanded) = opt_expanded {
-            expanded
-        } else {
+        unwrap_or!(opt_expanded, {
             let msg = format!("non-{kind} macro in {kind} position: {name}",
                               name = path.segments[0].identifier.name, kind = kind.name());
             self.cx.span_err(path.span, &msg);
-            return kind.dummy(span);
-        };
-
-        expanded.fold_with(&mut Marker(mark))
+            kind.dummy(span)
+        })
     }
 
     /// Expand a derive invocation. Returns the result of expansion.
@@ -621,8 +616,7 @@ fn parse_expansion(&mut self, toks: TokenStream, kind: ExpansionKind, path: &Pat
             }
         };
         parser.ensure_complete_parse(path, kind.name(), span);
-        // FIXME better span info
-        expansion.fold_with(&mut ChangeSpan { span: span })
+        expansion
     }
 }
 
@@ -673,7 +667,9 @@ pub fn ensure_complete_parse(&mut self, macro_path: &Path, kind_name: &str, span
         if self.token != token::Eof {
             let msg = format!("macro expansion ignores token `{}` and any following",
                               self.this_token_to_string());
-            let mut err = self.diagnostic().struct_span_err(self.span, &msg);
+            let mut def_site_span = self.span;
+            def_site_span.ctxt = SyntaxContext::empty(); // Avoid emitting backtrace info twice.
+            let mut err = self.diagnostic().struct_span_err(def_site_span, &msg);
             let msg = format!("caused by the macro expansion here; the usage \
                                of `{}!` is likely invalid in {} context",
                                macro_path, kind_name);
@@ -787,12 +783,12 @@ fn stream_for_item(item: &Annotatable, parse_sess: &ParseSess) -> TokenStream {
         Annotatable::TraitItem(ref ti) => pprust::trait_item_to_string(ti),
         Annotatable::ImplItem(ref ii) => pprust::impl_item_to_string(ii),
     };
-    string_to_stream(text, parse_sess)
+    string_to_stream(text, parse_sess, item.span())
 }
 
-fn string_to_stream(text: String, parse_sess: &ParseSess) -> TokenStream {
+fn string_to_stream(text: String, parse_sess: &ParseSess, span: Span) -> TokenStream {
     let filename = String::from("<macro expansion>");
-    filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text))
+    filemap_to_stream(parse_sess, parse_sess.codemap().new_filemap(filename, text), Some(span))
 }
 
 impl<'a, 'b> Folder for InvocationCollector<'a, 'b> {
@@ -1070,7 +1066,7 @@ fn enable_compile_error = compile_error,
 }
 
 // A Marker adds the given mark to the syntax context.
-struct Marker(Mark);
+pub struct Marker(pub Mark);
 
 impl Folder for Marker {
     fn fold_ident(&mut self, mut ident: Ident) -> Ident {
index f8fac847a053ef63c68e00b8ca7bcb7d4c4f2ebd..314a97496f8cc1a327e829f055630db0f5a71a61 100644 (file)
@@ -364,7 +364,7 @@ fn parse_expr(&self, s: String) -> P<ast::Expr> {
 
         fn parse_tts(&self, s: String) -> Vec<TokenTree> {
             let source_name = "<quote expansion>".to_owned();
-            parse::parse_stream_from_source_str(source_name, s, self.parse_sess())
+            parse::parse_stream_from_source_str(source_name, s, self.parse_sess(), None)
                 .into_trees().collect()
         }
     }
@@ -700,7 +700,7 @@ macro_rules! mk_lit {
         token::Underscore   => "Underscore",
         token::Eof          => "Eof",
 
-        token::Whitespace | token::SubstNt(_) | token::Comment | token::Shebang(_) => {
+        token::Whitespace | token::Comment | token::Shebang(_) => {
             panic!("unhandled token in quote!");
         }
     };
index 61d8fc2941afb71a425abc1ddd3ee01461d9ebe6..e877f1fedd40980c08f1f921cc78e95d80fe7a31 100644 (file)
@@ -158,15 +158,10 @@ fn push_match(&mut self, idx: usize, m: NamedMatch) {
 pub fn count_names(ms: &[TokenTree]) -> usize {
     ms.iter().fold(0, |count, elt| {
         count + match *elt {
-            TokenTree::Sequence(_, ref seq) => {
-                seq.num_captures
-            }
-            TokenTree::Delimited(_, ref delim) => {
-                count_names(&delim.tts)
-            }
-            TokenTree::MetaVarDecl(..) => {
-                1
-            }
+            TokenTree::Sequence(_, ref seq) => seq.num_captures,
+            TokenTree::Delimited(_, ref delim) => count_names(&delim.tts),
+            TokenTree::MetaVar(..) => 0,
+            TokenTree::MetaVarDecl(..) => 1,
             TokenTree::Token(..) => 0,
         }
     })
@@ -244,7 +239,7 @@ fn n_rec<I: Iterator<Item=NamedMatch>>(sess: &ParseSess, m: &TokenTree, mut res:
                     }
                 }
             }
-            TokenTree::Token(..) => (),
+            TokenTree::MetaVar(..) | TokenTree::Token(..) => (),
         }
 
         Ok(())
@@ -409,12 +404,11 @@ fn inner_parse_loop(sess: &ParseSess,
                     ei.idx = 0;
                     cur_eis.push(ei);
                 }
-                TokenTree::Token(_, ref t) => {
-                    if token_name_eq(t, token) {
-                        ei.idx += 1;
-                        next_eis.push(ei);
-                    }
+                TokenTree::Token(_, ref t) if token_name_eq(t, token) => {
+                    ei.idx += 1;
+                    next_eis.push(ei);
                 }
+                TokenTree::Token(..) | TokenTree::MetaVar(..) => {}
             }
         }
     }
index 9c728c9f2ebf0cd7146bacb68b3dbc8d9ce048a2..b732f47ce6a93da34d7fdbb87e2169e5d9a376fc 100644 (file)
@@ -120,7 +120,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                     _ => cx.span_bug(sp, "malformed macro rhs"),
                 };
                 // rhs has holes ( `$id` and `$(...)` that need filled)
-                let tts = transcribe(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
+                let tts = transcribe(cx, Some(named_matches), rhs);
 
                 if cx.trace_macros() {
                     trace_macros_note(cx, sp, format!("to `{}`", tts));
@@ -292,7 +292,7 @@ fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
     use self::quoted::TokenTree;
     for tt in tts {
         match *tt {
-            TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => (),
+            TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => (),
             TokenTree::Delimited(_, ref del) => if !check_lhs_no_empty_seq(sess, &del.tts) {
                 return false;
             },
@@ -372,7 +372,7 @@ fn build_recur(sets: &mut FirstSets, tts: &[TokenTree]) -> TokenSet {
             let mut first = TokenSet::empty();
             for tt in tts.iter().rev() {
                 match *tt {
-                    TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
+                    TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
                         first.replace_with(tt.clone());
                     }
                     TokenTree::Delimited(span, ref delimited) => {
@@ -432,7 +432,7 @@ fn first(&self, tts: &[quoted::TokenTree]) -> TokenSet {
         for tt in tts.iter() {
             assert!(first.maybe_empty);
             match *tt {
-                TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
+                TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
                     first.add_one(tt.clone());
                     return first;
                 }
@@ -602,7 +602,7 @@ fn check_matcher_core(sess: &ParseSess,
         // First, update `last` so that it corresponds to the set
         // of NT tokens that might end the sequence `... token`.
         match *token {
-            TokenTree::Token(..) | TokenTree::MetaVarDecl(..) => {
+            TokenTree::Token(..) | TokenTree::MetaVar(..) | TokenTree::MetaVarDecl(..) => {
                 let can_be_followed_by_any;
                 if let Err(bad_frag) = has_legal_fragment_specifier(sess, features, token) {
                     let msg = format!("invalid fragment specifier `{}`", bad_frag);
@@ -872,6 +872,7 @@ fn is_legal_fragment_specifier(sess: &ParseSess,
 fn quoted_tt_to_string(tt: &quoted::TokenTree) -> String {
     match *tt {
         quoted::TokenTree::Token(_, ref tok) => ::print::pprust::token_to_string(tok),
+        quoted::TokenTree::MetaVar(_, name) => format!("${}", name),
         quoted::TokenTree::MetaVarDecl(_, name, kind) => format!("${}:{}", name, kind),
         _ => panic!("unexpected quoted::TokenTree::{{Sequence or Delimited}} \
                      in follow set checker"),
index fa65e9501c2bb793aa34c64956aaff717bb4227d..18056f6028745ba7783e8e62c4ba78d11d89d9f6 100644 (file)
@@ -78,9 +78,11 @@ pub enum KleeneOp {
 pub enum TokenTree {
     Token(Span, token::Token),
     Delimited(Span, Rc<Delimited>),
-    /// A kleene-style repetition sequence with a span
+    /// A kleene-style repetition sequence
     Sequence(Span, Rc<SequenceRepetition>),
-    /// Matches a nonterminal. This is only used in the left hand side of MBE macros.
+    /// E.g. `$var`
+    MetaVar(Span, ast::Ident),
+    /// E.g. `$var:expr`. This is only used in the left hand side of MBE macros.
     MetaVarDecl(Span, ast::Ident /* name to bind */, ast::Ident /* kind of nonterminal */),
 }
 
@@ -130,6 +132,7 @@ pub fn get_tt(&self, index: usize) -> TokenTree {
     pub fn span(&self) -> Span {
         match *self {
             TokenTree::Token(sp, _) |
+            TokenTree::MetaVar(sp, _) |
             TokenTree::MetaVarDecl(sp, _, _) |
             TokenTree::Delimited(sp, _) |
             TokenTree::Sequence(sp, _) => sp,
@@ -144,7 +147,7 @@ pub fn parse(input: tokenstream::TokenStream, expect_matchers: bool, sess: &Pars
     while let Some(tree) = trees.next() {
         let tree = parse_tree(tree, &mut trees, expect_matchers, sess);
         match tree {
-            TokenTree::Token(start_sp, token::SubstNt(ident)) if expect_matchers => {
+            TokenTree::MetaVar(start_sp, ident) if expect_matchers => {
                 let span = match trees.next() {
                     Some(tokenstream::TokenTree::Token(span, token::Colon)) => match trees.next() {
                         Some(tokenstream::TokenTree::Token(end_sp, ref tok)) => match tok.ident() {
@@ -199,13 +202,13 @@ fn parse_tree<I>(tree: tokenstream::TokenTree,
                     let ident = ast::Ident { name: Symbol::intern("$crate"), ..ident };
                     TokenTree::Token(span, token::Ident(ident))
                 } else {
-                    TokenTree::Token(span, token::SubstNt(ident))
+                    TokenTree::MetaVar(span, ident)
                 }
             }
             Some(tokenstream::TokenTree::Token(span, tok)) => {
                 let msg = format!("expected identifier, found `{}`", pprust::token_to_string(&tok));
                 sess.span_diagnostic.span_err(span, &msg);
-                TokenTree::Token(span, token::SubstNt(keywords::Invalid.ident()))
+                TokenTree::MetaVar(span, keywords::Invalid.ident())
             }
             None => TokenTree::Token(span, token::Dollar),
         },
index 78e755e73fa30ace18a116c9642979c94eaea366..9438e2fb0e5bffa44466c59ac2bc9e88b215c338 100644 (file)
@@ -9,10 +9,12 @@
 // except according to those terms.
 
 use ast::Ident;
-use errors::Handler;
+use ext::base::ExtCtxt;
+use ext::expand::Marker;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
 use ext::tt::quoted;
-use parse::token::{self, SubstNt, Token, NtTT};
+use fold::noop_fold_tt;
+use parse::token::{self, Token, NtTT};
 use syntax_pos::{Span, DUMMY_SP};
 use tokenstream::{TokenStream, TokenTree, Delimited};
 use util::small_vector::SmallVector;
@@ -61,9 +63,9 @@ fn next(&mut self) -> Option<quoted::TokenTree> {
 }
 
 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TokenTree::{Sequence, Match}`s, or `SubstNt`s, `interp` can
+/// `src` contains no `TokenTree::{Sequence, MetaVar, MetaVarDecl}`s, `interp` can
 /// (and should) be None.
-pub fn transcribe(sp_diag: &Handler,
+pub fn transcribe(cx: &ExtCtxt,
                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                   src: Vec<quoted::TokenTree>)
                   -> TokenStream {
@@ -120,22 +122,20 @@ pub fn transcribe(sp_diag: &Handler,
                                          &interpolations,
                                          &repeats) {
                     LockstepIterSize::Unconstrained => {
-                        panic!(sp_diag.span_fatal(
-                            sp, /* blame macro writer */
+                        cx.span_fatal(sp, /* blame macro writer */
                             "attempted to repeat an expression \
                              containing no syntax \
-                             variables matched as repeating at this depth"));
+                             variables matched as repeating at this depth");
                     }
                     LockstepIterSize::Contradiction(ref msg) => {
                         // FIXME #2887 blame macro invoker instead
-                        panic!(sp_diag.span_fatal(sp, &msg[..]));
+                        cx.span_fatal(sp, &msg[..]);
                     }
                     LockstepIterSize::Constraint(len, _) => {
                         if len == 0 {
                             if seq.op == quoted::KleeneOp::OneOrMore {
                                 // FIXME #2887 blame invoker
-                                panic!(sp_diag.span_fatal(sp,
-                                                          "this must repeat at least once"));
+                                cx.span_fatal(sp, "this must repeat at least once");
                             }
                         } else {
                             repeats.push((0, len));
@@ -149,29 +149,37 @@ pub fn transcribe(sp_diag: &Handler,
                 }
             }
             // FIXME #2887: think about span stuff here
-            quoted::TokenTree::Token(sp, SubstNt(ident)) => {
-                match lookup_cur_matched(ident, &interpolations, &repeats) {
-                    None => result.push(TokenTree::Token(sp, SubstNt(ident)).into()),
-                    Some(cur_matched) => if let MatchedNonterminal(ref nt) = *cur_matched {
-                        match **nt {
-                            NtTT(ref tt) => result.push(tt.clone().into()),
-                            _ => {
-                                let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
-                                result.push(token.into());
-                            }
+            quoted::TokenTree::MetaVar(mut sp, ident) => {
+                if let Some(cur_matched) = lookup_cur_matched(ident, &interpolations, &repeats) {
+                    if let MatchedNonterminal(ref nt) = *cur_matched {
+                        if let NtTT(ref tt) = **nt {
+                            result.push(tt.clone().into());
+                        } else {
+                            sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark);
+                            let token = TokenTree::Token(sp, token::Interpolated(nt.clone()));
+                            result.push(token.into());
                         }
                     } else {
-                        panic!(sp_diag.span_fatal(
-                            sp, /* blame the macro writer */
-                            &format!("variable '{}' is still repeating at this depth", ident)));
+                        cx.span_fatal(sp, /* blame the macro writer */
+                            &format!("variable '{}' is still repeating at this depth", ident));
                     }
+                } else {
+                    let ident =
+                        Ident { ctxt: ident.ctxt.apply_mark(cx.current_expansion.mark), ..ident };
+                    sp.ctxt = sp.ctxt.apply_mark(cx.current_expansion.mark);
+                    result.push(TokenTree::Token(sp, token::Dollar).into());
+                    result.push(TokenTree::Token(sp, token::Ident(ident)).into());
                 }
             }
-            quoted::TokenTree::Delimited(span, delimited) => {
+            quoted::TokenTree::Delimited(mut span, delimited) => {
+                span.ctxt = span.ctxt.apply_mark(cx.current_expansion.mark);
                 stack.push(Frame::Delimited { forest: delimited, idx: 0, span: span });
                 result_stack.push(mem::replace(&mut result, Vec::new()));
             }
-            quoted::TokenTree::Token(span, tok) => result.push(TokenTree::Token(span, tok).into()),
+            quoted::TokenTree::Token(sp, tok) => {
+                let mut marker = Marker(cx.current_expansion.mark);
+                result.push(noop_fold_tt(TokenTree::Token(sp, tok), &mut marker).into())
+            }
             quoted::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
         }
     }
@@ -240,7 +248,7 @@ fn lockstep_iter_size(tree: &quoted::TokenTree,
                 size + lockstep_iter_size(tt, interpolations, repeats)
             })
         },
-        TokenTree::Token(_, SubstNt(name)) | TokenTree::MetaVarDecl(_, name, _) =>
+        TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) =>
             match lookup_cur_matched(name, interpolations, repeats) {
                 Some(matched) => match *matched {
                     MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
index 4c6cf49a8db43d1b14981a263e2b4507f24672ac..2032aecacbb9102a67d2a46a480c47479c47abb9 100644 (file)
@@ -588,7 +588,6 @@ pub fn noop_fold_token<T: Folder>(t: token::Token, fld: &mut T) -> token::Token
             };
             token::Interpolated(Rc::new(fld.fold_interpolated(nt)))
         }
-        token::SubstNt(ident) => token::SubstNt(fld.fold_ident(ident)),
         _ => t
     }
 }
index e2656bea483396006b5ed530b832500bb566ab12..afc1e583d69bb7a0928f1488ee40f3858b55f03b 100644 (file)
@@ -66,14 +66,15 @@ pub struct StringReader<'a> {
     token: token::Token,
     span: Span,
     open_braces: Vec<(token::DelimToken, Span)>,
-}
-
-fn mk_sp(lo: BytePos, hi: BytePos) -> Span {
-    Span { lo: lo, hi: hi, ctxt: NO_EXPANSION }
+    pub override_span: Option<Span>,
 }
 
 impl<'a> StringReader<'a> {
-    fn next_token(&mut self) -> TokenAndSpan {
+    fn mk_sp(&self, lo: BytePos, hi: BytePos) -> Span {
+        unwrap_or!(self.override_span, Span { lo: lo, hi: hi, ctxt: NO_EXPANSION})
+    }
+
+    fn next_token(&mut self) -> TokenAndSpan where Self: Sized {
         let res = self.try_next_token();
         self.unwrap_or_abort(res)
     }
@@ -175,6 +176,7 @@ fn new_raw_internal(sess: &'a ParseSess, filemap: Rc<syntax_pos::FileMap>) -> Se
             token: token::Eof,
             span: syntax_pos::DUMMY_SP,
             open_braces: Vec::new(),
+            override_span: None,
         }
     }
 
@@ -229,12 +231,12 @@ pub fn err_span(&self, sp: Span, m: &str) {
 
     /// Report a fatal error spanning [`from_pos`, `to_pos`).
     fn fatal_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) -> FatalError {
-        self.fatal_span(mk_sp(from_pos, to_pos), m)
+        self.fatal_span(self.mk_sp(from_pos, to_pos), m)
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`).
     fn err_span_(&self, from_pos: BytePos, to_pos: BytePos, m: &str) {
-        self.err_span(mk_sp(from_pos, to_pos), m)
+        self.err_span(self.mk_sp(from_pos, to_pos), m)
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -258,7 +260,7 @@ fn struct_fatal_span_char(&self,
         for c in c.escape_default() {
             m.push(c)
         }
-        self.sess.span_diagnostic.struct_span_fatal(mk_sp(from_pos, to_pos), &m[..])
+        self.sess.span_diagnostic.struct_span_fatal(self.mk_sp(from_pos, to_pos), &m[..])
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending an
@@ -282,7 +284,7 @@ fn struct_err_span_char(&self,
         for c in c.escape_default() {
             m.push(c)
         }
-        self.sess.span_diagnostic.struct_span_err(mk_sp(from_pos, to_pos), &m[..])
+        self.sess.span_diagnostic.struct_span_err(self.mk_sp(from_pos, to_pos), &m[..])
     }
 
     /// Report a lexical error spanning [`from_pos`, `to_pos`), appending the
@@ -306,11 +308,11 @@ fn advance_token(&mut self) -> Result<(), ()> {
             None => {
                 if self.is_eof() {
                     self.peek_tok = token::Eof;
-                    self.peek_span = mk_sp(self.filemap.end_pos, self.filemap.end_pos);
+                    self.peek_span = self.mk_sp(self.filemap.end_pos, self.filemap.end_pos);
                 } else {
                     let start_bytepos = self.pos;
                     self.peek_tok = self.next_token_inner()?;
-                    self.peek_span = mk_sp(start_bytepos, self.pos);
+                    self.peek_span = self.mk_sp(start_bytepos, self.pos);
                 };
             }
         }
@@ -502,7 +504,7 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
         if let Some(c) = self.ch {
             if c.is_whitespace() {
                 let msg = "called consume_any_line_comment, but there was whitespace";
-                self.sess.span_diagnostic.span_err(mk_sp(self.pos, self.pos), msg);
+                self.sess.span_diagnostic.span_err(self.mk_sp(self.pos, self.pos), msg);
             }
         }
 
@@ -545,13 +547,13 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
 
                             Some(TokenAndSpan {
                                 tok: tok,
-                                sp: mk_sp(start_bpos, self.pos),
+                                sp: self.mk_sp(start_bpos, self.pos),
                             })
                         })
                     } else {
                         Some(TokenAndSpan {
                             tok: token::Comment,
-                            sp: mk_sp(start_bpos, self.pos),
+                            sp: self.mk_sp(start_bpos, self.pos),
                         })
                     }
                 }
@@ -584,7 +586,7 @@ fn scan_comment(&mut self) -> Option<TokenAndSpan> {
                     }
                     return Some(TokenAndSpan {
                         tok: token::Shebang(self.name_from(start)),
-                        sp: mk_sp(start, self.pos),
+                        sp: self.mk_sp(start, self.pos),
                     });
                 }
             }
@@ -612,7 +614,7 @@ fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> {
                 }
                 let c = Some(TokenAndSpan {
                     tok: token::Whitespace,
-                    sp: mk_sp(start_bpos, self.pos),
+                    sp: self.mk_sp(start_bpos, self.pos),
                 });
                 debug!("scanning whitespace: {:?}", c);
                 c
@@ -674,7 +676,7 @@ fn scan_block_comment(&mut self) -> Option<TokenAndSpan> {
 
             Some(TokenAndSpan {
                 tok: tok,
-                sp: mk_sp(start_bpos, self.pos),
+                sp: self.mk_sp(start_bpos, self.pos),
             })
         })
     }
@@ -869,7 +871,7 @@ fn scan_char_or_byte(&mut self,
                                 let valid = if self.ch_is('{') {
                                     self.scan_unicode_escape(delim) && !ascii_only
                                 } else {
-                                    let span = mk_sp(start, self.pos);
+                                    let span = self.mk_sp(start, self.pos);
                                     self.sess.span_diagnostic
                                         .struct_span_err(span, "incorrect unicode escape sequence")
                                         .span_help(span,
@@ -907,13 +909,13 @@ fn scan_char_or_byte(&mut self,
                                                                         },
                                                                         c);
                                 if e == '\r' {
-                                    err.span_help(mk_sp(escaped_pos, pos),
+                                    err.span_help(self.mk_sp(escaped_pos, pos),
                                                   "this is an isolated carriage return; consider \
                                                    checking your editor and version control \
                                                    settings");
                                 }
                                 if (e == '{' || e == '}') && !ascii_only {
-                                    err.span_help(mk_sp(escaped_pos, pos),
+                                    err.span_help(self.mk_sp(escaped_pos, pos),
                                                   "if used in a formatting string, curly braces \
                                                    are escaped with `{{` and `}}`");
                                 }
index 3a68a6ba7646c98de89a8cd8354b4db1fab175ef..f917eec2cd0b13ec7f85f856c83c740ce6fe7699 100644 (file)
@@ -141,9 +141,10 @@ pub fn parse_stmt_from_source_str(name: String, source: String, sess: &ParseSess
     new_parser_from_source_str(sess, name, source).parse_stmt()
 }
 
-pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess)
-                                        -> TokenStream {
-    filemap_to_stream(sess, sess.codemap().new_filemap(name, source))
+pub fn parse_stream_from_source_str(name: String, source: String, sess: &ParseSess,
+                                    override_span: Option<Span>)
+                                    -> TokenStream {
+    filemap_to_stream(sess, sess.codemap().new_filemap(name, source), override_span)
 }
 
 // Create a new parser from a source string
@@ -177,7 +178,7 @@ pub fn new_sub_parser_from_file<'a>(sess: &'a ParseSess,
 /// Given a filemap and config, return a parser
 pub fn filemap_to_parser(sess: & ParseSess, filemap: Rc<FileMap>, ) -> Parser {
     let end_pos = filemap.end_pos;
-    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap));
+    let mut parser = stream_to_parser(sess, filemap_to_stream(sess, filemap, None));
 
     if parser.token == token::Eof && parser.span == syntax_pos::DUMMY_SP {
         parser.span = Span { lo: end_pos, hi: end_pos, ctxt: NO_EXPANSION };
@@ -212,8 +213,10 @@ fn file_to_filemap(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
 }
 
 /// Given a filemap, produce a sequence of token-trees
-pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>) -> TokenStream {
+pub fn filemap_to_stream(sess: &ParseSess, filemap: Rc<FileMap>, override_span: Option<Span>)
+                         -> TokenStream {
     let mut srdr = lexer::StringReader::new(sess, filemap);
+    srdr.override_span = override_span;
     srdr.real_token();
     panictry!(srdr.parse_all_token_trees())
 }
index 851a638e14842af4fd55ce36444e968124805adc..25ab46f6f9e2bad1067f0cf5d16c60a1aefb2ed7 100644 (file)
@@ -2626,7 +2626,10 @@ fn parse_dot_or_call_expr_with_(&mut self, e0: P<Expr>, lo: Span) -> PResult<'a,
 
     pub fn process_potential_macro_variable(&mut self) {
         let ident = match self.token {
-            token::SubstNt(name) => {
+            token::Dollar if self.span.ctxt != syntax_pos::hygiene::SyntaxContext::empty() &&
+                             self.look_ahead(1, |t| t.is_ident()) => {
+                self.bump();
+                let name = match self.token { token::Ident(ident) => ident, _ => unreachable!() };
                 self.fatal(&format!("unknown macro variable `{}`", name)).emit();
                 return
             }
index 77db604c56e118c0596364536b3b1ed3674a2f24..f208b0f56f81ec66a81ec7120ce5b6b08432d299 100644 (file)
@@ -172,9 +172,6 @@ pub enum Token {
     // Can be expanded into several tokens.
     /// Doc comment
     DocComment(ast::Name),
-    // In right-hand-sides of MBE macros:
-    /// A syntactic variable that will be filled in by macro expansion.
-    SubstNt(ast::Ident),
 
     // Junk. These carry no data because we don't really care about the data
     // they *would* carry, and don't really want to allocate a new ident for
index 34cda433d5250ee9c1b0bb47671372b7e695a095..6c6ca556e35ed91ac4712626d14e2e6b4ed39a89 100644 (file)
@@ -270,7 +270,6 @@ pub fn token_to_string(tok: &Token) -> String {
 
         /* Other */
         token::DocComment(s)        => s.to_string(),
-        token::SubstNt(s)           => format!("${}", s),
         token::Eof                  => "<eof>".to_string(),
         token::Whitespace           => " ".to_string(),
         token::Comment              => "/* */".to_string(),
index dc4b8eb24cd0a570ce18531a84024bd2e90ebff9..6f4c112acb6c6430eadb8b34608e2c08bdb5ae8c 100644 (file)
@@ -15,6 +15,8 @@
 use syntax::parse::token;
 use syntax::ptr::P;
 use syntax_pos::Span;
+use syntax_pos::symbol::Symbol;
+use syntax_pos::hygiene::SyntaxContext;
 use syntax::tokenstream::TokenTree;
 
 pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
@@ -50,7 +52,10 @@ pub fn expand_syntax_ext<'cx>(cx: &'cx mut ExtCtxt,
             }
         }
     }
-    let res = ast::Ident::from_str(&res_str);
+    let res = ast::Ident {
+        name: Symbol::intern(&res_str),
+        ctxt: SyntaxContext::empty().apply_mark(cx.current_expansion.mark),
+    };
 
     struct Result {
         ident: ast::Ident,
index b01ef65e5fe5ed41611cce78fe34c554b6d135c7..fa5537b5d8fe3e93f18da7581c992891d8e64f12 100644 (file)
@@ -16,7 +16,6 @@
 use syntax::attr::{mark_used, mark_known};
 use syntax::codemap::Span;
 use syntax::ext::base::*;
-use syntax::fold::Folder;
 use syntax::visit::Visitor;
 
 struct MarkAttrs<'a>(&'a [ast::Name]);
@@ -75,7 +74,7 @@ fn expand(&self,
         MarkAttrs(&self.attrs).visit_item(&item);
 
         let input = __internal::new_token_stream(ecx.resolver.eliminate_crate_var(item.clone()));
-        let res = __internal::set_parse_sess(&ecx.parse_sess, || {
+        let res = __internal::set_sess(ecx, || {
             let inner = self.inner;
             panic::catch_unwind(panic::AssertUnwindSafe(|| inner(input)))
         });
@@ -97,9 +96,9 @@ fn expand(&self,
             }
         };
 
-        let new_items = __internal::set_parse_sess(&ecx.parse_sess, || {
+        __internal::set_sess(ecx, || {
             match __internal::token_stream_parse_items(stream) {
-                Ok(new_items) => new_items,
+                Ok(new_items) => new_items.into_iter().map(Annotatable::Item).collect(),
                 Err(_) => {
                     // FIXME: handle this better
                     let msg = "proc-macro derive produced unparseable tokens";
@@ -107,12 +106,6 @@ fn expand(&self,
                     panic!(FatalError);
                 }
             }
-        });
-
-        // Reassign spans of all expanded items to the input `item`
-        // for better errors here.
-        new_items.into_iter().map(|item| {
-            Annotatable::Item(ChangeSpan { span: span }.fold_item(item).expect_one(""))
-        }).collect()
+        })
     }
 }
index a6768c07fe13b543e9085dd1e08f56fcc35a8886..144d1930df90b54d3b99f77118fe477ec85eb469 100644 (file)
@@ -20,7 +20,7 @@
 use syntax::parse::token;
 use syntax::ptr::P;
 use syntax::symbol::{Symbol, keywords};
-use syntax_pos::{Span, DUMMY_SP};
+use syntax_pos::Span;
 use syntax::tokenstream;
 
 use std::collections::{HashMap, HashSet};
@@ -558,7 +558,9 @@ fn into_expr(mut self) -> P<ast::Expr> {
         // passed to this function.
         for (i, e) in self.args.into_iter().enumerate() {
             let name = self.ecx.ident_of(&format!("__arg{}", i));
-            pats.push(self.ecx.pat_ident(DUMMY_SP, name));
+            let span =
+                Span { ctxt: e.span.ctxt.apply_mark(self.ecx.current_expansion.mark), ..e.span };
+            pats.push(self.ecx.pat_ident(span, name));
             for ref arg_ty in self.arg_unique_types[i].iter() {
                 locals.push(Context::format_arg(self.ecx, self.macsp, e.span, arg_ty, name));
             }
@@ -672,10 +674,10 @@ fn format_arg(ecx: &ExtCtxt,
 }
 
 pub fn expand_format_args<'cx>(ecx: &'cx mut ExtCtxt,
-                               sp: Span,
+                               mut sp: Span,
                                tts: &[tokenstream::TokenTree])
                                -> Box<base::MacResult + 'cx> {
-
+    sp.ctxt = sp.ctxt.apply_mark(ecx.current_expansion.mark);
     match parse_args(ecx, sp, tts) {
         Some((efmt, args, names)) => {
             MacEager::expr(expand_preparsed_format_args(ecx, sp, efmt, args, names))
@@ -696,7 +698,8 @@ pub fn expand_preparsed_format_args(ecx: &mut ExtCtxt,
     // `ArgumentType` does not derive `Clone`.
     let arg_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect();
     let arg_unique_types: Vec<_> = (0..args.len()).map(|_| Vec::new()).collect();
-    let macsp = ecx.call_site();
+    let mut macsp = ecx.call_site();
+    macsp.ctxt = macsp.ctxt.apply_mark(ecx.current_expansion.mark);
     let msg = "format argument must be a string literal.";
     let fmt = match expr_to_spanned_string(ecx, efmt, msg) {
         Some(fmt) => fmt,
index f60e5824db9627cc67e203060a8409ab58afee7f..5fcedbf50c60f71caeba36d8c8811acab084f204 100644 (file)
@@ -34,7 +34,7 @@ fn expand<'cx>(&self,
         let annotation = __internal::token_stream_wrap(annotation);
         let annotated = __internal::token_stream_wrap(annotated);
 
-        let res = __internal::set_parse_sess(&ecx.parse_sess, || {
+        let res = __internal::set_sess(ecx, || {
             panic::catch_unwind(panic::AssertUnwindSafe(|| (self.inner)(annotation, annotated)))
         });
 
@@ -69,7 +69,7 @@ fn expand<'cx>(&self,
                    -> TokenStream {
         let input = __internal::token_stream_wrap(input);
 
-        let res = __internal::set_parse_sess(&ecx.parse_sess, || {
+        let res = __internal::set_sess(ecx, || {
             panic::catch_unwind(panic::AssertUnwindSafe(|| (self.inner)(input)))
         });
 
index f2ccc3f051e9289eb3f3a14567b06ecf6415eb63..804b91ab09e3c333363305262763886f8de88cbd 100644 (file)
@@ -144,24 +144,18 @@ pub const fn empty() -> Self {
     pub fn apply_mark(self, mark: Mark) -> SyntaxContext {
         HygieneData::with(|data| {
             let syntax_contexts = &mut data.syntax_contexts;
-            let ctxt_data = syntax_contexts[self.0 as usize];
-            if mark == ctxt_data.outer_mark {
-                return ctxt_data.prev_ctxt;
-            }
-
-            let modern = if data.marks[mark.0 as usize].modern {
-                *data.markings.entry((ctxt_data.modern, mark)).or_insert_with(|| {
-                    let modern = SyntaxContext(syntax_contexts.len() as u32);
+            let mut modern = syntax_contexts[self.0 as usize].modern;
+            if data.marks[mark.0 as usize].modern {
+                modern = *data.markings.entry((modern, mark)).or_insert_with(|| {
+                    let len = syntax_contexts.len() as u32;
                     syntax_contexts.push(SyntaxContextData {
                         outer_mark: mark,
-                        prev_ctxt: ctxt_data.modern,
-                        modern: modern,
+                        prev_ctxt: modern,
+                        modern: SyntaxContext(len),
                     });
-                    modern
-                })
-            } else {
-                ctxt_data.modern
-            };
+                    SyntaxContext(len)
+                });
+            }
 
             *data.markings.entry((self, mark)).or_insert_with(|| {
                 syntax_contexts.push(SyntaxContextData {
index bb62efd376a0b8247e9bf3165b1adc06fdecddab..a7c247689cce88b08cbef538fde74eb6cff8e17b 100644 (file)
@@ -186,7 +186,7 @@ pub fn macro_backtrace(mut self) -> Vec<MacroBacktrace> {
 
     pub fn to(self, end: Span) -> Span {
         // FIXME(jseyfried): self.ctxt should always equal end.ctxt here (c.f. issue #23480)
-        if end.ctxt == SyntaxContext::empty() {
+        if self.ctxt == SyntaxContext::empty() {
             Span { lo: self.lo, ..end }
         } else {
             Span { hi: end.hi, ..self }
index 3c4a5dcb7b038356947fdcc613734e2cd6884bec..f95e4410381d96d1df37e111d080b6fc9f692c97 100644 (file)
@@ -28,7 +28,6 @@ pub fn main() {
         asm!("mov $1, $0" : "=r"(x) : "r"(5));
         //~^ ERROR re-assignment of immutable variable `x`
         //~| NOTE re-assignment of immutable
-        //~| NOTE in this expansion of asm!
     }
     foo(x);
 }
index 80802e19f8401de8c6344d2aee2dde3fc4b9050f..cc714a6e43141744d3beeb394e59fdab3694c2fd 100644 (file)
@@ -23,5 +23,5 @@ fn main() {
         m!() => {}  //~ NOTE the usage of `m!` is likely invalid in pattern context
     }
 
-    m!();
+    m!(); //~ NOTE in this expansion
 }
index 47374fc3c608537d671aa2f526ba32d4d316f65f..08749373432f567761c01f2e208497baa93f86fd 100644 (file)
@@ -32,7 +32,7 @@ macro_rules! ignored_pat {
 ignored_item!(); //~ NOTE caused by the macro expansion here
 
 fn main() {
-    ignored_expr!();
+    ignored_expr!(); //~ NOTE in this expansion
     match 1 {
         ignored_pat!() => (), //~ NOTE caused by the macro expansion here
         _ => (),