Make metavariables hygienic.
author     Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
Sun, 7 Aug 2016 02:19:10 +0000 (02:19 +0000)
committer  Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
Sun, 7 Aug 2016 06:01:01 +0000 (06:01 +0000)
src/libsyntax/ext/tt/macro_parser.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/transcribe.rs
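The change is the same in all three files: the maps that carry matched metavariable fragments are now keyed by `Ident` instead of `Name`. A `Name` is just the interned string, while an `Ident` also records the syntax context the metavariable was written in, so identically spelled metavariables from different expansion contexts can be told apart — the hygiene the commit title refers to. Below is a minimal sketch of the difference, using simplified stand-in types rather than the real `ast::Name`/`ast::Ident`:

use std::collections::HashMap;

// Stand-ins only: the real `Name` is an interned symbol and the real
// `Ident` pairs a `Name` with a syntax (hygiene) context.
type Name = u32;

#[derive(PartialEq, Eq, Hash, Clone, Copy)]
struct Ident {
    name: Name,
    ctxt: u32,
}

fn main() {
    // Two metavariables spelled the same but introduced in different
    // expansion contexts.
    let a = Ident { name: 7, ctxt: 0 };
    let b = Ident { name: 7, ctxt: 1 };

    let mut by_name: HashMap<Name, &str> = HashMap::new();
    by_name.insert(a.name, "binding from the definition");
    by_name.insert(b.name, "binding from the call site"); // silently shadows the first

    let mut by_ident: HashMap<Ident, &str> = HashMap::new();
    by_ident.insert(a, "binding from the definition");
    by_ident.insert(b, "binding from the call site"); // both bindings survive

    assert_eq!(by_name.len(), 1);
    assert_eq!(by_ident.len(), 2);
}

With `Name` keys the second insertion overwrites the first; with `Ident` keys both bindings coexist, and a lookup only succeeds when it presents the matching syntax context.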

src/libsyntax/ext/tt/macro_parser.rs
index 813afb935762e9e7d9a40ac40e8ee6d6fb29882f..7db03e9a8634a9031cc16ecbf9d5b389506d2e8f 100644 (file)
@@ -79,7 +79,7 @@
 use self::TokenTreeOrTokenTreeVec::*;
 
 use ast;
-use ast::{Name, Ident};
+use ast::Ident;
 use syntax_pos::{self, BytePos, mk_sp, Span};
 use codemap::Spanned;
 use errors::FatalError;
@@ -202,9 +202,9 @@ pub enum NamedMatch {
 }
 
 pub fn nameize(p_s: &ParseSess, ms: &[TokenTree], res: &[Rc<NamedMatch>])
-            -> ParseResult<HashMap<Name, Rc<NamedMatch>>> {
+            -> ParseResult<HashMap<Ident, Rc<NamedMatch>>> {
     fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
-             ret_val: &mut HashMap<Name, Rc<NamedMatch>>, idx: &mut usize)
+             ret_val: &mut HashMap<Ident, Rc<NamedMatch>>, idx: &mut usize)
              -> Result<(), (syntax_pos::Span, String)> {
         match *m {
             TokenTree::Sequence(_, ref seq) => {
@@ -218,7 +218,7 @@ fn n_rec(p_s: &ParseSess, m: &TokenTree, res: &[Rc<NamedMatch>],
                 }
             }
             TokenTree::Token(sp, MatchNt(bind_name, _)) => {
-                match ret_val.entry(bind_name.name) {
+                match ret_val.entry(bind_name) {
                     Vacant(spot) => {
                         spot.insert(res[*idx].clone());
                         *idx += 1;
@@ -257,7 +257,7 @@ pub enum ParseResult<T> {
     Error(syntax_pos::Span, String)
 }
 
-pub type NamedParseResult = ParseResult<HashMap<Name, Rc<NamedMatch>>>;
+pub type NamedParseResult = ParseResult<HashMap<Ident, Rc<NamedMatch>>>;
 pub type PositionalParseResult = ParseResult<Vec<Rc<NamedMatch>>>;
 
 /// Perform a token equality check, ignoring syntax context (that is, an
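In macro_parser.rs, `nameize` is the producer of that map: it walks the matcher and files each matched fragment under its `MatchNt` binder, which is now the whole `Ident` rather than `bind_name.name`. A rough sketch of that shape, with simplified stand-ins for `Ident`, `TokenTree` and `NamedMatch`, and with span and sequence handling omitted:

use std::collections::hash_map::Entry::{Occupied, Vacant};
use std::collections::HashMap;
use std::rc::Rc;

// Stand-ins only; the real types live in `ast` and `ext::tt::macro_parser`.
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
struct Ident { name: u32, ctxt: u32 }

enum TokenTree { MatchNt(Ident), Other }

struct NamedMatch;

// Bindings are keyed by the full `Ident`, so duplicate detection and later
// lookups see the metavariable's syntax context, not just its interned name.
fn nameize(ms: &[TokenTree], res: &[Rc<NamedMatch>])
           -> Result<HashMap<Ident, Rc<NamedMatch>>, String> {
    let mut ret_val = HashMap::new();
    let mut idx = 0;
    for m in ms {
        if let TokenTree::MatchNt(bind_name) = *m {
            match ret_val.entry(bind_name) {
                Vacant(spot) => {
                    spot.insert(res[idx].clone());
                    idx += 1;
                }
                Occupied(..) => return Err("duplicated bind name".to_string()),
            }
        }
    }
    Ok(ret_val)
}

fn main() {
    let x = Ident { name: 1, ctxt: 0 };
    let matches = vec![Rc::new(NamedMatch)];
    assert!(nameize(&[TokenTree::MatchNt(x), TokenTree::Other], &matches).is_ok());
}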
src/libsyntax/ext/tt/macro_rules.rs
index db12ef24f7149fd95fd3ab362af7606ff517793b..d197741e9a367bc39d6726cfafc850afe0def94e 100644 (file)
@@ -302,7 +302,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
     let mut valid = true;
 
     // Extract the arguments:
-    let lhses = match **argument_map.get(&lhs_nm.name).unwrap() {
+    let lhses = match **argument_map.get(&lhs_nm).unwrap() {
         MatchedSeq(ref s, _) => {
             s.iter().map(|m| match **m {
                 MatchedNonterminal(NtTT(ref tt)) => {
@@ -315,7 +315,7 @@ pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
         _ => cx.span_bug(def.span, "wrong-structured lhs")
     };
 
-    let rhses = match **argument_map.get(&rhs_nm.name).unwrap() {
+    let rhses = match **argument_map.get(&rhs_nm).unwrap() {
         MatchedSeq(ref s, _) => {
             s.iter().map(|m| match **m {
                 MatchedNonterminal(NtTT(ref tt)) => (**tt).clone(),
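In macro_rules.rs, `compile` is on the consumer side: it extracts the matched `lhs`/`rhs` sequences with the same `Ident` binders it put into the matcher (`lhs_nm`, `rhs_nm`), instead of projecting to `.name` first. A small sketch of that lookup against an Ident-keyed map, again with stand-in types:

use std::collections::HashMap;
use std::rc::Rc;

// Stand-ins only for the real `Ident` and `NamedMatch`.
#[derive(PartialEq, Eq, Hash, Clone, Copy)]
struct Ident { name: u32, ctxt: u32 }

struct NamedMatch;

fn main() {
    // The binder used to build the matcher is the same value used to read
    // the result back out of the argument map.
    let lhs_nm = Ident { name: 1, ctxt: 0 };
    let mut argument_map: HashMap<Ident, Rc<NamedMatch>> = HashMap::new();
    argument_map.insert(lhs_nm, Rc::new(NamedMatch));

    let lhses = argument_map.get(&lhs_nm).expect("wrong-structured lhs");
    let _ = lhses;
}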
src/libsyntax/ext/tt/transcribe.rs
index 29a300b172e7562972aa22a645dfbff63ee05999..939425378def699da15025a14cff36795f30c360 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 use self::LockstepIterSize::*;
 
-use ast::{Ident, Name};
+use ast::Ident;
 use syntax_pos::{Span, DUMMY_SP};
 use errors::{Handler, DiagnosticBuilder};
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
@@ -38,7 +38,7 @@ pub struct TtReader<'a> {
     /// the unzipped tree:
     stack: Vec<TtFrame>,
     /* for MBE-style macro transcription */
-    interpolations: HashMap<Name, Rc<NamedMatch>>,
+    interpolations: HashMap<Ident, Rc<NamedMatch>>,
     imported_from: Option<Ident>,
 
     // Some => return imported_from as the next token
@@ -57,7 +57,7 @@ pub struct TtReader<'a> {
 /// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
 /// (and should) be None.
 pub fn new_tt_reader(sp_diag: &Handler,
-                     interp: Option<HashMap<Name, Rc<NamedMatch>>>,
+                     interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                      imported_from: Option<Ident>,
                      src: Vec<tokenstream::TokenTree>)
                      -> TtReader {
@@ -71,7 +71,7 @@ pub fn new_tt_reader(sp_diag: &Handler,
 /// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
 /// (and should) be None.
 pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
-                                   interp: Option<HashMap<Name, Rc<NamedMatch>>>,
+                                   interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                                    imported_from: Option<Ident>,
                                    src: Vec<tokenstream::TokenTree>,
                                    desugar_doc_comments: bool)
@@ -119,7 +119,7 @@ fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<Name
 }
 
 fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
-    let matched_opt = r.interpolations.get(&name.name).cloned();
+    let matched_opt = r.interpolations.get(&name).cloned();
     matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
 }