git.lizzy.rs Git - rust.git/commitdiff
Rename TokenTree variants for clarity
authorBrendan Zabarauskas <bjzaba@yahoo.com.au>
Wed, 22 Oct 2014 12:35:32 +0000 (23:35 +1100)
committerBrendan Zabarauskas <bjzaba@yahoo.com.au>
Sat, 25 Oct 2014 22:53:29 +0000 (09:53 +1100)
This should be clearer, and fits in better with the `TTNonterminal` variant.

Renames:

- `TTTok` -> `TTToken`
- `TTDelim` -> `TTDelimited`
- `TTSeq` -> `TTSequence`

14 files changed:
src/doc/guide-plugin.md
src/libsyntax/ast.rs
src/libsyntax/diagnostics/plugin.rs
src/libsyntax/ext/base.rs
src/libsyntax/ext/concat_idents.rs
src/libsyntax/ext/quote.rs
src/libsyntax/ext/trace_macros.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/ext/tt/transcribe.rs
src/libsyntax/fold.rs
src/libsyntax/parse/mod.rs
src/libsyntax/parse/parser.rs
src/libsyntax/print/pprust.rs
src/test/auxiliary/roman_numerals.rs

index 3830a2126e172709ac58831657891e65a1d3b22e..9bf1d29569ca4ab41c948379ff2d129b4327fb2b 100644 (file)
@@ -56,7 +56,7 @@ extern crate rustc;
 
 use syntax::codemap::Span;
 use syntax::parse::token::{IDENT, get_ident};
-use syntax::ast::{TokenTree, TTTok};
+use syntax::ast::{TokenTree, TTToken};
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
 use syntax::ext::build::AstBuilder;  // trait for expr_uint
 use rustc::plugin::Registry;
@@ -71,7 +71,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
         ("I",    1)];
 
     let text = match args {
-        [TTTok(_, IDENT(s, _))] => get_ident(s).to_string(),
+        [TTToken(_, IDENT(s, _))] => get_ident(s).to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);
index be316ba9f4d89d17c1ac8e2148f345ff0fd6c73e..36373638099d2a1d509ac2e864770063c032b541 100644 (file)
@@ -24,6 +24,9 @@
 use std::rc::Rc;
 use serialize::{Encodable, Decodable, Encoder, Decoder};
 
+#[cfg(stage0)]
+pub use self::TTToken as TTTok;
+
 // FIXME #6993: in librustc, uses of "ident" should be replaced
 // by just "Name".
 
@@ -600,9 +603,9 @@ pub struct Delimiter {
 }
 
 impl Delimiter {
-    /// Convert the delimiter to a `TTTok`
+    /// Convert the delimiter to a `TTToken`
     pub fn to_tt(&self) -> TokenTree {
-        TTTok(self.span, self.token.clone())
+        TTToken(self.span, self.token.clone())
     }
 }
 
@@ -614,9 +617,9 @@ pub fn to_tt(&self) -> TokenTree {
 /// If the syntax extension is an MBE macro, it will attempt to match its
 /// LHS "matchers" against the provided token tree, and if it finds a
 /// match, will transcribe the RHS token tree, splicing in any captured
-/// macro_parser::matched_nonterminals into the TTNonterminals it finds.
+/// `macro_parser::matched_nonterminals` into the `TTNonterminal`s it finds.
 ///
-/// The RHS of an MBE macro is the only place a TTNonterminal or TTSeq
+/// The RHS of an MBE macro is the only place a `TTNonterminal` or `TTSequence`
 /// makes any real sense. You could write them elsewhere but nothing
 /// else knows what to do with them, so you'll probably get a syntax
 /// error.
@@ -624,18 +627,18 @@ pub fn to_tt(&self) -> TokenTree {
 #[doc="For macro invocations; parsing is delegated to the macro"]
 pub enum TokenTree {
     /// A single token
-    TTTok(Span, ::parse::token::Token),
+    TTToken(Span, ::parse::token::Token),
     /// A delimited sequence of token trees
     // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TTDelim(Span, Delimiter, Rc<Vec<TokenTree>>, Delimiter),
+    TTDelimited(Span, Delimiter, Rc<Vec<TokenTree>>, Delimiter),
 
     // These only make sense for right-hand-sides of MBE macros:
 
-    /// A kleene-style repetition sequence with a span, a TTForest,
+    /// A kleene-style repetition sequence with a span, a `TTForest`,
     /// an optional separator, and a boolean where true indicates
     /// zero or more (..), and false indicates one or more (+).
     // FIXME(eddyb) #6308 Use Rc<[TokenTree]> after DST.
-    TTSeq(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, bool),
+    TTSequence(Span, Rc<Vec<TokenTree>>, Option<::parse::token::Token>, bool),
 
     /// A syntactic variable that will be filled in by macro expansion.
     TTNonterminal(Span, Ident)
@@ -645,10 +648,10 @@ impl TokenTree {
     /// Returns the `Span` corresponding to this token tree.
     pub fn get_span(&self) -> Span {
         match *self {
-            TTTok(span, _)         => span,
-            TTDelim(span, _, _, _) => span,
-            TTSeq(span, _, _, _)   => span,
-            TTNonterminal(span, _) => span,
+            TTToken(span, _)           => span,
+            TTDelimited(span, _, _, _) => span,
+            TTSequence(span, _, _, _)  => span,
+            TTNonterminal(span, _)     => span,
         }
     }
 }
index d3c39284f55822f5d701d39d1df7176a4cebf859..8ea08c58d065c1dde085a779af6da1e3a9d8b3d3 100644 (file)
@@ -50,7 +50,7 @@ pub fn expand_diagnostic_used<'cx>(ecx: &'cx mut ExtCtxt,
                                    token_tree: &[TokenTree])
                                    -> Box<MacResult+'cx> {
     let code = match token_tree {
-        [ast::TTTok(_, token::IDENT(code, _))] => code,
+        [ast::TTToken(_, token::IDENT(code, _))] => code,
         _ => unreachable!()
     };
     with_registered_diagnostics(|diagnostics| {
@@ -82,12 +82,12 @@ pub fn expand_register_diagnostic<'cx>(ecx: &'cx mut ExtCtxt,
                                        token_tree: &[TokenTree])
                                        -> Box<MacResult+'cx> {
     let (code, description) = match token_tree {
-        [ast::TTTok(_, token::IDENT(ref code, _))] => {
+        [ast::TTToken(_, token::IDENT(ref code, _))] => {
             (code, None)
         },
-        [ast::TTTok(_, token::IDENT(ref code, _)),
-         ast::TTTok(_, token::COMMA),
-         ast::TTTok(_, token::LIT_STR_RAW(description, _))] => {
+        [ast::TTToken(_, token::IDENT(ref code, _)),
+         ast::TTToken(_, token::COMMA),
+         ast::TTToken(_, token::LIT_STR_RAW(description, _))] => {
             (code, Some(description))
         }
         _ => unreachable!()
@@ -110,7 +110,7 @@ pub fn expand_build_diagnostic_array<'cx>(ecx: &'cx mut ExtCtxt,
                                           token_tree: &[TokenTree])
                                           -> Box<MacResult+'cx> {
     let name = match token_tree {
-        [ast::TTTok(_, token::IDENT(ref name, _))] => name,
+        [ast::TTToken(_, token::IDENT(ref name, _))] => name,
         _ => unreachable!()
     };
 
index 5cc2fe03618c46b25df24d5b333d26340f2771fe..b5cc2d95890bb77f071d1d64d58973535ffa36ee 100644 (file)
@@ -684,8 +684,8 @@ pub fn get_single_str_from_tts(cx: &ExtCtxt,
         cx.span_err(sp, format!("{} takes 1 argument.", name).as_slice());
     } else {
         match tts[0] {
-            ast::TTTok(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
-            ast::TTTok(_, token::LIT_STR_RAW(ident, _)) => {
+            ast::TTToken(_, token::LIT_STR(ident)) => return Some(parse::str_lit(ident.as_str())),
+            ast::TTToken(_, token::LIT_STR_RAW(ident, _)) => {
                 return Some(parse::raw_str_lit(ident.as_str()))
             }
             _ => {
index 145412caa0bfe44bbac17fa10e7ac0a512181cd4..e6befdd2aac9224190eedf6c4557ee1646f8f693 100644 (file)
@@ -23,7 +23,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
     for (i, e) in tts.iter().enumerate() {
         if i & 1 == 1 {
             match *e {
-                ast::TTTok(_, token::COMMA) => (),
+                ast::TTToken(_, token::COMMA) => (),
                 _ => {
                     cx.span_err(sp, "concat_idents! expecting comma.");
                     return DummyResult::expr(sp);
@@ -31,7 +31,7 @@ pub fn expand_syntax_ext<'cx>(cx: &mut ExtCtxt, sp: Span, tts: &[ast::TokenTree]
             }
         } else {
             match *e {
-                ast::TTTok(_, token::IDENT(ident,_)) => {
+                ast::TTToken(_, token::IDENT(ident,_)) => {
                     res_str.push_str(token::get_ident(ident).get())
                 }
                 _ => {
index 783c08a44436e3b2603819174c651e990e58191f..93bd66d6eeba6da690dad96de0e9519fa22c9d9a 100644 (file)
@@ -639,10 +639,10 @@ fn mk_token(cx: &ExtCtxt, sp: Span, tok: &token::Token) -> P<ast::Expr> {
 
 fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
     match *tt {
-        ast::TTTok(sp, ref tok) => {
+        ast::TTToken(sp, ref tok) => {
             let e_sp = cx.expr_ident(sp, id_ext("_sp"));
             let e_tok = cx.expr_call(sp,
-                                     mk_ast_path(cx, sp, "TTTok"),
+                                     mk_ast_path(cx, sp, "TTToken"),
                                      vec!(e_sp, mk_token(cx, sp, tok)));
             let e_push =
                 cx.expr_method_call(sp,
@@ -651,14 +651,14 @@ fn mk_tt(cx: &ExtCtxt, _: Span, tt: &ast::TokenTree) -> Vec<P<ast::Stmt>> {
                                     vec!(e_tok));
             vec!(cx.stmt_expr(e_push))
         },
-        ast::TTDelim(sp, ref open, ref tts, ref close) => {
+        ast::TTDelimited(sp, ref open, ref tts, ref close) => {
             let mut stmts = vec![];
             stmts.extend(mk_tt(cx, sp, &open.to_tt()).into_iter());
             stmts.extend(tts.iter().flat_map(|tt| mk_tt(cx, sp, tt).into_iter()));
             stmts.extend(mk_tt(cx, sp, &close.to_tt()).into_iter());
             stmts
         },
-        ast::TTSeq(..) => fail!("TTSeq in quote!"),
+        ast::TTSequence(..) => fail!("TTSequence in quote!"),
         ast::TTNonterminal(sp, ident) => {
             // tt.extend($ident.to_tokens(ext_cx).into_iter())
 
index 1f50eb933bb4e80a1c7b731b9a9b0db2aba178a0..4c3846731f43209f15409a25c179c8602169cdc1 100644 (file)
@@ -20,10 +20,10 @@ pub fn expand_trace_macros(cx: &mut ExtCtxt,
                            tt: &[ast::TokenTree])
                            -> Box<base::MacResult+'static> {
     match tt {
-        [ast::TTTok(_, ref tok)] if is_keyword(keywords::True, tok) => {
+        [ast::TTToken(_, ref tok)] if is_keyword(keywords::True, tok) => {
             cx.set_trace_macros(true);
         }
-        [ast::TTTok(_, ref tok)] if is_keyword(keywords::False, tok) => {
+        [ast::TTToken(_, ref tok)] if is_keyword(keywords::False, tok) => {
             cx.set_trace_macros(false);
         }
         _ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
index fbfe10d004e06d1f5a3cc0db1e4a2a0ec2bc8ee9..4a3828a8043fb6301d36f9ad06c738906ca08e83 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelim};
+use ast::{Ident, Matcher_, Matcher, MatchTok, MatchNonterminal, MatchSeq, TTDelimited};
 use ast;
 use codemap::{Span, Spanned, DUMMY_SP};
 use ext::base::{ExtCtxt, MacResult, MacroDef};
@@ -172,7 +172,7 @@ fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                     MatchedNonterminal(NtTT(ref tt)) => {
                         match **tt {
                             // ignore delimiters
-                            TTDelim(_, _, ref tts, _) => (**tts).clone(),
+                            TTDelimited(_, _, ref tts, _) => (**tts).clone(),
                             _ => cx.span_fatal(sp, "macro rhs must be delimited"),
                         }
                     },
index 472b24be81b93db0cf61fea4bf3150a713e141c6..e705c4d8b33c770438ef2ba69e8ac127af5ec37d 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 use ast;
-use ast::{TokenTree, TTDelim, TTTok, TTSeq, TTNonterminal, Ident};
+use ast::{TokenTree, TTDelimited, TTToken, TTSequence, TTNonterminal, Ident};
 use codemap::{Span, DUMMY_SP};
 use diagnostic::SpanHandler;
 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
@@ -45,7 +45,7 @@ pub struct TtReader<'a> {
 }
 
 /// This can do Macro-By-Example transcription. On the other hand, if
-/// `src` contains no `TTSeq`s and `TTNonterminal`s, `interp` can (and
+/// `src` contains no `TTSequence`s and `TTNonterminal`s, `interp` can (and
 /// should) be none.
 pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
                          interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
@@ -130,12 +130,12 @@ fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
     match *t {
         // The opening and closing delimiters are both tokens, so they are
         // treated as `LisUnconstrained`.
-        TTDelim(_, _, ref tts, _) | TTSeq(_, ref tts, _, _) => {
+        TTDelimited(_, _, ref tts, _) | TTSequence(_, ref tts, _, _) => {
             tts.iter().fold(LisUnconstrained, |size, tt| {
                 size + lockstep_iter_size(tt, r)
             })
         },
-        TTTok(..) => LisUnconstrained,
+        TTToken(..) => LisUnconstrained,
         TTNonterminal(_, name) => match *lookup_cur_matched(r, name) {
             MatchedNonterminal(_) => LisUnconstrained,
             MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name)
@@ -194,15 +194,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
             }
         }
     }
-    loop { /* because it's easiest, this handles `TTDelim` not starting
-              with a `TTTok`, even though it won't happen */
+    loop { /* because it's easiest, this handles `TTDelimited` not starting
+              with a `TTToken`, even though it won't happen */
         let t = {
             let frame = r.stack.last().unwrap();
             // FIXME(pcwalton): Bad copy.
             (*frame.forest)[frame.idx].clone()
         };
         match t {
-            TTDelim(_, open, delimed_tts, close) => {
+            TTDelimited(_, open, delimed_tts, close) => {
                 let mut tts = vec![];
                 tts.push(open.to_tt());
                 tts.extend(delimed_tts.iter().map(|x| (*x).clone()));
@@ -216,15 +216,15 @@ pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
                 });
                 // if this could be 0-length, we'd need to potentially recur here
             }
-            TTTok(sp, tok) => {
+            TTToken(sp, tok) => {
                 r.cur_span = sp;
                 r.cur_tok = tok;
                 r.stack.last_mut().unwrap().idx += 1;
                 return ret_val;
             }
-            TTSeq(sp, tts, sep, zerok) => {
+            TTSequence(sp, tts, sep, zerok) => {
                 // FIXME(pcwalton): Bad copy.
-                match lockstep_iter_size(&TTSeq(sp, tts.clone(), sep.clone(), zerok), r) {
+                match lockstep_iter_size(&TTSequence(sp, tts.clone(), sep.clone(), zerok), r) {
                     LisUnconstrained => {
                         r.sp_diag.span_fatal(
                             sp.clone(), /* blame macro writer */
index ddb2ab49f8b527eadaf3f587d7379bce00564401..9cffce74a095a0f13412cd5bf3c5b6ba9db5b121 100644 (file)
@@ -569,24 +569,24 @@ pub fn noop_fold_arg<T: Folder>(Arg {id, pat, ty}: Arg, fld: &mut T) -> Arg {
 
 pub fn noop_fold_tt<T: Folder>(tt: &TokenTree, fld: &mut T) -> TokenTree {
     match *tt {
-        TTTok(span, ref tok) =>
-            TTTok(span, fld.fold_token(tok.clone())),
-        TTDelim(span, ref open, ref tts, ref close) =>
-            TTDelim(span,
-                    Delimiter {
-                        span: open.span,
-                        token: fld.fold_token(open.token.clone())
-                    },
-                    Rc::new(fld.fold_tts(tts.as_slice())),
-                    Delimiter {
-                        span: close.span,
-                        token: fld.fold_token(close.token.clone())
-                    }),
-        TTSeq(span, ref pattern, ref sep, is_optional) =>
-            TTSeq(span,
-                  Rc::new(fld.fold_tts(pattern.as_slice())),
-                  sep.clone().map(|tok| fld.fold_token(tok)),
-                  is_optional),
+        TTToken(span, ref tok) =>
+            TTToken(span, fld.fold_token(tok.clone())),
+        TTDelimited(span, ref open, ref tts, ref close) =>
+            TTDelimited(span,
+                        Delimiter {
+                            span: open.span,
+                            token: fld.fold_token(open.token.clone())
+                        },
+                        Rc::new(fld.fold_tts(tts.as_slice())),
+                        Delimiter {
+                            span: close.span,
+                            token: fld.fold_token(close.token.clone())
+                        }),
+        TTSequence(span, ref pattern, ref sep, is_optional) =>
+            TTSequence(span,
+                       Rc::new(fld.fold_tts(pattern.as_slice())),
+                       sep.clone().map(|tok| fld.fold_token(tok)),
+                       is_optional),
         TTNonterminal(sp,ref ident) =>
             TTNonterminal(sp,fld.fold_ident(*ident))
     }
index 1c99b608f7aa38df471fa00179e6733eba5de797..a2e4028232100653cf1288dfdec72fcd664becd3 100644 (file)
@@ -793,29 +793,29 @@ fn string_to_tts_macro () {
         let tts = string_to_tts("macro_rules! zip (($a)=>($a))".to_string());
         let tts: &[ast::TokenTree] = tts.as_slice();
         match tts {
-            [ast::TTTok(_, _),
-             ast::TTTok(_, token::NOT),
-             ast::TTTok(_, _),
-             ast::TTDelim(_, ast::TTTok(_, token::LPAREN),
+            [ast::TTToken(_, _),
+             ast::TTToken(_, token::NOT),
+             ast::TTToken(_, _),
+             ast::TTDelimited(_, ast::TTToken(_, token::LPAREN),
                           ref delim_elts,
-                          ast::TTTok(_, token::RPAREN))] => {
+                          ast::TTToken(_, token::RPAREN))] => {
                 let delim_elts: &[ast::TokenTree] = delim_elts.as_slice();
                 match delim_elts {
-                    [ast::TTDelim(_, ast::TTTok(_, token::LPAREN),
+                    [ast::TTDelimited(_, ast::TTToken(_, token::LPAREN),
                                   ref first_set,
-                                  ast::TTTok(_, token::RPAREN)),
-                     ast::TTTok(_, token::FAT_ARROW),
-                     ast::TTDelim(_, ast::TTTok(_, token::LPAREN),
+                                  ast::TTToken(_, token::RPAREN)),
+                     ast::TTToken(_, token::FAT_ARROW),
+                     ast::TTDelimited(_, ast::TTToken(_, token::LPAREN),
                                   ref second_set,
-                                  ast::TTTok(_, token::RPAREN))] => {
+                                  ast::TTToken(_, token::RPAREN))] => {
                         let first_set: &[ast::TokenTree] =
                             first_set.as_slice();
                         match first_set {
-                            [ast::TTTok(_, token::DOLLAR), ast::TTTok(_, _)] => {
+                            [ast::TTToken(_, token::DOLLAR), ast::TTToken(_, _)] => {
                                 let second_set: &[ast::TokenTree] =
                                     second_set.as_slice();
                                 match second_set {
-                                    [ast::TTTok(_, token::DOLLAR), ast::TTTok(_, _)] => {
+                                    [ast::TTToken(_, token::DOLLAR), ast::TTToken(_, _)] => {
                                         assert_eq!("correct","correct")
                                     }
                                     _ => assert_eq!("wrong 4","correct")
@@ -845,7 +845,7 @@ fn string_to_tts_macro () {
         assert_eq!(json::encode(&tts),
         "[\
     {\
-        \"variant\":\"TTTok\",\
+        \"variant\":\"TTToken\",\
         \"fields\":[\
             null,\
             {\
@@ -858,7 +858,7 @@ fn string_to_tts_macro () {
         ]\
     },\
     {\
-        \"variant\":\"TTTok\",\
+        \"variant\":\"TTToken\",\
         \"fields\":[\
             null,\
             {\
@@ -871,18 +871,18 @@ fn string_to_tts_macro () {
         ]\
     },\
     {\
-        \"variant\":\"TTDelim\",\
+        \"variant\":\"TTDelimited\",\
         \"fields\":[\
             [\
                 {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                         null,\
                         \"LPAREN\"\
                     ]\
                 },\
                 {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                         null,\
                         {\
@@ -895,14 +895,14 @@ fn string_to_tts_macro () {
                     ]\
                 },\
                 {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                         null,\
                         \"COLON\"\
                     ]\
                 },\
                 {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                         null,\
                         {\
@@ -915,7 +915,7 @@ fn string_to_tts_macro () {
                     ]\
                 },\
                 {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                         null,\
                         \"RPAREN\"\
@@ -925,18 +925,18 @@ fn string_to_tts_macro () {
         ]\
     },\
     {\
-        \"variant\":\"TTDelim\",\
+        \"variant\":\"TTDelimited\",\
         \"fields\":[\
             [\
                 {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                         null,\
                         \"LBRACE\"\
                     ]\
                 },\
                 {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                         null,\
                         {\
@@ -949,14 +949,14 @@ fn string_to_tts_macro () {
                     ]\
                 },\
                 {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                         null,\
                         \"SEMI\"\
                     ]\
                 },\
                 {\
-                    \"variant\":\"TTTok\",\
+                    \"variant\":\"TTToken\",\
                     \"fields\":[\
                         null,\
                         \"RBRACE\"\
index 005ed2e7ed3741392f62fb345b2b2e20d94b0135..1ed7baa13b42dfc564eb9c4f39bd54565e68205a 100644 (file)
@@ -48,7 +48,7 @@
 use ast::{StructVariantKind, BiSub};
 use ast::StrStyle;
 use ast::{SelfExplicit, SelfRegion, SelfStatic, SelfValue};
-use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TTDelim, TTSeq, TTTok};
+use ast::{Delimiter, TokenTree, TraitItem, TraitRef, TTDelimited, TTSequence, TTToken};
 use ast::{TTNonterminal, TupleVariantKind, Ty, Ty_, TyBot};
 use ast::{TypeField, TyFixedLengthVec, TyClosure, TyProc, TyBareFn};
 use ast::{TyTypeof, TyInfer, TypeMethod};
@@ -2526,7 +2526,7 @@ fn parse_zerok(parser: &mut Parser) -> Option<bool> {
     /// parse a single token tree from the input.
     pub fn parse_token_tree(&mut self) -> TokenTree {
         // FIXME #6994: currently, this is too eager. It
-        // parses token trees but also identifies TTSeq's
+        // parses token trees but also identifies TTSequence's
         // and TTNonterminal's; it's too early to know yet
         // whether something will be a nonterminal or a seq
         // yet.
@@ -2568,13 +2568,13 @@ fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree {
                     let seq = match seq {
                         Spanned { node, .. } => node,
                     };
-                    TTSeq(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z)
+                    TTSequence(mk_sp(sp.lo, p.span.hi), Rc::new(seq), s, z)
                 } else {
                     TTNonterminal(sp, p.parse_ident())
                 }
               }
               _ => {
-                  TTTok(p.span, p.bump_and_get())
+                  TTToken(p.span, p.bump_and_get())
               }
             }
         }
@@ -2615,7 +2615,7 @@ fn parse_non_delim_tt_tok(p: &mut Parser) -> TokenTree {
                 // Expand to cover the entire delimited token tree
                 let span = Span { hi: self.span.hi, ..pre_span };
 
-                TTDelim(span, open, Rc::new(tts), close)
+                TTDelimited(span, open, Rc::new(tts), close)
             }
             _ => parse_non_delim_tt_tok(self)
         }
index 4f4b153d3a964286dac85fa39594fd8cf151ba16..9a102d229718f674021798b4a8323297c01418b0 100644 (file)
@@ -1020,14 +1020,14 @@ pub fn print_struct(&mut self,
     /// expression arguments as expressions). It can be done! I think.
     pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
         match *tt {
-            ast::TTDelim(_, ref open, ref tts, ref close) => {
+            ast::TTDelimited(_, ref open, ref tts, ref close) => {
                 try!(word(&mut self.s, parse::token::to_string(&open.token).as_slice()));
                 try!(space(&mut self.s));
                 try!(self.print_tts(tts.as_slice()));
                 try!(space(&mut self.s));
                 word(&mut self.s, parse::token::to_string(&close.token).as_slice())
             },
-            ast::TTTok(_, ref tk) => {
+            ast::TTToken(_, ref tk) => {
                 try!(word(&mut self.s, parse::token::to_string(tk).as_slice()));
                 match *tk {
                     parse::token::DOC_COMMENT(..) => {
@@ -1036,7 +1036,7 @@ pub fn print_tt(&mut self, tt: &ast::TokenTree) -> IoResult<()> {
                     _ => Ok(())
                 }
             }
-            ast::TTSeq(_, ref tts, ref sep, zerok) => {
+            ast::TTSequence(_, ref tts, ref sep, zerok) => {
                 try!(word(&mut self.s, "$("));
                 for tt_elt in (*tts).iter() {
                     try!(self.print_tt(tt_elt));
index 43842fae70f802412bb5ed2261060ef7e8715ab5..0d5abb8fb5dd9bd1cd414ca7822c734ef755f3c5 100644 (file)
@@ -18,7 +18,7 @@
 
 use syntax::codemap::Span;
 use syntax::parse::token::{IDENT, get_ident};
-use syntax::ast::{TokenTree, TTTok};
+use syntax::ast::{TokenTree, TTToken};
 use syntax::ext::base::{ExtCtxt, MacResult, DummyResult, MacExpr};
 use syntax::ext::build::AstBuilder;  // trait for expr_uint
 use rustc::plugin::Registry;
@@ -39,7 +39,7 @@ fn expand_rn(cx: &mut ExtCtxt, sp: Span, args: &[TokenTree])
         ("I",    1)];
 
     let text = match args {
-        [TTTok(_, IDENT(s, _))] => get_ident(s).to_string(),
+        [TTToken(_, IDENT(s, _))] => get_ident(s).to_string(),
         _ => {
             cx.span_err(sp, "argument should be a single identifier");
             return DummyResult::any(sp);