git.lizzy.rs Git - rust.git/commitdiff
Fix fallout in `rustdoc` and tests.
author Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
Thu, 17 Nov 2016 14:04:36 +0000 (14:04 +0000)
committer Jeffrey Seyfried <jeffrey.seyfried@gmail.com>
Mon, 21 Nov 2016 12:16:46 +0000 (12:16 +0000)
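
This commit mechanically updates `rustdoc`, the `libsyntax` unit tests, and the plugin/fulldeps test auxiliaries for the removal of the old `token` interner helpers in favour of the `Symbol`/`Ident` API: `token::intern` becomes `Symbol::intern`, `token::str_to_ident` becomes `Ident::from_str`, `token::intern_and_get_ident` is also replaced by `Symbol::intern`, and `parse::token::keywords` moves to `syntax::symbol::keywords`. A minimal sketch of the new calls as they appear in the hunks below (compiles only against the in-tree `libsyntax` of this branch; the `demo` function and its literals are illustrative, not part of the commit):

    use syntax::ast::Ident;
    use syntax::symbol::Symbol;

    fn demo() {
        // formerly token::intern("lintme")
        let name: Symbol = Symbol::intern("lintme");
        // formerly token::str_to_ident("zz")
        let ident: Ident = Ident::from_str("zz");
        // Symbol (and hence Ident::name) now compares directly against string literals
        assert!(name == "lintme" && ident.name == "zz");
    }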
30 files changed:
src/librustc/session/config.rs
src/librustc_driver/driver.rs
src/librustc_driver/test.rs
src/librustdoc/clean/mod.rs
src/libsyntax/codemap.rs
src/libsyntax/fold.rs
src/libsyntax/parse/lexer/mod.rs
src/libsyntax/parse/mod.rs
src/libsyntax/print/pprust.rs
src/libsyntax/tokenstream.rs
src/libsyntax/util/parser_testing.rs
src/test/compile-fail-fulldeps/auxiliary/lint_plugin_test.rs
src/test/compile-fail-fulldeps/auxiliary/macro_crate_test.rs
src/test/compile-fail-fulldeps/qquote.rs
src/test/parse-fail/attr-bad-meta.rs
src/test/run-fail-fulldeps/qquote.rs
src/test/run-make/issue-19371/foo.rs
src/test/run-pass-fulldeps/auxiliary/cond_noprelude_plugin.rs
src/test/run-pass-fulldeps/auxiliary/cond_plugin.rs
src/test/run-pass-fulldeps/auxiliary/cond_prelude_plugin.rs
src/test/run-pass-fulldeps/auxiliary/custom_derive_partial_eq.rs
src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin.rs
src/test/run-pass-fulldeps/auxiliary/custom_derive_plugin_attr.rs
src/test/run-pass-fulldeps/auxiliary/lint_plugin_test.rs
src/test/run-pass-fulldeps/auxiliary/macro_crate_test.rs
src/test/run-pass-fulldeps/auxiliary/plugin_args.rs
src/test/run-pass-fulldeps/auxiliary/proc_macro_def.rs
src/test/run-pass-fulldeps/auxiliary/procedural_mbe_matching.rs
src/test/run-pass-fulldeps/macro-quote-1.rs
src/test/run-pass-fulldeps/qquote.rs

index 99c299861e05cb2b5257d452abf6d2d0917abc4b..103e2a949df19a24177df00930985a4f650441fc 100644 (file)
@@ -1764,9 +1764,7 @@ mod tests {
     use std::rc::Rc;
     use super::{OutputType, OutputTypes, Externs};
     use rustc_back::PanicStrategy;
-    use syntax::{ast, attr};
-    use syntax::parse::token::InternedString;
-    use syntax::codemap::dummy_spanned;
+    use syntax::symbol::Symbol;
 
     fn optgroups() -> Vec<OptGroup> {
         super::rustc_optgroups().into_iter()
@@ -1795,9 +1793,7 @@ fn test_switch_implies_cfg_test() {
         let (sessopts, cfg) = build_session_options_and_crate_config(matches);
         let sess = build_session(sessopts, &dep_graph, None, registry, Rc::new(DummyCrateStore));
         let cfg = build_configuration(&sess, cfg);
-        assert!(attr::contains(&cfg, &dummy_spanned(ast::MetaItemKind::Word({
-            InternedString::new("test")
-        }))));
+        assert!(cfg.contains(&(Symbol::intern("test"), None)));
     }
 
     // When the user supplies --test and --cfg test, don't implicitly add
@@ -1818,7 +1814,7 @@ fn test_switch_implies_cfg_test_unless_cfg_test() {
         let sess = build_session(sessopts, &dep_graph, None, registry,
                                  Rc::new(DummyCrateStore));
         let cfg = build_configuration(&sess, cfg);
-        let mut test_items = cfg.iter().filter(|m| m.name() == "test");
+        let mut test_items = cfg.iter().filter(|&&(name, _)| name == "test");
         assert!(test_items.next().is_some());
         assert!(test_items.next().is_none());
     }
index 3a9729d5d45c10dba7fbe3c13c9a06e9b30dbd3b..228119e6cc7da46206696a7bf6452a38361182cc 100644 (file)
@@ -54,7 +54,7 @@
 use syntax::attr;
 use syntax::ext::base::ExtCtxt;
 use syntax::parse::{self, PResult};
-use syntax::symbol::{self, Symbol};
+use syntax::symbol::Symbol;
 use syntax::util::node_count::NodeCounter;
 use syntax;
 use syntax_ext;
index 782c74c8c78c75b17c294458f33ae2a9caace1e9..a4f0e2903847765b197d0685564fda2bc83b9919 100644 (file)
@@ -34,8 +34,8 @@
 use errors;
 use errors::emitter::Emitter;
 use errors::{Level, DiagnosticBuilder};
-use syntax::parse::token;
 use syntax::feature_gate::UnstableFeatures;
+use syntax::symbol::Symbol;
 
 use rustc::hir;
 
@@ -288,11 +288,11 @@ pub fn t_pair(&self, ty1: Ty<'tcx>, ty2: Ty<'tcx>) -> Ty<'tcx> {
 
     pub fn t_param(&self, index: u32) -> Ty<'tcx> {
         let name = format!("T{}", index);
-        self.infcx.tcx.mk_param(index, token::intern(&name[..]))
+        self.infcx.tcx.mk_param(index, Symbol::intern(&name[..]))
     }
 
     pub fn re_early_bound(&self, index: u32, name: &'static str) -> &'tcx ty::Region {
-        let name = token::intern(name);
+        let name = Symbol::intern(name);
         self.infcx.tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
             index: index,
             name: name,
index a141d0e4788dd845509131780a6d83a9c268ff38..2cc1882ce3eae3411adaf179d03132f1127d51af 100644 (file)
@@ -24,9 +24,9 @@
 use syntax::ast;
 use syntax::attr;
 use syntax::codemap::Spanned;
-use syntax::parse::token::keywords;
 use syntax::ptr::P;
 use syntax::print::pprust as syntax_pprust;
+use syntax::symbol::keywords;
 use syntax_pos::{self, DUMMY_SP, Pos};
 
 use rustc_trans::back::link;
@@ -242,7 +242,7 @@ fn clean(&self, cx: &DocContext) -> ExternalCrate {
             }
         });
         ExternalCrate {
-            name: (&cx.sess().cstore.crate_name(self.0)[..]).to_owned(),
+            name: cx.sess().cstore.crate_name(self.0).to_string(),
             attrs: cx.sess().cstore.item_attrs(root).clean(cx),
             primitives: primitives,
         }
@@ -2577,7 +2577,7 @@ fn clean(&self, cx: &DocContext) -> Vec<Item> {
         // #[doc(no_inline)] attribute is present.
         // Don't inline doc(hidden) imports so they can be stripped at a later stage.
         let denied = self.vis != hir::Public || self.attrs.iter().any(|a| {
-            &a.name()[..] == "doc" && match a.meta_item_list() {
+            a.name() == "doc" && match a.meta_item_list() {
                 Some(l) => attr::list_contains_name(l, "no_inline") ||
                            attr::list_contains_name(l, "hidden"),
                 None => false,
index 49012ad036a9a9211a43d6ac0855beee96eae1e5..3cdfa718eabae2a7f85568b8c3dea57bc42cd494 100644 (file)
@@ -871,6 +871,7 @@ fn merge_spans(&self, sp_lhs: Span, sp_rhs: Span) -> Option<Span> {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use symbol::keywords;
     use std::rc::Rc;
 
     #[test]
@@ -1097,10 +1098,9 @@ fn t10() {
     #[test]
     fn t11() {
         // Test span_to_expanded_string works with expansion
-        use ast::Name;
         let cm = init_code_map();
         let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION };
-        let format = ExpnFormat::MacroBang(Name(0u32));
+        let format = ExpnFormat::MacroBang(keywords::Invalid.name());
         let callee = NameAndSpan { format: format,
                                    allow_internal_unstable: false,
                                    span: None };
@@ -1197,11 +1197,9 @@ fn span_substr(&self,
     fn init_expansion_chain(cm: &CodeMap) -> Span {
         // Creates an expansion chain containing two recursive calls
         // root -> expA -> expA -> expB -> expB -> end
-        use ast::Name;
-
         let root = Span { lo: BytePos(0), hi: BytePos(11), expn_id: NO_EXPANSION };
 
-        let format_root = ExpnFormat::MacroBang(Name(0u32));
+        let format_root = ExpnFormat::MacroBang(keywords::Invalid.name());
         let callee_root = NameAndSpan { format: format_root,
                                         allow_internal_unstable: false,
                                         span: Some(root) };
@@ -1210,7 +1208,7 @@ fn init_expansion_chain(cm: &CodeMap) -> Span {
         let id_a1 = cm.record_expansion(info_a1);
         let span_a1 = Span { lo: BytePos(12), hi: BytePos(23), expn_id: id_a1 };
 
-        let format_a = ExpnFormat::MacroBang(Name(1u32));
+        let format_a = ExpnFormat::MacroBang(keywords::As.name());
         let callee_a = NameAndSpan { format: format_a,
                                       allow_internal_unstable: false,
                                       span: Some(span_a1) };
@@ -1223,7 +1221,7 @@ fn init_expansion_chain(cm: &CodeMap) -> Span {
         let id_b1 = cm.record_expansion(info_b1);
         let span_b1 = Span { lo: BytePos(25), hi: BytePos(36), expn_id: id_b1 };
 
-        let format_b = ExpnFormat::MacroBang(Name(2u32));
+        let format_b = ExpnFormat::MacroBang(keywords::Box.name());
         let callee_b = NameAndSpan { format: format_b,
                                      allow_internal_unstable: false,
                                      span: None };
index 44ce8668c549aaf93d8274c9a5f8cc5294663957..ff0255a2f21f23f2d255949c53f8f842030c0334 100644 (file)
@@ -1332,9 +1332,8 @@ pub fn noop_fold_vis<T: Folder>(vis: Visibility, folder: &mut T) -> Visibility {
 #[cfg(test)]
 mod tests {
     use std::io;
-    use ast;
+    use ast::{self, Ident};
     use util::parser_testing::{string_to_crate, matches_codepattern};
-    use parse::token;
     use print::pprust;
     use fold;
     use super::*;
@@ -1350,7 +1349,7 @@ fn fake_print_crate(s: &mut pprust::State,
 
     impl Folder for ToZzIdentFolder {
         fn fold_ident(&mut self, _: ast::Ident) -> ast::Ident {
-            token::str_to_ident("zz")
+            Ident::from_str("zz")
         }
         fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
             fold::noop_fold_mac(mac, self)
index 7048be1478b82d969f967970b6217be854ded0d8..681dec0ab564d1c922cf60320583f43314e06331 100644 (file)
@@ -1702,6 +1702,7 @@ mod tests {
     use super::*;
 
     use ast::Ident;
+    use symbol::Symbol;
     use syntax_pos::{BytePos, Span, NO_EXPANSION};
     use codemap::CodeMap;
     use errors;
@@ -1752,7 +1753,7 @@ fn t1() {
         // read another token:
         let tok3 = string_reader.next_token();
         let tok4 = TokenAndSpan {
-            tok: token::Ident(str_to_ident("main")),
+            tok: token::Ident(Ident::from_str("main")),
             sp: Span {
                 lo: BytePos(24),
                 hi: BytePos(28),
@@ -1774,7 +1775,7 @@ fn check_tokenization(mut string_reader: StringReader, expected: Vec<token::Toke
 
     // make the identifier by looking up the string in the interner
     fn mk_ident(id: &str) -> token::Token {
-        token::Ident(str_to_ident(id))
+        token::Ident(Ident::from_str(id))
     }
 
     #[test]
@@ -1838,7 +1839,7 @@ fn lifetime_name() {
         let cm = Rc::new(CodeMap::new());
         let sh = mk_sh(cm.clone());
         assert_eq!(setup(&cm, &sh, "'abc".to_string()).next_token().tok,
-                   token::Lifetime(token::str_to_ident("'abc")));
+                   token::Lifetime(Ident::from_str("'abc")));
     }
 
     #[test]
@@ -1848,7 +1849,7 @@ fn raw_string() {
         assert_eq!(setup(&cm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
                        .next_token()
                        .tok,
-                   token::Literal(token::StrRaw(token::intern("\"#a\\b\x00c\""), 3), None));
+                   token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
     }
 
     #[test]
index c811514c3073d3eb9a60ace27bb0c0ae2a764c80..be340a5b5aa93e0a9fca53629de5bfe931f30aa3 100644 (file)
@@ -597,12 +597,11 @@ mod tests {
     use std::rc::Rc;
     use syntax_pos::{self, Span, BytePos, Pos, NO_EXPANSION};
     use codemap::Spanned;
-    use ast::{self, PatKind};
+    use ast::{self, Ident, PatKind};
     use abi::Abi;
     use attr::first_attr_value_str_by_name;
     use parse;
     use parse::parser::Parser;
-    use parse::token::{str_to_ident};
     use print::pprust::item_to_string;
     use ptr::P;
     use tokenstream::{self, TokenTree};
@@ -624,7 +623,7 @@ fn sp(a: u32, b: u32) -> Span {
                         global: false,
                         segments: vec![
                             ast::PathSegment {
-                                identifier: str_to_ident("a"),
+                                identifier: Ident::from_str("a"),
                                 parameters: ast::PathParameters::none(),
                             }
                         ],
@@ -643,11 +642,11 @@ fn sp(a: u32, b: u32) -> Span {
                             global: true,
                             segments: vec![
                                 ast::PathSegment {
-                                    identifier: str_to_ident("a"),
+                                    identifier: Ident::from_str("a"),
                                     parameters: ast::PathParameters::none(),
                                 },
                                 ast::PathSegment {
-                                    identifier: str_to_ident("b"),
+                                    identifier: Ident::from_str("b"),
                                     parameters: ast::PathParameters::none(),
                                 }
                             ]
@@ -676,8 +675,8 @@ fn string_to_tts_macro () {
                 Some(&TokenTree::Token(_, token::Ident(name_zip))),
                 Some(&TokenTree::Delimited(_, ref macro_delimed)),
             )
-            if name_macro_rules.name.as_str() == "macro_rules"
-            && name_zip.name.as_str() == "zip" => {
+            if name_macro_rules.name == "macro_rules"
+            && name_zip.name == "zip" => {
                 let tts = &macro_delimed.tts[..];
                 match (tts.len(), tts.get(0), tts.get(1), tts.get(2)) {
                     (
@@ -694,8 +693,7 @@ fn string_to_tts_macro () {
                                 Some(&TokenTree::Token(_, token::Dollar)),
                                 Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
-                            if first_delimed.delim == token::Paren
-                            && ident.name.as_str() == "a" => {},
+                            if first_delimed.delim == token::Paren && ident.name == "a" => {},
                             _ => panic!("value 3: {:?}", **first_delimed),
                         }
                         let tts = &second_delimed.tts[..];
@@ -706,7 +704,7 @@ fn string_to_tts_macro () {
                                 Some(&TokenTree::Token(_, token::Ident(ident))),
                             )
                             if second_delimed.delim == token::Paren
-                            && ident.name.as_str() == "a" => {},
+                            && ident.name == "a" => {},
                             _ => panic!("value 4: {:?}", **second_delimed),
                         }
                     },
@@ -722,17 +720,17 @@ fn string_to_tts_1() {
         let tts = string_to_tts("fn a (b : i32) { b; }".to_string());
 
         let expected = vec![
-            TokenTree::Token(sp(0, 2), token::Ident(str_to_ident("fn"))),
-            TokenTree::Token(sp(3, 4), token::Ident(str_to_ident("a"))),
+            TokenTree::Token(sp(0, 2), token::Ident(Ident::from_str("fn"))),
+            TokenTree::Token(sp(3, 4), token::Ident(Ident::from_str("a"))),
             TokenTree::Delimited(
                 sp(5, 14),
                 Rc::new(tokenstream::Delimited {
                     delim: token::DelimToken::Paren,
                     open_span: sp(5, 6),
                     tts: vec![
-                        TokenTree::Token(sp(6, 7), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(6, 7), token::Ident(Ident::from_str("b"))),
                         TokenTree::Token(sp(8, 9), token::Colon),
-                        TokenTree::Token(sp(10, 13), token::Ident(str_to_ident("i32"))),
+                        TokenTree::Token(sp(10, 13), token::Ident(Ident::from_str("i32"))),
                     ],
                     close_span: sp(13, 14),
                 })),
@@ -742,7 +740,7 @@ fn string_to_tts_1() {
                     delim: token::DelimToken::Brace,
                     open_span: sp(15, 16),
                     tts: vec![
-                        TokenTree::Token(sp(17, 18), token::Ident(str_to_ident("b"))),
+                        TokenTree::Token(sp(17, 18), token::Ident(Ident::from_str("b"))),
                         TokenTree::Token(sp(18, 19), token::Semi),
                     ],
                     close_span: sp(20, 21),
@@ -763,7 +761,7 @@ fn string_to_tts_1() {
                             global: false,
                             segments: vec![
                                 ast::PathSegment {
-                                    identifier: str_to_ident("d"),
+                                    identifier: Ident::from_str("d"),
                                     parameters: ast::PathParameters::none(),
                                 }
                             ],
@@ -786,7 +784,7 @@ fn string_to_tts_1() {
                                global:false,
                                segments: vec![
                                 ast::PathSegment {
-                                    identifier: str_to_ident("b"),
+                                    identifier: Ident::from_str("b"),
                                     parameters: ast::PathParameters::none(),
                                 }
                                ],
@@ -810,7 +808,7 @@ fn parser_done(p: Parser){
                 id: ast::DUMMY_NODE_ID,
                 node: PatKind::Ident(ast::BindingMode::ByValue(ast::Mutability::Immutable),
                                     Spanned{ span:sp(0, 1),
-                                             node: str_to_ident("b")
+                                             node: Ident::from_str("b")
                     },
                                     None),
                 span: sp(0,1)}));
@@ -822,7 +820,7 @@ fn parser_done(p: Parser){
         // this test depends on the intern order of "fn" and "i32"
         assert_eq!(string_to_item("fn a (b : i32) { b; }".to_string()),
                   Some(
-                      P(ast::Item{ident:str_to_ident("a"),
+                      P(ast::Item{ident:Ident::from_str("a"),
                             attrs:Vec::new(),
                             id: ast::DUMMY_NODE_ID,
                             node: ast::ItemKind::Fn(P(ast::FnDecl {
@@ -833,8 +831,7 @@ fn parser_done(p: Parser){
                                         global:false,
                                         segments: vec![
                                             ast::PathSegment {
-                                                identifier:
-                                                    str_to_ident("i32"),
+                                                identifier: Ident::from_str("i32"),
                                                 parameters: ast::PathParameters::none(),
                                             }
                                         ],
@@ -847,7 +844,7 @@ fn parser_done(p: Parser){
                                             ast::BindingMode::ByValue(ast::Mutability::Immutable),
                                                 Spanned{
                                                     span: sp(6,7),
-                                                    node: str_to_ident("b")},
+                                                    node: Ident::from_str("b")},
                                                 None
                                                     ),
                                             span: sp(6,7)
@@ -882,9 +879,7 @@ fn parser_done(p: Parser){
                                                         global:false,
                                                         segments: vec![
                                                             ast::PathSegment {
-                                                                identifier:
-                                                                str_to_ident(
-                                                                    "b"),
+                                                                identifier: Ident::from_str("b"),
                                                                 parameters:
                                                                 ast::PathParameters::none(),
                                                             }
@@ -996,12 +991,12 @@ fn wb() -> c_int { O_WRONLY as c_int }
         let item = parse_item_from_source_str(name.clone(), source, &sess)
             .unwrap().unwrap();
         let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-        assert_eq!(&doc[..], "/// doc comment");
+        assert_eq!(doc, "/// doc comment");
 
         let source = "/// doc comment\r\n/// line 2\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name.clone(), source, &sess)
             .unwrap().unwrap();
-        let docs = item.attrs.iter().filter(|a| &*a.name() == "doc")
+        let docs = item.attrs.iter().filter(|a| a.name() == "doc")
                     .map(|a| a.value_str().unwrap().to_string()).collect::<Vec<_>>();
         let b: &[_] = &["/// doc comment".to_string(), "/// line 2".to_string()];
         assert_eq!(&docs[..], b);
@@ -1009,7 +1004,7 @@ fn wb() -> c_int { O_WRONLY as c_int }
         let source = "/** doc comment\r\n *  with CRLF */\r\nfn foo() {}".to_string();
         let item = parse_item_from_source_str(name, source, &sess).unwrap().unwrap();
         let doc = first_attr_value_str_by_name(&item.attrs, "doc").unwrap();
-        assert_eq!(&doc[..], "/** doc comment\n *  with CRLF */");
+        assert_eq!(doc, "/** doc comment\n *  with CRLF */");
     }
 
     #[test]
index 544b431e61cce8df59fab9a9edc3a6fb15c093ee..3820f5ea90ccc6e147e835b9515f63a12f8164dc 100644 (file)
@@ -3080,12 +3080,11 @@ mod tests {
 
     use ast;
     use codemap;
-    use parse::token;
     use syntax_pos;
 
     #[test]
     fn test_fun_to_string() {
-        let abba_ident = token::str_to_ident("abba");
+        let abba_ident = ast::Ident::from_str("abba");
 
         let decl = ast::FnDecl {
             inputs: Vec::new(),
@@ -3101,7 +3100,7 @@ fn test_fun_to_string() {
 
     #[test]
     fn test_variant_to_string() {
-        let ident = token::str_to_ident("principal_skinner");
+        let ident = ast::Ident::from_str("principal_skinner");
 
         let var = codemap::respan(syntax_pos::DUMMY_SP, ast::Variant_ {
             name: ident,
index ee0c2f80891c7e63dbc069153b070e868e255de1..0d5dcaf339feb086f6dddabb627cf161cce958cb 100644 (file)
@@ -872,8 +872,9 @@ fn index(&self, index: usize) -> &TokenTree {
 #[cfg(test)]
 mod tests {
     use super::*;
+    use syntax::ast::Ident;
     use syntax_pos::{Span, BytePos, NO_EXPANSION, DUMMY_SP};
-    use parse::token::{self, str_to_ident, Token};
+    use parse::token::{self, Token};
     use util::parser_testing::string_to_tts;
     use std::rc::Rc;
 
@@ -968,15 +969,17 @@ fn test_slice_1() {
         let test_res = TokenStream::from_tts(string_to_tts("foo::bar::baz".to_string()))
             .slice(2..3);
         let test_eqs = TokenStream::from_tts(vec![TokenTree::Token(sp(5,8),
-                                                    token::Ident(str_to_ident("bar")))]);
+                                                    token::Ident(Ident::from_str("bar")))]);
         assert_eq!(test_res, test_eqs)
     }
 
     #[test]
     fn test_is_empty() {
         let test0 = TokenStream::from_tts(Vec::new());
-        let test1 = TokenStream::from_tts(vec![TokenTree::Token(sp(0, 1),
-                                                                Token::Ident(str_to_ident("a")))]);
+        let test1 = TokenStream::from_tts(
+            vec![TokenTree::Token(sp(0, 1), Token::Ident(Ident::from_str("a")))]
+        );
+
         let test2 = TokenStream::from_tts(string_to_tts("foo(bar::baz)".to_string()));
 
         assert_eq!(test0.is_empty(), true);
@@ -1036,20 +1039,20 @@ fn test_maybe_delimited() {
         assert_eq!(test0, None);
 
         let test1_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
-                                                        token::Ident(str_to_ident("bar"))),
+                                                        token::Ident(Ident::from_str("bar"))),
                                        TokenTree::Token(sp(4, 6), token::ModSep),
                                        TokenTree::Token(sp(6, 9),
-                                                        token::Ident(str_to_ident("baz")))]);
+                                                        token::Ident(Ident::from_str("baz")))]);
         assert_eq!(test1, Some(test1_expected));
 
         let test2_expected = TokenStream::from_tts(vec![TokenTree::Token(sp(1, 4),
-                                                        token::Ident(str_to_ident("foo"))),
+                                                        token::Ident(Ident::from_str("foo"))),
                                        TokenTree::Token(sp(4, 5), token::Comma),
                                        TokenTree::Token(sp(5, 8),
-                                                        token::Ident(str_to_ident("bar"))),
+                                                        token::Ident(Ident::from_str("bar"))),
                                        TokenTree::Token(sp(8, 9), token::Comma),
                                        TokenTree::Token(sp(9, 12),
-                                                        token::Ident(str_to_ident("baz")))]);
+                                                        token::Ident(Ident::from_str("baz")))]);
         assert_eq!(test2, Some(test2_expected));
 
         assert_eq!(test3, None);
@@ -1070,7 +1073,7 @@ fn test_maybe_ident() {
 
         assert_eq!(test0, None);
         assert_eq!(test1, None);
-        assert_eq!(test2, Some(str_to_ident("foo")));
+        assert_eq!(test2, Some(Ident::from_str("foo")));
         assert_eq!(test3, None);
         assert_eq!(test4, None);
     }
@@ -1080,9 +1083,9 @@ fn test_as_delimited_stream() {
         let test0 = as_paren_delimited_stream(string_to_tts("foo,bar,".to_string()));
         let test1 = as_paren_delimited_stream(string_to_tts("baz(foo,bar)".to_string()));
 
-        let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("foo"))),
+        let test0_tts = vec![TokenTree::Token(sp(0, 3), token::Ident(Ident::from_str("foo"))),
                              TokenTree::Token(sp(3, 4), token::Comma),
-                             TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("bar"))),
+                             TokenTree::Token(sp(4, 7), token::Ident(Ident::from_str("bar"))),
                              TokenTree::Token(sp(7, 8), token::Comma)];
         let test0_stream = TokenStream::from_tts(vec![TokenTree::Delimited(sp(0, 8),
                                                                Rc::new(Delimited {
@@ -1095,11 +1098,11 @@ fn test_as_delimited_stream() {
         assert_eq!(test0, test0_stream);
 
 
-        let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(str_to_ident("foo"))),
+        let test1_tts = vec![TokenTree::Token(sp(4, 7), token::Ident(Ident::from_str("foo"))),
                              TokenTree::Token(sp(7, 8), token::Comma),
-                             TokenTree::Token(sp(8, 11), token::Ident(str_to_ident("bar")))];
+                             TokenTree::Token(sp(8, 11), token::Ident(Ident::from_str("bar")))];
 
-        let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(str_to_ident("baz"))),
+        let test1_parse = vec![TokenTree::Token(sp(0, 3), token::Ident(Ident::from_str("baz"))),
                                TokenTree::Delimited(sp(3, 12),
                                                     Rc::new(Delimited {
                                                         delim: token::DelimToken::Paren,
index 76d3f2a063c1884bd0d49b980ecbafe47c2a4e28..e703dc6b4191c50f933fa82e98777eb9cbf189fe 100644 (file)
@@ -8,11 +8,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use ast;
+use ast::{self, Ident};
 use parse::{ParseSess,PResult,filemap_to_tts};
 use parse::{lexer, new_parser_from_source_str};
 use parse::parser::Parser;
-use parse::token;
 use ptr::P;
 use tokenstream;
 use std::iter::Peekable;
@@ -78,9 +77,9 @@ pub fn string_to_pat(source_str: String) -> P<ast::Pat> {
     })
 }
 
-/// Convert a vector of strings to a vector of ast::Ident's
-pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<ast::Ident> {
-    ids.iter().map(|u| token::str_to_ident(*u)).collect()
+/// Convert a vector of strings to a vector of Ident's
+pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<Ident> {
+    ids.iter().map(|u| Ident::from_str(*u)).collect()
 }
 
 /// Does the given string match the pattern? whitespace in the first string
index 8ea131da338cb1bd61f60fac56e32fe166a8beb6..8647797270f9a841f5092b974faf0ec745509cbb 100644 (file)
@@ -36,7 +36,7 @@ fn get_lints(&self) -> LintArray {
 
 impl EarlyLintPass for Pass {
     fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
-        if it.ident.name.as_str() == "lintme" {
+        if it.ident.name == "lintme" {
             cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
         }
     }
index 409f9dbf03c54f160c7e5157befb172451d9995c..dc88bfc40595f4401da5db5c86f16c2eede12ccd 100644 (file)
@@ -19,8 +19,9 @@
 
 use syntax::ast::{self, Item, MetaItem, ItemKind};
 use syntax::ext::base::*;
-use syntax::parse::{self, token};
+use syntax::parse;
 use syntax::ptr::P;
+use syntax::symbol::Symbol;
 use syntax::tokenstream::TokenTree;
 use syntax_pos::Span;
 use rustc_plugin::Registry;
@@ -34,11 +35,11 @@ pub fn plugin_registrar(reg: &mut Registry) {
     reg.register_macro("make_a_1", expand_make_a_1);
     reg.register_macro("identity", expand_identity);
     reg.register_syntax_extension(
-        token::intern("into_multi_foo"),
+        Symbol::intern("into_multi_foo"),
         // FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
         MultiModifier(Box::new(expand_into_foo_multi)));
     reg.register_syntax_extension(
-        token::intern("duplicate"),
+        Symbol::intern("duplicate"),
         // FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
         MultiDecorator(Box::new(expand_duplicate)));
 }
@@ -102,9 +103,9 @@ fn expand_duplicate(cx: &mut ExtCtxt,
                     push: &mut FnMut(Annotatable))
 {
     let copy_name = match mi.node {
-        ast::MetaItemKind::List(_, ref xs) => {
+        ast::MetaItemKind::List(ref xs) => {
             if let Some(word) = xs[0].word() {
-                token::str_to_ident(&word.name())
+                ast::Ident::with_empty_ctxt(word.name())
             } else {
                 cx.span_err(mi.span, "Expected word");
                 return;
index 4a7033d44b87858e65e5479c8bb9a4b52825536a..8acab3369e48fb3ef3ba71b3c45160eaf2750970 100644 (file)
@@ -16,8 +16,8 @@
 extern crate syntax_pos;
 
 use syntax::ast;
-use syntax::parse;
 use syntax::print::pprust;
+use syntax::symbol::Symbol;
 use syntax_pos::DUMMY_SP;
 
 fn main() {
@@ -30,7 +30,7 @@ fn main() {
     cx.bt_push(syntax::codemap::ExpnInfo {
         call_site: DUMMY_SP,
         callee: syntax::codemap::NameAndSpan {
-            format: syntax::codemap::MacroBang(parse::token::intern("")),
+            format: syntax::codemap::MacroBang(Symbol::intern("")),
             allow_internal_unstable: false,
             span: None,
         }
index 7def91da5eca4da1926f08578a3397398adff655..092adbf29e340cd52394c00e169fdb4dd746544f 100644 (file)
@@ -10,7 +10,7 @@
 
 // compile-flags: -Z parse-only
 
-// error-pattern:expected `]`
+// error-pattern:expected one of `=` or `]`
 
 // asterisk is bogus
 #[attr*]
index d2a16ac750704de8f3bf49cea7cc6aed7e6db5fb..d692bb519c149ec33c24269302f268505e24f507 100644 (file)
@@ -19,8 +19,8 @@
 
 use syntax::ast;
 use syntax::codemap;
-use syntax::parse;
 use syntax::print::pprust;
+use syntax::symbol::Symbol;
 use syntax_pos::DUMMY_SP;
 
 fn main() {
@@ -33,7 +33,7 @@ fn main() {
     cx.bt_push(syntax::codemap::ExpnInfo {
         call_site: DUMMY_SP,
         callee: syntax::codemap::NameAndSpan {
-            format: syntax::codemap::MacroBang(parse::token::intern("")),
+            format: syntax::codemap::MacroBang(Symbol::intern("")),
             allow_internal_unstable: false,
             span: None,
         }
index ed127b017b6a92cdfe800aadd8a5e2e416942a28..0336fe277c51f8eb6f064d6e248d1f0d00e26653 100644 (file)
@@ -25,6 +25,7 @@
 use rustc_metadata::cstore::CStore;
 use rustc_errors::registry::Registry;
 
+use std::collections::HashSet;
 use std::path::PathBuf;
 use std::rc::Rc;
 
@@ -65,7 +66,7 @@ fn basic_sess(sysroot: PathBuf) -> (Session, Rc<CStore>) {
 
 fn compile(code: String, output: PathBuf, sysroot: PathBuf) {
     let (sess, cstore) = basic_sess(sysroot);
-    let cfg = build_configuration(&sess, vec![]);
+    let cfg = build_configuration(&sess, HashSet::new());
     let control = CompileController::basic();
     let input = Input::Str { name: anon_src(), input: code };
     compile_input(&sess, &cstore, &input, &None, &Some(output), None, &control);
index 48919fe876a221cad38d87ba119f86eae212d934..664bb9da89a5765fa19fb4dd4a94f3f5930dcd06 100644 (file)
 
 use proc_macro_tokens::build::ident_eq;
 
+use syntax::ast::Ident;
 use syntax::ext::base::{ExtCtxt, MacResult};
 use syntax::ext::proc_macro_shim::build_block_emitter;
 use syntax::tokenstream::{TokenTree, TokenStream};
-use syntax::parse::token::str_to_ident;
 use syntax::codemap::Span;
 
 use rustc_plugin::Registry;
@@ -57,7 +57,7 @@ fn cond_rec(input: TokenStream) -> TokenStream {
   let test: TokenStream = clause.slice(0..1);
   let rhs: TokenStream = clause.slice_from(1..);
 
-  if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() {
+  if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() {
     qquote!({unquote(rhs)})
   } else {
     qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } })
index 0ea4cec75cdda5ae6b726e59c7f4142664089921..31a5f5968bab6addf652bb73bf1a9fb834826497 100644 (file)
@@ -26,7 +26,7 @@
 use syntax::codemap::{DUMMY_SP, Span};
 use syntax::ext::proc_macro_shim::build_block_emitter;
 use syntax::ext::base::{ExtCtxt, MacResult};
-use syntax::parse::token::{self, Token, DelimToken, keywords, str_to_ident};
+use syntax::parse::token::{self, Token, DelimToken};
 use syntax::tokenstream::{TokenTree, TokenStream};
 
 #[plugin_registrar]
@@ -58,7 +58,7 @@ fn cond_rec(input: TokenStream) -> TokenStream {
   let test: TokenStream = clause.slice(0..1);
   let rhs: TokenStream = clause.slice_from(1..);
 
-  if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() {
+  if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() {
     qquote!({unquote(rhs)})
   } else {
     qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } })
index 169c96b438529166ebbdee1f3eef6d9e0dc800a9..6a2d159a4bdd96e774025f72df238e7ddac836ae 100644 (file)
@@ -52,7 +52,7 @@ fn cond_rec(input: TokenStream) -> TokenStream {
   let test: TokenStream = clause.slice(0..1);
   let rhs: TokenStream = clause.slice_from(1..);
 
-  if ident_eq(&test[0], str_to_ident("else")) || rest.is_empty() {
+  if ident_eq(&test[0], Ident::from_str("else")) || rest.is_empty() {
     qquote!({unquote(rhs)})
   } else {
     qquote!({if unquote(test) { unquote(rhs) } else { cond!(unquote(rest)) } })
index e750d1fb1e3e6364dba8cf64655988c0a5a4bec5..da82545ca721077558db271454d522f587890407 100644 (file)
 use syntax::codemap::Span;
 use syntax::ext::base::*;
 use syntax::ext::build::AstBuilder;
-use syntax::parse::token::{intern, InternedString};
+use syntax::symbol::Symbol;
 use syntax::ptr::P;
 
 #[plugin_registrar]
 pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_syntax_extension(intern("derive_CustomPartialEq"),
+    reg.register_syntax_extension(Symbol::intern("derive_CustomPartialEq"),
                                   MultiDecorator(Box::new(expand_deriving_partial_eq)));
 }
 
@@ -52,7 +52,7 @@ fn cs_eq(cx: &mut ExtCtxt, span: Span, substr: &Substructure) -> P<Expr> {
                 substr)
     }
 
-    let inline = cx.meta_word(span, InternedString::new("inline"));
+    let inline = cx.meta_word(span, Symbol::intern("inline"));
     let attrs = vec![cx.attribute(span, inline)];
     let methods = vec![MethodDef {
         name: "eq",
index 6b688b006bd4a675c8478cb5c64b3bcc0be103f2..07f7d6bad7bf2b40967b8830f4bdc380dafcd0cc 100644 (file)
@@ -23,7 +23,7 @@
 use syntax::ast;
 use syntax::ext::base::{MultiDecorator, ExtCtxt, Annotatable};
 use syntax::ext::build::AstBuilder;
-use syntax::parse::token;
+use syntax::symbol::Symbol;
 use syntax_ext::deriving::generic::{cs_fold, TraitDef, MethodDef, combine_substructure};
 use syntax_ext::deriving::generic::ty::{Literal, LifetimeBounds, Path, borrowed_explicit_self};
 use syntax_pos::Span;
@@ -32,7 +32,7 @@
 #[plugin_registrar]
 pub fn plugin_registrar(reg: &mut Registry) {
     reg.register_syntax_extension(
-        token::intern("derive_TotalSum"),
+        Symbol::intern("derive_TotalSum"),
         MultiDecorator(box expand));
 }
 
@@ -66,7 +66,7 @@ fn expand(cx: &mut ExtCtxt,
                             |cx, span, subexpr, field, _| {
                                 cx.expr_binary(span, ast::BinOpKind::Add, subexpr,
                                     cx.expr_method_call(span, field,
-                                        token::str_to_ident("total_sum"), vec![]))
+                                        ast::Ident::from_str("total_sum"), vec![]))
                             },
                             zero,
                             box |cx, span, _, _| { cx.span_bug(span, "wtf??"); },
index 6b58fee157584c5255bdeb452671d2a9a7046b23..50b16a0e26fb49b79a951e0b8d019a73e5012aa9 100644 (file)
@@ -23,7 +23,7 @@
 use syntax::ast;
 use syntax::ext::base::{MultiDecorator, ExtCtxt, Annotatable};
 use syntax::ext::build::AstBuilder;
-use syntax::parse::token;
+use syntax::symbol::Symbol;
 use syntax::ptr::P;
 use syntax_ext::deriving::generic::{TraitDef, MethodDef, combine_substructure};
 use syntax_ext::deriving::generic::{Substructure, Struct, EnumMatching};
@@ -34,7 +34,7 @@
 #[plugin_registrar]
 pub fn plugin_registrar(reg: &mut Registry) {
     reg.register_syntax_extension(
-        token::intern("derive_TotalSum"),
+        Symbol::intern("derive_TotalSum"),
         MultiDecorator(box expand));
 }
 
index 8ea131da338cb1bd61f60fac56e32fe166a8beb6..8647797270f9a841f5092b974faf0ec745509cbb 100644 (file)
@@ -36,7 +36,7 @@ fn get_lints(&self) -> LintArray {
 
 impl EarlyLintPass for Pass {
     fn check_item(&mut self, cx: &EarlyContext, it: &ast::Item) {
-        if it.ident.name.as_str() == "lintme" {
+        if it.ident.name == "lintme" {
             cx.span_lint(TEST_LINT, it.span, "item is named 'lintme'");
         }
     }
index 7257444ee8703631675b40983b4c914483c23b67..29cc6b7db94741c4b44b1f0b5e66a65cea1a5b0a 100644 (file)
@@ -23,6 +23,7 @@
 use syntax::ext::quote::rt::ToTokens;
 use syntax::parse::{self, token};
 use syntax::ptr::P;
+use syntax::symbol::Symbol;
 use syntax::tokenstream::TokenTree;
 use syntax_pos::Span;
 use rustc_plugin::Registry;
@@ -36,15 +37,15 @@ pub fn plugin_registrar(reg: &mut Registry) {
     reg.register_macro("make_a_1", expand_make_a_1);
     reg.register_macro("identity", expand_identity);
     reg.register_syntax_extension(
-        token::intern("into_multi_foo"),
+        Symbol::intern("into_multi_foo"),
         // FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
         MultiModifier(Box::new(expand_into_foo_multi)));
     reg.register_syntax_extension(
-        token::intern("duplicate"),
+        Symbol::intern("duplicate"),
         // FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
         MultiDecorator(Box::new(expand_duplicate)));
     reg.register_syntax_extension(
-        token::intern("caller"),
+        Symbol::intern("caller"),
         // FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
         MultiDecorator(Box::new(expand_caller)));
 }
@@ -108,9 +109,9 @@ fn expand_duplicate(cx: &mut ExtCtxt,
                     it: &Annotatable,
                     push: &mut FnMut(Annotatable)) {
     let copy_name = match mi.node {
-        ast::MetaItemKind::List(_, ref xs) => {
+        ast::MetaItemKind::List(ref xs) => {
             if let Some(word) = xs[0].word() {
-                token::str_to_ident(&word.name())
+                ast::Ident::with_empty_ctxt(word.name())
             } else {
                 cx.span_err(mi.span, "Expected word");
                 return;
@@ -179,7 +180,7 @@ fn expand_caller(cx: &mut ExtCtxt,
         }
 
         let fn_name = match list[0].name() {
-            Some(name) => token::str_to_ident(&name),
+            Some(name) => ast::Ident::with_empty_ctxt(name),
             None => cx.span_fatal(list[0].span(), "First parameter must be an ident.")
         };
 
index f21c914a76c9c7406bee608dda9d53a7f7a1b5ef..ba2af77cdb297f75c2cbc2e6e66c6ee08c4031d3 100644 (file)
@@ -22,9 +22,9 @@
 use syntax::ast;
 use syntax::ext::build::AstBuilder;
 use syntax::ext::base::{TTMacroExpander, ExtCtxt, MacResult, MacEager, NormalTT};
-use syntax::parse::token;
 use syntax::print::pprust;
 use syntax::ptr::P;
+use syntax::symbol::Symbol;
 use syntax_pos::Span;
 use syntax::tokenstream;
 use rustc_plugin::Registry;
@@ -40,15 +40,14 @@ fn expand<'cx>(&self,
                    _: &[tokenstream::TokenTree]) -> Box<MacResult+'cx> {
         let args = self.args.iter().map(|i| pprust::meta_list_item_to_string(i))
             .collect::<Vec<_>>().join(", ");
-        let interned = token::intern_and_get_ident(&args[..]);
-        MacEager::expr(ecx.expr_str(sp, interned))
+        MacEager::expr(ecx.expr_str(sp, Symbol::intern(&args)))
     }
 }
 
 #[plugin_registrar]
 pub fn plugin_registrar(reg: &mut Registry) {
     let args = reg.args().to_owned();
-    reg.register_syntax_extension(token::intern("plugin_args"),
+    reg.register_syntax_extension(Symbol::intern("plugin_args"),
         // FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
         NormalTT(Box::new(Expander { args: args, }), None, false));
 }
index 9fce19f46f65c7a4bdfafa445ca69a40289f6ca3..f97fb04aadf6ab2149c47be89aaaa677d9e54f0a 100644 (file)
 use rustc_plugin::Registry;
 use syntax::ext::base::SyntaxExtension;
 use syntax::ext::proc_macro_shim::prelude::*;
+use syntax::symbol::Symbol;
 
 #[plugin_registrar]
 pub fn plugin_registrar(reg: &mut Registry) {
-    reg.register_syntax_extension(token::intern("attr_tru"),
+    reg.register_syntax_extension(Symbol::intern("attr_tru"),
                                   SyntaxExtension::AttrProcMacro(Box::new(attr_tru)));
-    reg.register_syntax_extension(token::intern("attr_identity"),
+    reg.register_syntax_extension(Symbol::intern("attr_identity"),
                                   SyntaxExtension::AttrProcMacro(Box::new(attr_identity)));
-    reg.register_syntax_extension(token::intern("tru"),
+    reg.register_syntax_extension(Symbol::intern("tru"),
                                   SyntaxExtension::ProcMacro(Box::new(tru)));
-    reg.register_syntax_extension(token::intern("ret_tru"),
+    reg.register_syntax_extension(Symbol::intern("ret_tru"),
                                   SyntaxExtension::ProcMacro(Box::new(ret_tru)));
-    reg.register_syntax_extension(token::intern("identity"),
+    reg.register_syntax_extension(Symbol::intern("identity"),
                                   SyntaxExtension::ProcMacro(Box::new(identity)));
 }
 
index 6ac0d5ad1a3bc89e9f145548c16511f60afbebbb..2b3857048f36732acc33f5c524aa8cd6f7fa978c 100644 (file)
@@ -18,8 +18,8 @@
 extern crate rustc;
 extern crate rustc_plugin;
 
-use syntax::parse::token::{str_to_ident, NtExpr, NtPat};
-use syntax::ast::{Pat};
+use syntax::parse::token::{NtExpr, NtPat};
+use syntax::ast::{Ident, Pat};
 use syntax::tokenstream::{TokenTree};
 use syntax::ext::base::{ExtCtxt, MacResult, MacEager};
 use syntax::ext::build::AstBuilder;
@@ -44,12 +44,12 @@ fn expand_mbe_matches(cx: &mut ExtCtxt, _: Span, args: &[TokenTree])
         }
     };
 
-    let matched_nt = match *map[&str_to_ident("matched")] {
+    let matched_nt = match *map[&Ident::from_str("matched")] {
         MatchedNonterminal(ref nt) => nt.clone(),
         _ => unreachable!(),
     };
 
-    let mac_expr = match (&*matched_nt, &*map[&str_to_ident("pat")]) {
+    let mac_expr = match (&*matched_nt, &*map[&Ident::from_str("pat")]) {
         (&NtExpr(ref matched_expr), &MatchedSeq(ref pats, seq_sp)) => {
             let pats: Vec<P<Pat>> = pats.iter().map(|pat_nt| {
                 match **pat_nt {
index 914da3f746773fe69d731c8ee2e24de1c426f960..948b20c14771fa48aaf5c60d9e09366daaf75263 100644 (file)
@@ -18,9 +18,6 @@
 use proc_macro_tokens::prelude::*;
 
 extern crate syntax;
-use syntax::ast::Ident;
-use syntax::codemap::DUMMY_SP;
-use syntax::parse::token::{self, Token, keywords, str_to_ident};
 
 fn main() {
     let lex_true = lex("true");
index 7c0c24163fe617a7486fe21cee25ff1206d171b0..b4ed57192ccf69647015b18639b858c7d7d574df 100644 (file)
@@ -16,7 +16,7 @@
 extern crate syntax_pos;
 
 use syntax::print::pprust::*;
-use syntax::parse::token::intern;
+use syntax::symbol::Symbol;
 use syntax_pos::DUMMY_SP;
 
 fn main() {
@@ -29,7 +29,7 @@ fn main() {
     cx.bt_push(syntax::codemap::ExpnInfo {
         call_site: DUMMY_SP,
         callee: syntax::codemap::NameAndSpan {
-            format: syntax::codemap::MacroBang(intern("")),
+            format: syntax::codemap::MacroBang(Symbol::intern("")),
             allow_internal_unstable: false,
             span: None,
         }
@@ -97,7 +97,7 @@ macro_rules! check {
     // quote_meta_item!
 
     let meta = quote_meta_item!(cx, cfg(foo = "bar"));
-    check!(meta_item_to_string, meta, *quote_meta_item!(cx, $meta); r#"cfg(foo = "bar")"#);
+    check!(meta_item_to_string, meta, quote_meta_item!(cx, $meta); r#"cfg(foo = "bar")"#);
 
     let attr = quote_attr!(cx, #![$meta]);
     check!(attribute_to_string, attr; r#"#![cfg(foo = "bar")]"#);