From: Patrick Walton
Date: Fri, 31 Jan 2014 02:46:19 +0000 (-0800)
Subject: libsyntax: Fix tests.
X-Git-Url: https://git.lizzy.rs/?a=commitdiff_plain;h=b890237e798030ce337933b14f777a1c3810d1ea;p=rust.git

libsyntax: Fix tests.
---

diff --git a/src/librustc/front/test.rs b/src/librustc/front/test.rs
index b653af5b624..2704e828ea4 100644
--- a/src/librustc/front/test.rs
+++ b/src/librustc/front/test.rs
@@ -134,7 +134,7 @@ fn nomain(cx: &TestCtxt, item: @ast::Item) -> @ast::Item {
     if !cx.sess.building_library.get() {
         @ast::Item {
             attrs: item.attrs.iter().filter_map(|attr| {
-                if attr.name().equiv(&("main")) {
+                if !attr.name().equiv(&("main")) {
                     Some(*attr)
                 } else {
                     None
diff --git a/src/libsyntax/codemap.rs b/src/libsyntax/codemap.rs
index edaddc8e8bc..2ada3ac16ea 100644
--- a/src/libsyntax/codemap.rs
+++ b/src/libsyntax/codemap.rs
@@ -468,7 +468,7 @@ mod test {
     #[test]
     fn t1 () {
         let cm = CodeMap::new();
-        let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
+        let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line");
         fm.next_line(BytePos(0));
         assert_eq!(&fm.get_line(0),&~"first line.");
         // TESTING BROKEN BEHAVIOR:
@@ -480,7 +480,7 @@ fn t1 () {
     #[should_fail]
     fn t2 () {
         let cm = CodeMap::new();
-        let fm = cm.new_filemap(@"blork.rs",@"first line.\nsecond line");
+        let fm = cm.new_filemap(~"blork.rs",~"first line.\nsecond line");
         // TESTING *REALLY* BROKEN BEHAVIOR:
         fm.next_line(BytePos(0));
         fm.next_line(BytePos(10));
diff --git a/src/libsyntax/ext/expand.rs b/src/libsyntax/ext/expand.rs
index 8e5bc1d61d2..0372be689a7 100644
--- a/src/libsyntax/ext/expand.rs
+++ b/src/libsyntax/ext/expand.rs
@@ -1021,11 +1021,11 @@ fn get_registrar_symbol(&mut self, _: ast::CrateNum) -> Option<~str> {
     // make sure that macros can leave scope
     #[should_fail]
     #[test] fn macros_cant_escape_fns_test () {
-        let src = @"fn bogus() {macro_rules! z (() => (3+4))}\
+        let src = ~"fn bogus() {macro_rules! z (() => (3+4))}\
                     fn inty() -> int { z!() }";
         let sess = parse::new_parse_sess(None);
         let crate_ast = parse::parse_crate_from_source_str(
-            @"",
+            ~"",
             src,
             ~[],sess);
         // should fail:
@@ -1036,11 +1036,11 @@ fn get_registrar_symbol(&mut self, _: ast::CrateNum) -> Option<~str> {
     // make sure that macros can leave scope for modules
     #[should_fail]
     #[test] fn macros_cant_escape_mods_test () {
-        let src = @"mod foo {macro_rules! z (() => (3+4))}\
+        let src = ~"mod foo {macro_rules! z (() => (3+4))}\
                     fn inty() -> int { z!() }";
         let sess = parse::new_parse_sess(None);
         let crate_ast = parse::parse_crate_from_source_str(
-            @"",
+            ~"",
             src,
             ~[],sess);
         // should fail:
@@ -1050,11 +1050,11 @@ fn get_registrar_symbol(&mut self, _: ast::CrateNum) -> Option<~str> {
 
     // macro_escape modules shouldn't cause macros to leave scope
     #[test] fn macros_can_escape_flattened_mods_test () {
-        let src = @"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
+        let src = ~"#[macro_escape] mod foo {macro_rules! z (() => (3+4))}\
                     fn inty() -> int { z!() }";
         let sess = parse::new_parse_sess(None);
         let crate_ast = parse::parse_crate_from_source_str(
-            @"",
+            ~"",
             src,
             ~[], sess);
         // should fail:
@@ -1063,9 +1063,9 @@ fn get_registrar_symbol(&mut self, _: ast::CrateNum) -> Option<~str> {
     }
 
     #[test] fn test_contains_flatten (){
-        let attr1 = make_dummy_attr (@"foo");
-        let attr2 = make_dummy_attr (@"bar");
-        let escape_attr = make_dummy_attr (@"macro_escape");
+        let attr1 = make_dummy_attr ("foo");
+        let attr2 = make_dummy_attr ("bar");
+        let escape_attr = make_dummy_attr ("macro_escape");
         let attrs1 = ~[attr1, escape_attr, attr2];
         assert_eq!(contains_macro_escape (attrs1),true);
         let attrs2 = ~[attr1,attr2];
@@ -1073,13 +1073,13 @@ fn get_registrar_symbol(&mut self, _: ast::CrateNum) -> Option<~str> {
     }
 
     // make a MetaWord outer attribute with the given name
-    fn make_dummy_attr(s: @str) -> ast::Attribute {
+    fn make_dummy_attr(s: &str) -> ast::Attribute {
         Spanned {
             span:codemap::DUMMY_SP,
             node: Attribute_ {
                 style: AttrOuter,
                 value: @Spanned {
-                    node: MetaWord(s),
+                    node: MetaWord(token::intern_and_get_ident(s)),
                     span: codemap::DUMMY_SP,
                 },
                 is_sugared_doc: false,
@@ -1089,7 +1089,7 @@ fn make_dummy_attr(s: @str) -> ast::Attribute {
 
     #[test] fn renaming () {
-        let item_ast = string_to_crate(@"fn f() -> int { a }");
+        let item_ast = string_to_crate(~"fn f() -> int { a }");
         let a_name = intern("a");
         let a2_name = gensym("a2");
         let mut renamer = new_rename_folder(ast::Ident{name:a_name,ctxt:EMPTY_CTXT},
@@ -1128,7 +1128,7 @@ fn renaming () {
     //     pprust::print_crate_(&mut s, crate);
     //}
 
-    fn expand_crate_str(crate_str: @str) -> ast::Crate {
+    fn expand_crate_str(crate_str: ~str) -> ast::Crate {
         let (crate_ast,ps) = string_to_crate_and_sess(crate_str);
         // the cfg argument actually does matter, here...
         let mut loader = ErrLoader;
@@ -1146,7 +1146,7 @@ fn expand_crate_str(crate_str: @str) -> ast::Crate {
     //}
 
     #[test] fn macro_tokens_should_match(){
-        expand_crate_str(@"macro_rules! m((a)=>(13)) fn main(){m!(a);}");
+        expand_crate_str(~"macro_rules! m((a)=>(13)) fn main(){m!(a);}");
     }
 
     // renaming tests expand a crate and then check that the bindings match
@@ -1222,7 +1222,7 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
         let (teststr, bound_connections, bound_ident_check) = match *t {
             (ref str,ref conns, bic) => (str.to_managed(), conns.clone(), bic)
         };
-        let cr = expand_crate_str(teststr.to_managed());
+        let cr = expand_crate_str(teststr.to_owned());
         // find the bindings:
         let mut name_finder = new_name_finder(~[]);
         visit::walk_crate(&mut name_finder,&cr,());
@@ -1285,7 +1285,7 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
     }
 
     #[test] fn fmt_in_macro_used_inside_module_macro() {
-        let crate_str = @"macro_rules! fmt_wrap(($b:expr)=>($b.to_str()))
+        let crate_str = ~"macro_rules! fmt_wrap(($b:expr)=>($b.to_str()))
                           macro_rules! foo_module (() => (mod generated { fn a() { let xx = 147; fmt_wrap!(xx);}}))
                           foo_module!()
                           ";
@@ -1335,7 +1335,7 @@ fn run_renaming_test(t: &RenamingTest, test_idx: uint) {
 
     #[test]
     fn pat_idents(){
-        let pat = string_to_pat(@"(a,Foo{x:c @ (b,9),y:Bar(4,d)})");
+        let pat = string_to_pat(~"(a,Foo{x:c @ (b,9),y:Bar(4,d)})");
         let mut pat_idents = new_name_finder(~[]);
         pat_idents.visit_pat(pat, ());
         assert_eq!(pat_idents.ident_accumulator,
diff --git a/src/libsyntax/fold.rs b/src/libsyntax/fold.rs
index 6e1b2044657..8f5bbc2cdad 100644
--- a/src/libsyntax/fold.rs
+++ b/src/libsyntax/fold.rs
@@ -899,7 +899,8 @@ macro_rules! assert_pred (
     // make sure idents get transformed everywhere
    #[test] fn ident_transformation () {
         let mut zz_fold = ToZzIdentFolder;
-        let ast = string_to_crate(@"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
+        let ast = string_to_crate(
+            ~"#[a] mod b {fn c (d : e, f : g) {h!(i,j,k);l;m}}");
         assert_pred!(matches_codepattern,
                      "matches_codepattern",
                      pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate,
@@ -910,8 +911,9 @@ macro_rules! assert_pred (
     // even inside macro defs....
    #[test] fn ident_transformation_in_defs () {
         let mut zz_fold = ToZzIdentFolder;
-        let ast = string_to_crate(@"macro_rules! a {(b $c:expr $(d $e:token)f+
-=> (g $(d $d $e)+))} ");
+        let ast = string_to_crate(
+            ~"macro_rules! a {(b $c:expr $(d $e:token)f+ => \
+            (g $(d $d $e)+))} ");
         assert_pred!(matches_codepattern,
                      "matches_codepattern",
                      pprust::to_str(&mut zz_fold.fold_crate(ast),fake_print_crate,
diff --git a/src/libsyntax/parse/lexer.rs b/src/libsyntax/parse/lexer.rs
index 469d3d64f24..8c55990289a 100644
--- a/src/libsyntax/parse/lexer.rs
+++ b/src/libsyntax/parse/lexer.rs
@@ -972,9 +972,9 @@ struct Env {
     }
 
     // open a string reader for the given string
-    fn setup(teststr: @str) -> Env {
+    fn setup(teststr: ~str) -> Env {
         let cm = CodeMap::new();
-        let fm = cm.new_filemap(@"zebra.rs", teststr);
+        let fm = cm.new_filemap(~"zebra.rs", teststr);
         let span_handler =
             diagnostic::mk_span_handler(diagnostic::mk_handler(None),@cm);
         Env {
@@ -984,7 +984,7 @@ fn setup(teststr: @str) -> Env {
 
     #[test] fn t1 () {
         let Env {string_reader} =
-            setup(@"/* my source file */ \
+            setup(~"/* my source file */ \
                     fn main() { println!(\"zebra\"); }\n");
         let id = str_to_ident("fn");
         let tok1 = string_reader.next_token();
@@ -1020,14 +1020,14 @@ fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
     }
 
     #[test] fn doublecolonparsing () {
-        let env = setup (@"a b");
+        let env = setup (~"a b");
         check_tokenization (env,
                             ~[mk_ident("a",false),
                               mk_ident("b",false)]);
     }
 
     #[test] fn dcparsing_2 () {
-        let env = setup (@"a::b");
+        let env = setup (~"a::b");
         check_tokenization (env,
                             ~[mk_ident("a",true),
                               token::MOD_SEP,
@@ -1035,7 +1035,7 @@ fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
     }
 
     #[test] fn dcparsing_3 () {
-        let env = setup (@"a ::b");
+        let env = setup (~"a ::b");
         check_tokenization (env,
                             ~[mk_ident("a",false),
                               token::MOD_SEP,
@@ -1043,7 +1043,7 @@ fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
     }
 
     #[test] fn dcparsing_4 () {
-        let env = setup (@"a:: b");
+        let env = setup (~"a:: b");
         check_tokenization (env,
                             ~[mk_ident("a",true),
                               token::MOD_SEP,
@@ -1051,28 +1051,28 @@ fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
     }
 
     #[test] fn character_a() {
-        let env = setup(@"'a'");
+        let env = setup(~"'a'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok,token::LIT_CHAR('a' as u32));
     }
 
     #[test] fn character_space() {
-        let env = setup(@"' '");
+        let env = setup(~"' '");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok, token::LIT_CHAR(' ' as u32));
     }
 
     #[test] fn character_escaped() {
-        let env = setup(@"'\\n'");
+        let env = setup(~"'\\n'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok, token::LIT_CHAR('\n' as u32));
     }
 
     #[test] fn lifetime_name() {
-        let env = setup(@"'abc");
+        let env = setup(~"'abc");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         let id = token::str_to_ident("abc");
@@ -1080,7 +1080,7 @@ fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
     }
 
     #[test] fn raw_string() {
-        let env = setup(@"r###\"\"#a\\b\x00c\"\"###");
+        let env = setup(~"r###\"\"#a\\b\x00c\"\"###");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         let id = token::str_to_ident("\"#a\\b\x00c\"");
@@ -1094,7 +1094,7 @@ fn mk_ident (id: &str, is_mod_name: bool) -> token::Token {
     }
 
     #[test] fn nested_block_comments() {
-        let env = setup(@"/* /* */ */'a'");
+        let env = setup(~"/* /* */ */'a'");
         let TokenAndSpan {tok, sp: _} =
             env.string_reader.next_token();
         assert_eq!(tok,token::LIT_CHAR('a' as u32));
diff --git a/src/libsyntax/parse/mod.rs b/src/libsyntax/parse/mod.rs
index 76ccc53b846..cec9f7c2d9f 100644
--- a/src/libsyntax/parse/mod.rs
+++ b/src/libsyntax/parse/mod.rs
@@ -314,7 +314,7 @@ fn sp(a: u32, b: u32) -> Span {
     }
 
     #[test] fn path_exprs_1() {
-        assert_eq!(string_to_expr(@"a"),
+        assert_eq!(string_to_expr(~"a"),
                    @ast::Expr{
                     id: ast::DUMMY_NODE_ID,
                     node: ast::ExprPath(ast::Path {
@@ -333,7 +333,7 @@ fn sp(a: u32, b: u32) -> Span {
     }
 
     #[test] fn path_exprs_2 () {
-        assert_eq!(string_to_expr(@"::a::b"),
+        assert_eq!(string_to_expr(~"::a::b"),
                    @ast::Expr {
                     id: ast::DUMMY_NODE_ID,
                     node: ast::ExprPath(ast::Path {
@@ -358,12 +358,12 @@ fn sp(a: u32, b: u32) -> Span {
 
     #[should_fail]
     #[test] fn bad_path_expr_1() {
-        string_to_expr(@"::abc::def::return");
+        string_to_expr(~"::abc::def::return");
     }
 
     // check the token-tree-ization of macros
     #[test] fn string_to_tts_macro () {
-        let tts = string_to_tts(@"macro_rules! zip (($a)=>($a))");
+        let tts = string_to_tts(~"macro_rules! zip (($a)=>($a))");
         match tts {
             [ast::TTTok(_,_),
              ast::TTTok(_,token::NOT),
@@ -407,7 +407,7 @@ fn sp(a: u32, b: u32) -> Span {
     }
 
     #[test] fn string_to_tts_1 () {
-        let tts = string_to_tts(@"fn a (b : int) { b; }");
+        let tts = string_to_tts(~"fn a (b : int) { b; }");
         assert_eq!(to_json_str(&tts),
         ~"[\
     {\
@@ -536,7 +536,7 @@ fn sp(a: u32, b: u32) -> Span {
     }
 
     #[test] fn ret_expr() {
-        assert_eq!(string_to_expr(@"return d"),
+        assert_eq!(string_to_expr(~"return d"),
                    @ast::Expr{
                     id: ast::DUMMY_NODE_ID,
                     node:ast::ExprRet(Some(@ast::Expr{
@@ -559,7 +559,7 @@ fn sp(a: u32, b: u32) -> Span {
     }
 
     #[test] fn parse_stmt_1 () {
-        assert_eq!(string_to_stmt(@"b;"),
+        assert_eq!(string_to_stmt(~"b;"),
                    @Spanned{
                        node: ast::StmtExpr(@ast::Expr {
                            id: ast::DUMMY_NODE_ID,
@@ -585,7 +585,7 @@ fn parser_done(p: Parser){
     }
 
     #[test] fn parse_ident_pat () {
-        let mut parser = string_to_parser(@"b");
+        let mut parser = string_to_parser(~"b");
         assert_eq!(parser.parse_pat(),
                    @ast::Pat{id: ast::DUMMY_NODE_ID,
                              node: ast::PatIdent(
@@ -609,7 +609,7 @@ fn parser_done(p: Parser){
     // check the contents of the tt manually:
     #[test] fn parse_fundecl () {
         // this test depends on the intern order of "fn" and "int"
-        assert_eq!(string_to_item(@"fn a (b : int) { b; }"),
+        assert_eq!(string_to_item(~"fn a (b : int) { b; }"),
                   Some(
                       @ast::Item{ident:str_to_ident("a"),
                                  attrs:~[],
@@ -701,12 +701,12 @@ fn parser_done(p: Parser){
 
     #[test] fn parse_exprs () {
         // just make sure that they parse....
-        string_to_expr(@"3 + 4");
-        string_to_expr(@"a::z.froob(b,@(987+3))");
+        string_to_expr(~"3 + 4");
+        string_to_expr(~"a::z.froob(b,@(987+3))");
     }
 
     #[test] fn attrs_fix_bug () {
-        string_to_item(@"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
+        string_to_item(~"pub fn mk_file_writer(path: &Path, flags: &[FileFlag])
                    -> Result<@Writer, ~str> {
     #[cfg(windows)]
     fn wb() -> c_int {
diff --git a/src/libsyntax/util/parser_testing.rs b/src/libsyntax/util/parser_testing.rs
index 02bfa31d0e1..58c2bed7a45 100644
--- a/src/libsyntax/util/parser_testing.rs
+++ b/src/libsyntax/util/parser_testing.rs
@@ -19,7 +19,7 @@
 // and the ParseSess
 pub fn string_to_tts_and_sess (source_str : ~str) -> (~[ast::TokenTree], @ParseSess) {
     let ps = new_parse_sess(None);
-    (filemap_to_tts(ps,string_to_filemap(ps,source_str,@"bogofile")),ps)
+    (filemap_to_tts(ps,string_to_filemap(ps,source_str,~"bogofile")),ps)
 }
 
 // map a string to tts, using a made-up filename:
@@ -30,7 +30,7 @@ pub fn string_to_tts(source_str : ~str) -> ~[ast::TokenTree] {
 
 pub fn string_to_parser_and_sess(source_str: ~str) -> (Parser,@ParseSess) {
     let ps = new_parse_sess(None);
-    (new_parser_from_source_str(ps,~[],@"bogofile",source_str),ps)
+    (new_parser_from_source_str(ps,~[],~"bogofile",source_str),ps)
 }
 
 // map string to parser (via tts)