// src/librustc_expand/parse/lexer/tests.rs
1 use rustc_ast::ast::AttrStyle;
2 use rustc_ast::token::{self, CommentKind, Token, TokenKind};
3 use rustc_data_structures::sync::Lrc;
4 use rustc_errors::{emitter::EmitterWriter, Handler};
5 use rustc_parse::lexer::StringReader;
6 use rustc_session::parse::ParseSess;
7 use rustc_span::source_map::{FilePathMapping, SourceMap};
8 use rustc_span::symbol::Symbol;
9 use rustc_span::with_default_session_globals;
10 use rustc_span::{BytePos, Span};
11
12 use std::io;
13 use std::path::PathBuf;
14
15 fn mk_sess(sm: Lrc<SourceMap>) -> ParseSess {
16     let emitter = EmitterWriter::new(
17         Box::new(io::sink()),
18         Some(sm.clone()),
19         false,
20         false,
21         false,
22         None,
23         false,
24     );
25     ParseSess::with_span_handler(Handler::with_emitter(true, None, Box::new(emitter)), sm)
26 }
27
28 // Creates a string reader for the given string.
29 fn setup<'a>(sm: &SourceMap, sess: &'a ParseSess, teststr: String) -> StringReader<'a> {
30     let sf = sm.new_source_file(PathBuf::from(teststr.clone()).into(), teststr);
31     StringReader::new(sess, sf, None)
32 }
33
#[test]
fn t1() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let src = "/* my source file */ fn main() { println!(\"zebra\"); }\n".to_string();
        let mut reader = setup(&sm, &sh, src);

        // Leading block comment and the whitespace that follows it.
        assert_eq!(reader.next_token(), token::Comment);
        assert_eq!(reader.next_token(), token::Whitespace);

        // `fn` keyword: both kind and span must match a hand-built token.
        let got = reader.next_token();
        let want = Token::new(mk_ident("fn"), Span::with_root_ctxt(BytePos(21), BytePos(23)));
        assert_eq!(got.kind, want.kind);
        assert_eq!(got.span, want.span);

        assert_eq!(reader.next_token(), token::Whitespace);

        // `main` identifier; afterwards the reader sits just past its end.
        let got = reader.next_token();
        assert_eq!(reader.pos(), BytePos(28));
        let want = Token::new(mk_ident("main"), Span::with_root_ctxt(BytePos(24), BytePos(28)));
        assert_eq!(got.kind, want.kind);
        assert_eq!(got.span, want.span);

        // Opening parenthesis of the (empty) argument list.
        assert_eq!(reader.next_token(), token::OpenDelim(token::Paren));
        assert_eq!(reader.pos(), BytePos(29))
    })
}
62
63 // Checks that the given reader produces the desired stream
64 // of tokens (stop checking after exhausting `expected`).
65 fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<TokenKind>) {
66     for expected_tok in &expected {
67         assert_eq!(&string_reader.next_token(), expected_tok);
68     }
69 }
70
71 // Makes the identifier by looking up the string in the interner.
72 fn mk_ident(id: &str) -> TokenKind {
73     token::Ident(Symbol::intern(id), false)
74 }
75
76 fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> TokenKind {
77     TokenKind::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
78 }
79
80 #[test]
#[test]
fn doublecolon_parsing() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // Two identifiers separated by a single space.
        let expected = vec![mk_ident("a"), token::Whitespace, mk_ident("b")];
        check_tokenization(setup(&sm, &sh, "a b".to_string()), expected);
    })
}
91
92 #[test]
#[test]
fn doublecolon_parsing_2() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // `::` lexes as two separate `:` tokens at this level.
        let expected = vec![mk_ident("a"), token::Colon, token::Colon, mk_ident("b")];
        check_tokenization(setup(&sm, &sh, "a::b".to_string()), expected);
    })
}
103
104 #[test]
#[test]
fn doublecolon_parsing_3() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // Whitespace before `::` yields an explicit whitespace token.
        let expected =
            vec![mk_ident("a"), token::Whitespace, token::Colon, token::Colon, mk_ident("b")];
        check_tokenization(setup(&sm, &sh, "a ::b".to_string()), expected);
    })
}
115
116 #[test]
#[test]
fn doublecolon_parsing_4() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // Whitespace after `::` yields an explicit whitespace token.
        let expected =
            vec![mk_ident("a"), token::Colon, token::Colon, token::Whitespace, mk_ident("b")];
        check_tokenization(setup(&sm, &sh, "a:: b".to_string()), expected);
    })
}
127
128 #[test]
#[test]
fn character_a() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // A plain char literal, no suffix.
        let tok = setup(&sm, &sh, "'a'".to_string()).next_token();
        assert_eq!(tok, mk_lit(token::Char, "a", None));
    })
}
136
137 #[test]
#[test]
fn character_space() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // A space is a valid char literal; it must not be eaten as whitespace.
        let tok = setup(&sm, &sh, "' '".to_string()).next_token();
        assert_eq!(tok, mk_lit(token::Char, " ", None));
    })
}
145
146 #[test]
#[test]
fn character_escaped() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // The escape sequence is kept unresolved in the token's symbol
        // (two characters: backslash + `n`).
        let tok = setup(&sm, &sh, "'\\n'".to_string()).next_token();
        assert_eq!(tok, mk_lit(token::Char, "\\n", None));
    })
}
157
158 #[test]
#[test]
fn lifetime_name() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // A quote followed by an identifier (and no closing quote) is a
        // lifetime; the leading `'` is part of the interned symbol.
        let tok = setup(&sm, &sh, "'abc".to_string()).next_token();
        assert_eq!(tok, token::Lifetime(Symbol::intern("'abc")));
    })
}
169
170 #[test]
#[test]
fn raw_string() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // Raw string with three `#`s: embedded quotes, a single `#`, a
        // backslash, and a NUL byte all pass through uninterpreted.
        let tok = setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token();
        assert_eq!(tok, mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
    })
}
181
182 #[test]
#[test]
fn literal_suffixes() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());

        // A literal immediately followed by `suffix` keeps the suffix on the
        // token; with intervening whitespace the suffix is a separate token.
        let check = |input: &str, kind: token::LitKind, contents: &str| {
            assert_eq!(
                setup(&sm, &sh, format!("{}suffix", input)).next_token(),
                mk_lit(kind, contents, Some("suffix")),
            );
            // with a whitespace separator
            assert_eq!(
                setup(&sm, &sh, format!("{} suffix", input)).next_token(),
                mk_lit(kind, contents, None),
            );
        };

        check("'a'", token::Char, "a");
        check("b'a'", token::Byte, "a");
        check("\"a\"", token::Str, "a");
        check("b\"a\"", token::ByteStr, "a");
        check("1234", token::Integer, "1234");
        check("0b101", token::Integer, "0b101");
        check("0xABC", token::Integer, "0xABC");
        check("1.0", token::Float, "1.0");
        check("1.0e10", token::Float, "1.0e10");

        // The obsolete `us` suffix still lexes as an ordinary suffix.
        assert_eq!(
            setup(&sm, &sh, "2us".to_string()).next_token(),
            mk_lit(token::Integer, "2", Some("us")),
        );
        // Raw strings and raw byte strings accept suffixes too.
        assert_eq!(
            setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::StrRaw(3), "raw", Some("suffix")),
        );
        assert_eq!(
            setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token(),
            mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")),
        );
    })
}
225
#[test]
fn nested_block_comments() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        // The inner `/* */` must not terminate the outer block comment:
        // the whole thing is one Comment token, then the char literal.
        let mut reader = setup(&sm, &sh, "/* /* */ */'a'".to_string());
        assert_eq!(reader.next_token(), token::Comment);
        assert_eq!(reader.next_token(), mk_lit(token::Char, "a", None));
    })
}
236
#[test]
fn crlf_comments() {
    with_default_session_globals(|| {
        let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
        let sh = mk_sess(sm.clone());
        let mut reader = setup(&sm, &sh, "// test\r\n/// test\r\n".to_string());

        // The line comment's span ends before the `\r\n` terminator.
        let comment = reader.next_token();
        assert_eq!(comment.kind, token::Comment);
        assert_eq!((comment.span.lo(), comment.span.hi()), (BytePos(0), BytePos(7)));

        assert_eq!(reader.next_token(), token::Whitespace);

        // The doc comment's interned text is likewise stripped of the `\r`.
        let doc = reader.next_token();
        assert_eq!(
            doc,
            token::DocComment(CommentKind::Line, AttrStyle::Outer, Symbol::intern(" test"))
        );
    })
}