use super::*;

use expect_test::{expect, Expect};
5 fn check_raw_str(s: &str, expected: Result<u8, RawStrError>) {
6 let s = &format!("r{}", s);
7 let mut cursor = Cursor::new(s);
9 let res = cursor.raw_double_quoted_string(0);
10 assert_eq!(res, expected);
/// A raw string with zero hashes (`r"abc"`) lexes successfully.
#[test]
fn test_naked_raw_str() {
    check_raw_str(r#""abc""#, Ok(0));
}
/// No opening hashes: `r"abc"#` lexes as a zero-hash raw string; the
/// trailing `#` is left for the next token.
#[test]
fn test_raw_no_start() {
    check_raw_str(r##""abc"#"##, Ok(0));
}
/// More closing than opening hashes still lexes (`Ok(1)`);
/// the surplus `#` is not part of the literal.
#[test]
fn test_too_many_terminators() {
    // this error is handled in the parser later
    check_raw_str(r###"#"abc"##"###, Ok(1));
}
/// Unterminated raw strings report `NoTerminator` with the number of
/// hashes expected, the number found, and (when a partial terminator was
/// seen) its offset.
#[test]
fn test_unterminated() {
    check_raw_str(
        r#"#"abc"#,
        Err(RawStrError::NoTerminator { expected: 1, found: 0, possible_terminator_offset: None }),
    );
    check_raw_str(
        r###"##"abc"#"###,
        Err(RawStrError::NoTerminator {
            expected: 2,
            found: 1,
            possible_terminator_offset: Some(7),
        }),
    );
    // We're looking for "# not just any #
    check_raw_str(
        r###"##"abc#"###,
        Err(RawStrError::NoTerminator { expected: 2, found: 0, possible_terminator_offset: None }),
    );
}
/// A non-`"`/non-`#` character after the hashes is an `InvalidStarter`.
#[test]
fn test_invalid_start() {
    check_raw_str(r##"#~"abc"#"##, Err(RawStrError::InvalidStarter { bad_char: '~' }));
}
/// `r"` alone (zero hashes, no closing quote) must not panic; it reports
/// `NoTerminator` with zero expected hashes.
#[test]
fn test_unterminated_no_pound() {
    // https://github.com/rust-lang/rust/issues/70677
    check_raw_str(
        r#"""#,
        Err(RawStrError::NoTerminator { expected: 0, found: 0, possible_terminator_offset: None }),
    );
}
/// The hash count is stored in a `u8`, so 255 hashes are accepted and
/// 256 produce `TooManyDelimiters`.
#[test]
fn test_too_many_hashes() {
    let max_count = u8::MAX;
    let hashes1 = "#".repeat(max_count as usize);
    let hashes2 = "#".repeat(max_count as usize + 1);
    let middle = "\"abc\"";
    let s1 = [&hashes1, middle, &hashes1].join("");
    let s2 = [&hashes2, middle, &hashes2].join("");

    // Valid number of hashes (255 = 2^8 - 1 = u8::MAX).
    check_raw_str(&s1, Ok(255));

    // One more hash sign (256 = 2^8) becomes too many.
    check_raw_str(&s2, Err(RawStrError::TooManyDelimiters { found: u32::from(max_count) + 1 }));
}
/// A genuine shebang line is stripped; `strip_shebang` returns its length
/// (18 = `"#!/usr/bin/rustrun"`).
#[test]
fn test_valid_shebang() {
    // https://github.com/rust-lang/rust/issues/70528
    let input = "#!/usr/bin/rustrun\nlet x = 5;";
    assert_eq!(strip_shebang(input), Some(18));
}
/// `#!` followed by `[` is an inner attribute, not a shebang — nothing
/// is stripped.
#[test]
fn test_invalid_shebang_valid_rust_syntax() {
    // https://github.com/rust-lang/rust/issues/70528
    let input = "#! [bad_attribute]";
    assert_eq!(strip_shebang(input), None);
}
/// A shebang not on the very first line is not a shebang.
#[test]
fn test_shebang_second_line() {
    // Because shebangs are interpreted by the kernel, they must be on the first line
    let input = "\n#!/bin/bash";
    assert_eq!(strip_shebang(input), None);
}
/// Whitespace between `#!` and the interpreter path is allowed; the whole
/// line is stripped.
#[test]
fn test_shebang_space() {
    let input = "#! /bin/bash";
    assert_eq!(strip_shebang(input), Some(input.len()));
}
/// `#!` with only whitespace before the newline, followed by `[`, reads
/// as an attribute — not stripped.
#[test]
fn test_shebang_empty_shebang() {
    let input = "#! \n[attribute(foo)]";
    assert_eq!(strip_shebang(input), None);
}
/// `#!//...` — the rest of the line is a line comment, so this is not
/// treated as a shebang.
#[test]
fn test_invalid_shebang_comment() {
    let input = "#!//bin/ami/a/comment\n[";
    assert_eq!(strip_shebang(input), None)
}
/// `#!/*...*/` followed by `[attribute` — a block comment plus an
/// attribute opener, so not a shebang.
#[test]
fn test_invalid_shebang_another_comment() {
    let input = "#!/*bin/ami/a/comment*/\n[attribute";
    assert_eq!(strip_shebang(input), None)
}
/// Same comment-shaped first line, but followed by ordinary Rust — now it
/// IS stripped as a shebang (23 = length of the first line).
#[test]
fn test_shebang_valid_rust_after() {
    let input = "#!/*bin/ami/a/comment*/\npub fn main() {}";
    assert_eq!(strip_shebang(input), Some(23))
}
/// A real shebang immediately followed by an inner attribute on the next
/// line is still stripped (19 = `"#!/bin/rust-scripts"`).
#[test]
fn test_shebang_followed_by_attrib() {
    let input = "#!/bin/rust-scripts\n#![allow_unused(true)]";
    assert_eq!(strip_shebang(input), Some(19));
}
137 fn check_lexing(src: &str, expect: Expect) {
138 let actual: String = tokenize(src).map(|token| format!("{:?}\n", token)).collect();
139 expect.assert_eq(&actual)
/// End-to-end sanity check of `tokenize` on a small but representative program.
#[test]
fn smoke_test() {
    check_lexing(
        "/* my source file */ fn main() { println!(\"zebra\"); }\n",
        expect![[r#"
            Token { kind: BlockComment { doc_style: None, terminated: true }, len: 20 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Ident, len: 2 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Ident, len: 4 }
            Token { kind: OpenParen, len: 1 }
            Token { kind: CloseParen, len: 1 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: OpenBrace, len: 1 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Ident, len: 7 }
            Token { kind: Bang, len: 1 }
            Token { kind: OpenParen, len: 1 }
            Token { kind: Literal { kind: Str { terminated: true }, suffix_start: 7 }, len: 7 }
            Token { kind: CloseParen, len: 1 }
            Token { kind: Semi, len: 1 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: CloseBrace, len: 1 }
            Token { kind: Whitespace, len: 1 }
        "#]],
    )
}
/// Every comment flavor: line vs block, plain vs outer (`///`, `/**`)
/// vs inner (`//!`, `/*!`) doc comments. `////` and `/***` are NOT doc
/// comments. Input lines chosen so each token's `len` matches the snapshot.
#[test]
fn comment_flavors() {
    check_lexing(
        r"
// line
//// line as well
/// outer doc line
//! inner doc line
/* block */
/**/
/*** also block */
/** outer doc block */
/*! inner doc block */
",
        expect![[r#"
            Token { kind: Whitespace, len: 1 }
            Token { kind: LineComment { doc_style: None }, len: 7 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: LineComment { doc_style: None }, len: 17 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: LineComment { doc_style: Some(Outer) }, len: 18 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: LineComment { doc_style: Some(Inner) }, len: 18 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: BlockComment { doc_style: None, terminated: true }, len: 11 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: BlockComment { doc_style: None, terminated: true }, len: 4 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: BlockComment { doc_style: None, terminated: true }, len: 18 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: BlockComment { doc_style: Some(Outer), terminated: true }, len: 22 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: BlockComment { doc_style: Some(Inner), terminated: true }, len: 22 }
            Token { kind: Whitespace, len: 1 }
        "#]],
    )
}
/// Block comments nest: the comment only ends at the matching outer `*/`
/// (len 11 covers `/* /* */ */`), then `'a'` lexes as a char.
#[test]
fn nested_block_comments() {
    check_lexing(
        "/* /* */ */'a'",
        expect![[r#"
            Token { kind: BlockComment { doc_style: None, terminated: true }, len: 11 }
            Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
        "#]],
    )
}
/// Char literals: plain, space, and an escaped `'\n'` (len 4).
/// NOTE(review): fn header was missing from this fragment; name and input
/// reconstructed to match the visible token lengths — confirm upstream.
#[test]
fn characters() {
    check_lexing(
        "'a' ' ' '\\n'",
        expect![[r#"
            Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 4 }, len: 4 }
        "#]],
    );
}
/// An unclosed quote followed by ident chars is a lifetime (`'abc`),
/// not a char literal.
/// NOTE(review): fn header was missing from this fragment; input
/// reconstructed to match the visible token — confirm upstream.
#[test]
fn lifetime() {
    check_lexing(
        "'abc",
        expect![[r#"
            Token { kind: Lifetime { starts_with_number: false }, len: 4 }
        "#]],
    );
}
/// A three-hash raw string containing a quote, hash, backslash, and NUL;
/// `n_hashes` records the delimiter count and the whole literal is len 17.
#[test]
fn raw_string() {
    check_lexing(
        "r###\"\"#a\\b\x00c\"\"###",
        expect![[r#"
            Token { kind: Literal { kind: RawStr { n_hashes: Some(3) }, suffix_start: 17 }, len: 17 }
        "#]],
    )
}
/// Literals with (and without) suffixes: `suffix_start` marks where the
/// suffix begins within each token. Input lines reconstructed to match
/// the visible token kinds and lengths.
#[test]
fn literal_suffixes() {
    check_lexing(
        r####"
'a'
b'a'
"a"
b"a"
1234
0b101
0xABC
1.0
1.0e10
2us
r###"raw"###suffix
br###"raw"###suffix
"####,
        expect![[r#"
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Char { terminated: true }, suffix_start: 3 }, len: 3 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Byte { terminated: true }, suffix_start: 4 }, len: 4 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Str { terminated: true }, suffix_start: 3 }, len: 3 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: ByteStr { terminated: true }, suffix_start: 4 }, len: 4 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Int { base: Decimal, empty_int: false }, suffix_start: 4 }, len: 4 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Int { base: Binary, empty_int: false }, suffix_start: 5 }, len: 5 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Int { base: Hexadecimal, empty_int: false }, suffix_start: 5 }, len: 5 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Float { base: Decimal, empty_exponent: false }, suffix_start: 3 }, len: 3 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Float { base: Decimal, empty_exponent: false }, suffix_start: 6 }, len: 6 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: Int { base: Decimal, empty_int: false }, suffix_start: 1 }, len: 3 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: RawStr { n_hashes: Some(3) }, suffix_start: 12 }, len: 18 }
            Token { kind: Whitespace, len: 1 }
            Token { kind: Literal { kind: RawByteStr { n_hashes: Some(3) }, suffix_start: 13 }, len: 19 }
            Token { kind: Whitespace, len: 1 }
        "#]],
    )
}