1 // Copyright 2012-2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
11 use std::cmp::Ordering::{Equal, Greater, Less};
12 use std::str::{Utf8Error, from_utf8};
// NOTE(review): the lines below are fragments of several test fns (likely
// test_le/test_len/test_find/test_rfind and a chars().collect test) whose
// `#[test]` headers and closing braces were lost in extraction; each line
// still carries its original file line number. Kept byte-identical.
18 assert!("foo" <= "foo");
19 assert!("foo" != "bar");
// str::len counts UTF-8 bytes, not chars: 1/2/2/3/4 bytes below.
25 assert_eq!("".len(), 0);
26 assert_eq!("hello world".len(), 11);
27 assert_eq!("\x63".len(), 1);
28 assert_eq!("\u{a2}".len(), 2);
29 assert_eq!("\u{3c0}".len(), 2);
30 assert_eq!("\u{2620}".len(), 3);
31 assert_eq!("\u{1d11e}".len(), 4);
// chars().count() counts scalar values, so each escape above is one char.
33 assert_eq!("".chars().count(), 0);
34 assert_eq!("hello world".chars().count(), 11);
35 assert_eq!("\x63".chars().count(), 1);
36 assert_eq!("\u{a2}".chars().count(), 1);
37 assert_eq!("\u{3c0}".chars().count(), 1);
38 assert_eq!("\u{2620}".chars().count(), 1);
39 assert_eq!("\u{1d11e}".chars().count(), 1);
40 assert_eq!("ประเทศไทย中华Việt Nam".chars().count(), 19);
// NOTE(review): `str::width` was an unstable pre-1.0 API (now the
// unicode-width crate). 10 for the 5-column ASCII "hello" looks wrong —
// verify against the original implementation before trusting these values.
42 assert_eq!("hello".width(false), 10);
43 assert_eq!("hello".width(true), 10);
44 assert_eq!("\0\0\0\0\0".width(false), 0);
45 assert_eq!("\0\0\0\0\0".width(true), 0);
46 assert_eq!("".width(false), 0);
47 assert_eq!("".width(true), 0);
// U+2081.. are subscript digits: ambiguous-width, so 4 narrow vs 8 wide (CJK).
48 assert_eq!("\u{2081}\u{2082}\u{2083}\u{2084}".width(false), 4);
49 assert_eq!("\u{2081}\u{2082}\u{2083}\u{2084}".width(true), 8);
// find/rfind return Some(byte offset) of the first/last match.
54 assert_eq!("hello".find('l'), Some(2));
55 assert_eq!("hello".find(|c:char| c == 'o'), Some(4));
56 assert!("hello".find('x').is_none());
57 assert!("hello".find(|c:char| c == 'x').is_none());
58 assert_eq!("ประเทศไทย中华Việt Nam".find('华'), Some(30));
59 assert_eq!("ประเทศไทย中华Việt Nam".find(|c: char| c == '华'), Some(30));
64 assert_eq!("hello".rfind('l'), Some(3));
65 assert_eq!("hello".rfind(|c:char| c == 'o'), Some(4));
66 assert!("hello".rfind('x').is_none());
67 assert!("hello".rfind(|c:char| c == 'x').is_none());
68 assert_eq!("ประเทศไทย中华Việt Nam".rfind('华'), Some(30));
69 assert_eq!("ประเทศไทย中华Việt Nam".rfind(|c: char| c == '华'), Some(30));
// chars().collect::<String>() round-trip fragments (asserts lost in extraction).
74 let empty = String::from("");
75 let s: String = empty.chars().collect();
77 let data = String::from("ประเทศไทย中");
78 let s: String = data.chars().collect();
/// `String::into_bytes` hands back the underlying UTF-8 buffer unchanged.
///
/// Reconstructed from a garbled extraction: the baked-in line numbers were
/// stripped and the lost `#[test]` attribute and closing brace restored.
#[test]
fn test_into_bytes() {
    let data = String::from("asdf");
    let buf = data.into_bytes();
    assert_eq!(buf, b"asdf");
}
// NOTE(review): fragment of a find-&str test. The header and the binding of
// the first `data` (used at lines 96-98 below) were lost in extraction;
// presumably it was a short ASCII string like "abcabc" — confirm upstream.
92 assert_eq!("".find(""), Some(0));
93 assert!("banana".find("apple pie").is_none());
96 assert_eq!(data[0..6].find("ab"), Some(0));
// Offsets returned by find are relative to the start of the subslice.
97 assert_eq!(data[2..6].find("ab"), Some(3 - 2));
98 assert!(data[2..4].find("ab").is_none());
100 let string = "ประเทศไทย中华Việt Nam";
101 let mut data = String::from(string);
102 data.push_str(string);
103 assert!(data.find("ไท华").is_none());
104 assert_eq!(data[0..43].find(""), Some(0));
105 assert_eq!(data[6..43].find(""), Some(6 - 6));
107 assert_eq!(data[0..43].find("ประ"), Some( 0));
108 assert_eq!(data[0..43].find("ทศไ"), Some(12));
109 assert_eq!(data[0..43].find("ย中"), Some(24));
110 assert_eq!(data[0..43].find("iệt"), Some(34));
111 assert_eq!(data[0..43].find("Nam"), Some(40));
// Second copy of `string` starts at byte 43; offsets are subslice-relative.
113 assert_eq!(data[43..86].find("ประ"), Some(43 - 43));
114 assert_eq!(data[43..86].find("ทศไ"), Some(55 - 43));
115 assert_eq!(data[43..86].find("ย中"), Some(67 - 43));
116 assert_eq!(data[43..86].find("iệt"), Some(77 - 43));
117 assert_eq!(data[43..86].find("Nam"), Some(83 - 43));
// NOTE(review): `str::slice_chars` (char-index slicing) was removed before
// Rust 1.0 stabilization; this fragment is also missing the closing braces
// of `t` and most of the test body.
121 fn test_slice_chars() {
122 fn t(a: &str, b: &str, start: usize) {
123 assert_eq!(a.slice_chars(start, start + b.chars().count()), b);
126 t("hello", "llo", 2);
130 assert_eq!("ะเทศไท", "ประเทศไทย中华Việt Nam".slice_chars(2, 8));
/// Test helper: build an owned `String` from a string slice.
fn s(x: &str) -> String {
    String::from(x)
}
// NOTE(review): `test_concat!`/`test_connect!` macro definitions are missing
// their closing `}}` arms, and every test fn below is missing its `#[test]`
// attribute and closing brace (extraction gaps). `SliceConcatExt::connect`
// was later renamed `join`. Kept byte-identical.
135 macro_rules! test_concat {
136 ($expected: expr, $string: expr) => {
138 let s: String = $string.concat();
139 assert_eq!($expected, s);
// concat() works for both Vec<String> and Vec<&str> element types.
145 fn test_concat_for_different_types() {
146 test_concat!("ab", vec![s("a"), s("b")]);
147 test_concat!("ab", vec!["a", "b"]);
148 test_concat!("ab", vec!["a", "b"]);
149 test_concat!("ab", vec![s("a"), s("b")]);
153 fn test_concat_for_different_lengths() {
154 let empty: &[&str] = &[];
155 test_concat!("", empty);
156 test_concat!("a", ["a"]);
157 test_concat!("ab", ["a", "b"]);
158 test_concat!("abc", ["", "a", "bc"]);
161 macro_rules! test_connect {
162 ($expected: expr, $string: expr, $delim: expr) => {
164 let s = $string.connect($delim);
165 assert_eq!($expected, s);
171 fn test_connect_for_different_types() {
172 test_connect!("a-b", ["a", "b"], "-");
173 let hyphen = "-".to_string();
// &*hyphen reborrows the String as &str for the delimiter parameter.
174 test_connect!("a-b", [s("a"), s("b")], &*hyphen);
175 test_connect!("a-b", vec!["a", "b"], &*hyphen);
176 test_connect!("a-b", &*vec!["a", "b"], "-");
177 test_connect!("a-b", vec![s("a"), s("b")], "-");
181 fn test_connect_for_different_lengths() {
182 let empty: &[&str] = &[];
183 test_connect!("", empty, "-");
184 test_connect!("a", ["a"], "-");
185 test_connect!("a-b", ["a", "b"], "-");
// A leading empty element still contributes a delimiter.
186 test_connect!("-a-bc", ["", "a", "bc"], "-");
// slice_unchecked skips the UTF-8 boundary and bounds checks (unsafe).
190 fn test_unsafe_slice() {
191 assert_eq!("ab", unsafe {"abc".slice_unchecked(0, 2)});
192 assert_eq!("bc", unsafe {"abc".slice_unchecked(1, 3)});
193 assert_eq!("", unsafe {"abc".slice_unchecked(1, 1)});
// NOTE(review): the loop bodies of both helper fns (presumably pushing the
// chunk 100_000 / 100_000 times) and their returns were lost in extraction.
194 fn a_million_letter_a() -> String {
196 let mut rs = String::new();
198 rs.push_str("aaaaaaaaaa");
203 fn half_a_million_letter_a() -> String {
205 let mut rs = String::new();
207 rs.push_str("aaaaa");
212 let letters = a_million_letter_a();
213 assert!(half_a_million_letter_a() ==
214 unsafe {String::from(letters.slice_unchecked(
/// `str::starts_with` with empty, ASCII, and multi-byte UTF-8 prefixes.
///
/// Reconstructed from a garbled extraction (baked-in line numbers removed,
/// `#[test]` and closing brace restored, redundant outer parens dropped).
#[test]
fn test_starts_with() {
    // Every string starts with the empty string, including "" itself.
    assert!("".starts_with(""));
    assert!("abc".starts_with(""));
    assert!("abc".starts_with("a"));
    // A prefix longer than the string can never match.
    assert!(!"a".starts_with("abc"));
    assert!(!"".starts_with("abc"));
    // Multi-byte prefixes match on whole UTF-8 sequences.
    assert!(!"ödd".starts_with("-"));
    assert!("ödd".starts_with("öd"));
}
/// `str::ends_with` with empty, ASCII, and multi-byte UTF-8 suffixes.
///
/// Reconstructed from a garbled extraction (baked-in line numbers removed,
/// `#[test]` and closing brace restored, redundant outer parens dropped).
#[test]
fn test_ends_with() {
    // Every string ends with the empty string, including "" itself.
    assert!("".ends_with(""));
    assert!("abc".ends_with(""));
    assert!("abc".ends_with("c"));
    // A suffix longer than the string can never match.
    assert!(!"a".ends_with("abc"));
    assert!(!"".ends_with("abc"));
    // Multi-byte suffixes match on whole UTF-8 sequences.
    assert!(!"ddö".ends_with("-"));
    assert!("ddö".ends_with("dö"));
}
// NOTE(review): fragments of test_is_empty and the replace tests. The
// bindings of the search patterns (`a`, `test`, `b`, `c`, `d`) were lost in
// extraction, so several asserts below reference names with no visible
// definition. Kept byte-identical.
243 assert!("".is_empty());
244 assert!(!"a".is_empty());
// `a` is presumably "a" given the expected outputs — confirm upstream.
250 assert_eq!("".replace(a, "b"), String::from(""));
251 assert_eq!("a".replace(a, "b"), String::from("b"));
252 assert_eq!("ab".replace(a, "b"), String::from("bb"));
// `test` is presumably "test" — confirm upstream.
254 assert!(" test test ".replace(test, "toast") ==
255 String::from(" toast toast "));
256 assert_eq!(" test test ".replace(test, ""), String::from(" "));
// test_replace_2a/b/c/d: replacing a Thai prefix/infix/suffix with Arabic
// text; the pattern bindings (lines 263-264, 273-274, 283-284, 293-294)
// are missing from this extraction.
260 fn test_replace_2a() {
261 let data = "ประเทศไทย中华";
262 let repl = "دولة الكويت";
265 let a2 = "دولة الكويتทศไทย中华";
266 assert_eq!(data.replace(a, repl), a2);
270 fn test_replace_2b() {
271 let data = "ประเทศไทย中华";
272 let repl = "دولة الكويت";
275 let b2 = "ปรدولة الكويتทศไทย中华";
276 assert_eq!(data.replace(b, repl), b2);
280 fn test_replace_2c() {
281 let data = "ประเทศไทย中华";
282 let repl = "دولة الكويت";
285 let c2 = "ประเทศไทยدولة الكويت";
286 assert_eq!(data.replace(c, repl), c2);
// 2d: pattern `d` does not occur, so replace returns the input unchanged.
290 fn test_replace_2d() {
291 let data = "ประเทศไทย中华";
292 let repl = "دولة الكويت";
295 assert_eq!(data.replace(d, repl), data);
// NOTE(review): fragments of test_slice and test_slice_2 — byte-range
// indexing of &str, which must land on char boundaries. Headers, helper-fn
// loop bodies, and closing braces were lost in extraction.
300 assert_eq!("ab", &"abc"[0..2]);
301 assert_eq!("bc", &"abc"[1..3]);
302 assert_eq!("", &"abc"[1..1]);
// U+65E5 is 3 bytes in UTF-8, so [0..3] isolates the first char.
303 assert_eq!("\u{65e5}", &"\u{65e5}\u{672c}"[0..3]);
305 let data = "ประเทศไทย中华";
306 assert_eq!("ป", &data[0..3]);
307 assert_eq!("ร", &data[3..6]);
308 assert_eq!("", &data[3..3]);
309 assert_eq!("华", &data[30..33]);
// NOTE(review): both helper fns are missing their loop bodies and returns
// (presumably repeat the chunk 100_000 times, as the names suggest).
311 fn a_million_letter_x() -> String {
313 let mut rs = String::new();
315 rs.push_str("华华华华华华华华华华");
320 fn half_a_million_letter_x() -> String {
322 let mut rs = String::new();
324 rs.push_str("华华华华华");
// 500_000 CJK chars at 3 bytes each.
329 let letters = a_million_letter_x();
330 assert!(half_a_million_letter_x() ==
331 String::from(&letters[0..3 * 500000]));
336 let ss = "中华Việt Nam";
338 assert_eq!("华", &ss[3..6]);
339 assert_eq!("Việt Nam", &ss[6..16]);
341 assert_eq!("ab", &"abc"[0..2]);
342 assert_eq!("bc", &"abc"[1..3]);
343 assert_eq!("", &"abc"[1..1]);
345 assert_eq!("中", &ss[0..3]);
346 assert_eq!("华V", &ss[3..7]);
347 assert_eq!("", &ss[3..3]);
// NOTE(review): lone header of a #[should_panic]-style test; its body
// (an out-of-boundary slice) was lost in extraction.
362 fn test_slice_fail() {
/// Open-ended slicing `&s[n..]` from byte offset `n` to the end.
///
/// Reconstructed from a garbled extraction (baked-in line numbers removed,
/// `#[test]` and closing brace restored).
#[test]
fn test_slice_from() {
    assert_eq!(&"abcd"[0..], "abcd");
    assert_eq!(&"abcd"[2..], "cd");
    // Slicing from exactly `len` yields "" rather than panicking.
    assert_eq!(&"abcd"[4..], "");
}
// NOTE(review): body of a `&s[..n]` (slice-to) test whose `#[test] fn`
// header and closing brace were lost in extraction. Kept byte-identical.
374 assert_eq!(&"abcd"[..0], "");
375 assert_eq!(&"abcd"[..2], "ab");
376 assert_eq!(&"abcd"[..4], "abcd");
/// `trim_left_matches` with a slice-of-chars, single-char, and closure
/// pattern. Reconstructed from a garbled extraction (`#[test]` and closing
/// brace restored).
#[test]
#[allow(deprecated)] // `trim_left_matches` is the pre-1.33 name of `trim_start_matches`
fn test_trim_left_matches() {
    // An empty pattern set trims nothing.
    let v: &[char] = &[];
    assert_eq!(" *** foo *** ".trim_left_matches(v), " *** foo *** ");
    let chars: &[char] = &['*', ' '];
    assert_eq!(" *** foo *** ".trim_left_matches(chars), "foo *** ");
    // A string made entirely of pattern chars trims to "".
    assert_eq!(" *** *** ".trim_left_matches(chars), "");
    assert_eq!("foo *** ".trim_left_matches(chars), "foo *** ");

    assert_eq!("11foo1bar11".trim_left_matches('1'), "foo1bar11");
    let chars: &[char] = &['1', '2'];
    assert_eq!("12foo1bar12".trim_left_matches(chars), "foo1bar12");
    assert_eq!("123foo1bar123".trim_left_matches(|c: char| c.is_numeric()), "foo1bar123");
}
/// `trim_right_matches` with a slice-of-chars, single-char, and closure
/// pattern. Reconstructed from a garbled extraction (`#[test]` and closing
/// brace restored).
#[test]
#[allow(deprecated)] // `trim_right_matches` is the pre-1.33 name of `trim_end_matches`
fn test_trim_right_matches() {
    // An empty pattern set trims nothing.
    let v: &[char] = &[];
    assert_eq!(" *** foo *** ".trim_right_matches(v), " *** foo *** ");
    let chars: &[char] = &['*', ' '];
    assert_eq!(" *** foo *** ".trim_right_matches(chars), " *** foo");
    // A string made entirely of pattern chars trims to "".
    assert_eq!(" *** *** ".trim_right_matches(chars), "");
    assert_eq!(" *** foo".trim_right_matches(chars), " *** foo");

    assert_eq!("11foo1bar11".trim_right_matches('1'), "11foo1bar");
    let chars: &[char] = &['1', '2'];
    assert_eq!("12foo1bar12".trim_right_matches(chars), "12foo1bar");
    assert_eq!("123foo1bar123".trim_right_matches(|c: char| c.is_numeric()), "123foo1bar");
}
/// `trim_matches` (both ends) with a slice-of-chars, single-char, and
/// closure pattern. Reconstructed from a garbled extraction (`#[test]` and
/// closing brace restored).
#[test]
fn test_trim_matches() {
    // An empty pattern set trims nothing.
    let v: &[char] = &[];
    assert_eq!(" *** foo *** ".trim_matches(v), " *** foo *** ");
    let chars: &[char] = &['*', ' '];
    assert_eq!(" *** foo *** ".trim_matches(chars), "foo");
    // A string made entirely of pattern chars trims to "".
    assert_eq!(" *** *** ".trim_matches(chars), "");
    assert_eq!("foo".trim_matches(chars), "foo");

    assert_eq!("11foo1bar11".trim_matches('1'), "foo1bar");
    let chars: &[char] = &['1', '2'];
    assert_eq!("12foo1bar12".trim_matches(chars), "foo1bar");
    assert_eq!("123foo1bar123".trim_matches(|c: char| c.is_numeric()), "foo1bar");
}
/// `trim_left`: strips leading Unicode whitespace only.
/// Reconstructed from a garbled extraction (`#[test]` and closing brace
/// restored).
#[test]
#[allow(deprecated)] // `trim_left` is the pre-1.33 name of `trim_start`
fn test_trim_left() {
    assert_eq!("".trim_left(), "");
    assert_eq!("a".trim_left(), "a");
    assert_eq!(" ".trim_left(), "");
    assert_eq!(" blah".trim_left(), "blah");
    // U+3000 (ideographic space) counts as whitespace.
    assert_eq!(" \u{3000} wut".trim_left(), "wut");
    // Trailing whitespace is untouched.
    assert_eq!("hey ".trim_left(), "hey ");
}
/// `trim_right`: strips trailing Unicode whitespace only.
/// Reconstructed from a garbled extraction (`#[test]` and closing brace
/// restored).
#[test]
#[allow(deprecated)] // `trim_right` is the pre-1.33 name of `trim_end`
fn test_trim_right() {
    assert_eq!("".trim_right(), "");
    assert_eq!("a".trim_right(), "a");
    assert_eq!(" ".trim_right(), "");
    assert_eq!("blah ".trim_right(), "blah");
    // U+3000 (ideographic space) counts as whitespace.
    assert_eq!("wut \u{3000} ".trim_right(), "wut");
    // Leading whitespace is untouched.
    assert_eq!(" hey".trim_right(), " hey");
}
// NOTE(review): body of a `str::trim` (both ends) test whose `#[test] fn`
// header and closing brace were lost in extraction. Kept byte-identical.
446 assert_eq!("".trim(), "");
447 assert_eq!("a".trim(), "a");
448 assert_eq!(" ".trim(), "");
449 assert_eq!(" blah ".trim(), "blah");
// \n and U+3000 (ideographic space) are both Unicode whitespace.
450 assert_eq!("\nwut \u{3000} ".trim(), "wut");
451 assert_eq!(" hey dude ".trim(), "hey dude");
/// `char::is_whitespace` across whole strings via `chars().all(..)`.
/// Reconstructed from a garbled extraction (`#[test]` and closing brace
/// restored).
#[test]
fn test_is_whitespace() {
    // `all` on an empty iterator is vacuously true.
    assert!("".chars().all(|c| c.is_whitespace()));
    assert!(" ".chars().all(|c| c.is_whitespace()));
    assert!("\u{2009}".chars().all(|c| c.is_whitespace())); // Thin space
    assert!(" \n\t ".chars().all(|c| c.is_whitespace()));
    assert!(!" _ ".chars().all(|c| c.is_whitespace()));
}
// NOTE(review): `str::slice_shift_char` (split first char off the front) was
// removed before 1.0; both tests below are missing attributes, some bindings
// (`empty` at line 472), and closing braces.
464 fn test_slice_shift_char() {
465 let data = "ประเทศไทย中"
466 assert_eq!(data.slice_shift_char(), Some(('ป', "ระเทศไทย中")));
470 fn test_slice_shift_char_2() {
472 assert_eq!(empty.slice_shift_char(), None);
// from_utf8 rejection cases: overlong encodings (a code point written in
// more bytes than needed must be rejected per the UTF-8 spec)...
477 // deny overlong encodings
478 assert!(from_utf8(&[0xc0, 0x80]).is_err());
479 assert!(from_utf8(&[0xc0, 0xae]).is_err());
480 assert!(from_utf8(&[0xe0, 0x80, 0x80]).is_err());
481 assert!(from_utf8(&[0xe0, 0x80, 0xaf]).is_err());
482 assert!(from_utf8(&[0xe0, 0x81, 0x81]).is_err());
483 assert!(from_utf8(&[0xf0, 0x82, 0x82, 0xac]).is_err());
// ...and code points above U+10FFFF.
484 assert!(from_utf8(&[0xf4, 0x90, 0x80, 0x80]).is_err());
// Surrogate code points U+D800..U+DFFF are not valid scalar values.
487 assert!(from_utf8(&[0xED, 0xA0, 0x80]).is_err());
488 assert!(from_utf8(&[0xED, 0xBF, 0xBF]).is_err());
// Boundary acceptance cases: shortest valid 2/3/4-byte encodings and the
// largest values just below/above the surrogate range.
490 assert!(from_utf8(&[0xC2, 0x80]).is_ok());
491 assert!(from_utf8(&[0xDF, 0xBF]).is_ok());
492 assert!(from_utf8(&[0xE0, 0xA0, 0x80]).is_ok());
493 assert!(from_utf8(&[0xED, 0x9F, 0xBF]).is_ok());
494 assert!(from_utf8(&[0xEE, 0x80, 0x80]).is_ok());
495 assert!(from_utf8(&[0xEF, 0xBF, 0xBF]).is_ok());
496 assert!(from_utf8(&[0xF0, 0x90, 0x80, 0x80]).is_ok());
497 assert!(from_utf8(&[0xF4, 0x8F, 0xBF, 0xBF]).is_ok());
// NOTE(review): `rustc_unicode::str::is_utf16` was a rustc-internal API;
// the pos!/neg! macro_rules! headers and the enclosing test fn were lost
// in extraction — only the macro arms and invocation lists remain.
502 use rustc_unicode::str::is_utf16;
505 ($($e:expr),*) => { { $(assert!(is_utf16($e));)* } }
514 // surrogate pairs (randomly generated with Python 3's
515 // .encode('utf-16be'))
516 pos!(&[0xdb54, 0xdf16, 0xd880, 0xdee0, 0xdb6a, 0xdd45],
517 &[0xd91f, 0xdeb1, 0xdb31, 0xdd84, 0xd8e2, 0xde14],
518 &[0xdb9f, 0xdc26, 0xdb6f, 0xde58, 0xd850, 0xdfae]);
520 // mixtures (also random)
521 pos!(&[0xd921, 0xdcc2, 0x002d, 0x004d, 0xdb32, 0xdf65],
522 &[0xdb45, 0xdd2d, 0x006a, 0xdacd, 0xddfe, 0x0006],
523 &[0x0067, 0xd8ff, 0xddb7, 0x000f, 0xd900, 0xdc80]);
527 ($($e:expr),*) => { { $(assert!(!is_utf16($e));)* } }
// NOTE(review): the negative example arrays for the four categories below
// (lines 532/534/536/538) were lost; only their labels survive.
531 // surrogate + regular unit
533 // surrogate + lead surrogate
535 // unterminated surrogate
537 // trail surrogate without a lead
540 // random byte sequences that Python 3's .decode('utf-16be')
542 neg!(&[0x5b3d, 0x0141, 0xde9e, 0x8fdc, 0xc6e7],
543 &[0xdf5a, 0x82a5, 0x62b9, 0xb447, 0x92f3],
544 &[0xda4e, 0x42bc, 0x4462, 0xee98, 0xc2ca],
545 &[0xbe00, 0xb04a, 0x6ecb, 0xdd89, 0xe278],
546 &[0x0465, 0xab56, 0xdbb6, 0xa893, 0x665e],
547 &[0x6b7f, 0x0a19, 0x40f4, 0xa657, 0xdcc5],
548 &[0x9b50, 0xda5e, 0x24ec, 0x03ad, 0x6dee],
549 &[0x8d17, 0xcaa7, 0xf4ae, 0xdf6e, 0xbed7],
550 &[0xdaee, 0x2584, 0x7d30, 0xa626, 0x121a],
551 &[0xd956, 0x4b43, 0x7570, 0xccd6, 0x4f4a],
552 &[0x9dcf, 0x1b49, 0x4ba5, 0xfce9, 0xdffe],
553 &[0x6572, 0xce53, 0xb05a, 0xf6af, 0xdacf],
554 &[0x1b90, 0x728c, 0x9906, 0xdb68, 0xf46e],
555 &[0x1606, 0xbeca, 0xbe76, 0x860f, 0xdfa5],
556 &[0x8b4f, 0xde7a, 0xd220, 0x9fac, 0x2b6f],
557 &[0xb8fe, 0xebbe, 0xda32, 0x1a5f, 0x8b8b],
558 &[0x934b, 0x8956, 0xc434, 0x1881, 0xddf7],
559 &[0x5a95, 0x13fc, 0xf116, 0xd89b, 0x93f9],
560 &[0xd640, 0x71f1, 0xdd7d, 0x77eb, 0x1cd8],
561 &[0x348b, 0xaef0, 0xdb2c, 0xebf1, 0x1282],
562 &[0x50d7, 0xd824, 0x5010, 0xb369, 0x22ea]);
// NOTE(review): fragment of test_as_bytes — these two lines are the middle
// of a `let v = [...]` UTF-8 byte-list initializer whose head/tail and the
// binding of `b` (used at line 574) were lost in extraction.
569 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228,
570 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97,
574 assert_eq!("".as_bytes(), b);
575 assert_eq!("abc".as_bytes(), b"abc");
576 assert_eq!("ศไทย中华Việt Nam".as_bytes(), v);
// NOTE(review): test_as_bytes_fail is missing its attributes (presumably a
// #[should_panic]-style attribute, given the name) and closing brace.
581 fn test_as_bytes_fail() {
582 // Don't double free. (I'm not sure if this exercises the
583 // original problem code path anymore.)
584 let s = String::from("");
585 let _bytes = s.as_bytes();
// Fragment of test_as_ptr: the header and the enclosing `unsafe { .. }`
// block (line 592) were lost; raw-pointer reads below require it.
591 let buf = "hello".as_ptr();
593 assert_eq!(*buf.offset(0), b'h');
594 assert_eq!(*buf.offset(1), b'e');
595 assert_eq!(*buf.offset(2), b'l');
596 assert_eq!(*buf.offset(3), b'l');
597 assert_eq!(*buf.offset(4), b'o');
// NOTE(review): `str::subslice_offset` was removed before 1.0 (see
// `as_ptr` arithmetic as the modern replacement). Missing #[test] and braces.
602 fn test_subslice_offset() {
603 let a = "kernelsprite";
604 let b = &a[7..a.len()];
605 let c = &a[0..a.len() - 6];
606 assert_eq!(a.subslice_offset(b), 7);
607 assert_eq!(a.subslice_offset(c), 0);
609 let string = "a\nb\nc";
610 let lines: Vec<&str> = string.lines().collect();
611 assert_eq!(string.subslice_offset(lines[0]), 0);
612 assert_eq!(string.subslice_offset(lines[1]), 2);
613 assert_eq!(string.subslice_offset(lines[2]), 4);
// `b` is not a subslice of `a`, so this was expected to panic
// (the #[should_panic] attribute at lines 616-617 is missing).
618 fn test_subslice_offset_2() {
619 let a = "alchemiter";
620 let b = "cruxtruder";
621 a.subslice_offset(b);
// String -> bytes -> String round-trip; the comparison loop header and the
// asserts between lines 630-634 were lost in extraction.
625 fn vec_str_conversions() {
626 let s1: String = String::from("All mimsy were the borogoves");
628 let v: Vec<u8> = s1.as_bytes().to_vec();
629 let s2: String = String::from(from_utf8(&v).unwrap());
635 let a: u8 = s1.as_bytes()[i];
636 let b: u8 = s2.as_bytes()[i];
// Body of test_contains (&str patterns); header lost in extraction.
646 assert!("abcde".contains("bcd"));
647 assert!("abcde".contains("abcd"));
648 assert!("abcde".contains("bcde"));
649 assert!("abcde".contains(""));
650 assert!("".contains(""));
651 assert!(!"abcde".contains("def"));
652 assert!(!"".contains("a"));
654 let data = "ประเทศไทย中华Việt Nam";
655 assert!(data.contains("ประเ"));
656 assert!(data.contains("ะเ"));
657 assert!(data.contains("中华"));
658 assert!(!data.contains("ไท华"));
/// `str::contains` with a `char` pattern.
/// Reconstructed from a garbled extraction (`#[test]` and closing brace
/// restored).
#[test]
fn test_contains_char() {
    assert!("abc".contains('b'));
    assert!("a".contains('a'));
    assert!(!"abc".contains('d'));
    // The empty string contains no chars at all.
    assert!(!"".contains('a'));
}
// NOTE(review): `str::char_at` / `char_at_reverse` were removed before 1.0
// (replaced by `s[i..].chars().next()`). The fragments below are missing
// headers, loop openers, and closing braces.
671 let s = "ศไทย中华Việt Nam";
672 let v = vec!['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
675 assert!(s.char_at(pos) == *ch);
676 pos += ch.to_string().len();
681 fn test_char_at_reverse() {
682 let s = "ศไทย中华Việt Nam";
683 let v = vec!['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
684 let mut pos = s.len();
685 for ch in v.iter().rev() {
686 assert!(s.char_at_reverse(pos) == *ch);
687 pos -= ch.to_string().len();
// NOTE(review): at this vintage `escape_unicode`/`escape_default` returned
// String directly; the modern API returns an iterator, so these asserts
// would not compile today. Missing #[test] attributes and closing braces.
692 fn test_escape_unicode() {
693 assert_eq!("abc".escape_unicode(),
694 String::from("\\u{61}\\u{62}\\u{63}"));
695 assert_eq!("a c".escape_unicode(),
696 String::from("\\u{61}\\u{20}\\u{63}"));
697 assert_eq!("\r\n\t".escape_unicode(),
698 String::from("\\u{d}\\u{a}\\u{9}"));
699 assert_eq!("'\"\\".escape_unicode(),
700 String::from("\\u{27}\\u{22}\\u{5c}"));
701 assert_eq!("\x00\x01\u{fe}\u{ff}".escape_unicode(),
702 String::from("\\u{0}\\u{1}\\u{fe}\\u{ff}"));
703 assert_eq!("\u{100}\u{ffff}".escape_unicode(),
704 String::from("\\u{100}\\u{ffff}"));
705 assert_eq!("\u{10000}\u{10ffff}".escape_unicode(),
706 String::from("\\u{10000}\\u{10ffff}"));
707 assert_eq!("ab\u{fb00}".escape_unicode(),
708 String::from("\\u{61}\\u{62}\\u{fb00}"));
709 assert_eq!("\u{1d4ea}\r".escape_unicode(),
710 String::from("\\u{1d4ea}\\u{d}"));
// escape_default keeps printable ASCII as-is and escapes the rest.
714 fn test_escape_default() {
715 assert_eq!("abc".escape_default(), String::from("abc"));
716 assert_eq!("a c".escape_default(), String::from("a c"));
717 assert_eq!("\r\n\t".escape_default(), String::from("\\r\\n\\t"));
718 assert_eq!("'\"\\".escape_default(), String::from("\\'\\\"\\\\"));
719 assert_eq!("\u{100}\u{ffff}".escape_default(),
720 String::from("\\u{100}\\u{ffff}"));
721 assert_eq!("\u{10000}\u{10ffff}".escape_default(),
722 String::from("\\u{10000}\\u{10ffff}"));
723 assert_eq!("ab\u{fb00}".escape_default(),
724 String::from("ab\\u{fb00}"));
725 assert_eq!("\u{1d4ea}\r".escape_default(),
726 String::from("\\u{1d4ea}\\r"));
/// Lexicographic (byte-wise) total ordering of `&str` via `Ord::cmp`.
///
/// Fixes a real defect in the original: each comparison was written as a
/// bare expression statement (`"1234".cmp("123") == Greater;`), so its
/// result was computed and discarded — the test asserted nothing. Wrapped
/// in `assert!` so failures are actually reported. Also reconstructs the
/// lost `#[test]` attribute and closing brace.
#[test]
fn test_total_ord() {
    assert!("1234".cmp("123") == Greater);
    assert!("123".cmp("1234") == Less);
    assert!("1234".cmp("1234") == Equal);
    // A shorter string that is a prefix-mismatch earlier still wins/loses
    // on the first differing byte ('5' < '6').
    assert!("12345555".cmp("123456") == Less);
    assert!("22".cmp("1234") == Greater);
}
// NOTE(review): `char_range_at`/`char_range_at_reverse` (returning a struct
// with `.ch`/`.next` fields) were removed before 1.0. Missing #[test]
// attributes and closing braces throughout this region.
739 fn test_char_range_at() {
// 1-, 2-, 3-, and 4-byte UTF-8 sequences, mirrored: offsets step 1/2/3/4.
740 let data = "b¢€𤭢𤭢€¢b";
741 assert_eq!('b', data.char_range_at(0).ch);
742 assert_eq!('¢', data.char_range_at(1).ch);
743 assert_eq!('€', data.char_range_at(3).ch);
744 assert_eq!('𤭢', data.char_range_at(6).ch);
745 assert_eq!('𤭢', data.char_range_at(10).ch);
746 assert_eq!('€', data.char_range_at(14).ch);
747 assert_eq!('¢', data.char_range_at(17).ch);
748 assert_eq!('b', data.char_range_at(19).ch);
// Reverse lookup at offset 0 must not underflow.
752 fn test_char_range_at_reverse_underflow() {
753 assert_eq!("abc".char_range_at_reverse(0).next, 0);
// Fragment of test_iterator: header, `let mut pos = 0;`, and the
// `for c in s.chars()` loop opener were lost in extraction.
758 let s = "ศไทย中华Việt Nam";
759 let v = ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
765 assert_eq!(c, v[pos]);
768 assert_eq!(pos, v.len());
// test_rev_iterator: same char data visited via chars().rev(); the loop
// scaffolding (lines 775-782) is missing.
772 fn test_rev_iterator() {
773 let s = "ศไทย中华Việt Nam";
774 let v = ['m', 'a', 'N', ' ', 't', 'ệ','i','V','华','中','ย','ท','ไ','ศ'];
777 let it = s.chars().rev();
780 assert_eq!(c, v[pos]);
783 assert_eq!(pos, v.len());
// NOTE(review): at this vintage `char::encode_utf8` returned
// Option<usize> (hence `.unwrap_or(0)`); the modern API returns &mut str,
// so this code predates that change. Closing braces (794-796) missing.
787 fn test_chars_decoding() {
788 let mut bytes = [0; 4];
// 0..0x110000 with filter_map(from_u32) visits every Unicode scalar value.
789 for c in (0..0x110000).filter_map(::std::char::from_u32) {
790 let len = c.encode_utf8(&mut bytes).unwrap_or(0);
791 let s = ::std::str::from_utf8(&bytes[..len]).unwrap();
792 if Some(c) != s.chars().next() {
793 panic!("character {:x}={} does not decode correctly", c as u32, c);
// Same exhaustive sweep, decoding from the back with chars().rev().
799 fn test_chars_rev_decoding() {
800 let mut bytes = [0; 4];
801 for c in (0..0x110000).filter_map(::std::char::from_u32) {
802 let len = c.encode_utf8(&mut bytes).unwrap_or(0);
803 let s = ::std::str::from_utf8(&bytes[..len]).unwrap();
804 if Some(c) != s.chars().rev().next() {
805 panic!("character {:x}={} does not decode correctly", c as u32, c);
// A cloned Chars iterator must yield the same sequence as the original.
// Line 814 (presumably an `it.next();` advancing the mut iterator) missing.
811 fn test_iterator_clone() {
812 let s = "ศไทย中华Việt Nam";
813 let mut it = s.chars();
815 assert!(it.clone().zip(it).all(|(x,y)| x == y));
// Fragments of test_bytesator / test_bytes_revator: the `let v = [...]`
// byte-list head/tail and the for-loop scaffolding were lost in extraction.
819 fn test_bytesator() {
820 let s = "ศไทย中华Việt Nam";
822 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228,
823 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97,
829 assert_eq!(b, v[pos]);
835 fn test_bytes_revator() {
836 let s = "ศไทย中华Việt Nam";
838 224, 184, 168, 224, 185, 132, 224, 184, 151, 224, 184, 162, 228,
839 184, 173, 229, 141, 142, 86, 105, 225, 187, 135, 116, 32, 78, 97,
842 let mut pos = v.len();
844 for b in s.bytes().rev() {
846 assert_eq!(b, v[pos]);
// char_indices pairs each char with its starting byte offset `p`.
851 fn test_char_indicesator() {
852 let s = "ศไทย中华Việt Nam";
853 let p = [0, 3, 6, 9, 12, 15, 18, 19, 20, 23, 24, 25, 26, 27];
854 let v = ['ศ','ไ','ท','ย','中','华','V','i','ệ','t',' ','N','a','m'];
857 let it = s.char_indices();
860 assert_eq!(c, (p[pos], v[pos]));
863 assert_eq!(pos, v.len());
864 assert_eq!(pos, p.len());
868 fn test_char_indices_revator() {
869 let s = "ศไทย中华Việt Nam";
870 let p = [27, 26, 25, 24, 23, 20, 19, 18, 15, 12, 9, 6, 3, 0];
871 let v = ['m', 'a', 'N', ' ', 't', 'ệ','i','V','华','中','ย','ท','ไ','ศ'];
874 let it = s.char_indices().rev();
877 assert_eq!(c, (p[pos], v[pos]));
880 assert_eq!(pos, v.len());
881 assert_eq!(pos, p.len());
/// `splitn(n, pat)` with char, closure, and multi-byte-char patterns:
/// yields at most `n` pieces, the last keeping the unsplit remainder.
/// Reconstructed from a garbled extraction (`#[test]`, blank lines, and
/// closing brace restored).
#[test]
fn test_splitn_char_iterator() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    let split: Vec<&str> = data.splitn(4, ' ').collect();
    assert_eq!(split, ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]);

    let split: Vec<&str> = data.splitn(4, |c: char| c == ' ').collect();
    assert_eq!(split, ["\nMäry", "häd", "ä", "little lämb\nLittle lämb\n"]);

    // Unicode (multi-byte) separator.
    let split: Vec<&str> = data.splitn(4, 'ä').collect();
    assert_eq!(split, ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]);

    let split: Vec<&str> = data.splitn(4, |c: char| c == 'ä').collect();
    assert_eq!(split, ["\nM", "ry h", "d ", " little lämb\nLittle lämb\n"]);
}
/// `split` vs `split_terminator`: only the former keeps the empty piece
/// produced by a trailing separator. Reconstructed from a garbled
/// extraction (`#[test]`, blank lines, and closing brace restored).
#[test]
fn test_split_char_iterator_no_trailing() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    let split: Vec<&str> = data.split('\n').collect();
    assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb", ""]);

    // split_terminator drops the final empty field after a trailing '\n'.
    let split: Vec<&str> = data.split_terminator('\n').collect();
    assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb"]);
}
// NOTE(review): bodies of test_rsplit and test_rsplitn whose `#[test] fn`
// headers and closing braces were lost in extraction. Kept byte-identical.
// rsplit yields pieces back-to-front with char, &str, and closure patterns.
915 let data = "\nMäry häd ä little lämb\nLittle lämb\n";
917 let split: Vec<&str> = data.rsplit(' ').collect();
918 assert_eq!(split, ["lämb\n", "lämb\nLittle", "little", "ä", "häd", "\nMäry"]);
920 let split: Vec<&str> = data.rsplit("lämb").collect();
921 assert_eq!(split, ["\n", "\nLittle ", "\nMäry häd ä little "]);
923 let split: Vec<&str> = data.rsplit(|c: char| c == 'ä').collect();
924 assert_eq!(split, ["mb\n", "mb\nLittle l", " little l", "d ", "ry h", "\nM"]);
// rsplitn(2, ..) splits once from the end; the remainder comes last.
929 let data = "\nMäry häd ä little lämb\nLittle lämb\n";
931 let split: Vec<&str> = data.rsplitn(2, ' ').collect();
932 assert_eq!(split, ["lämb\n", "\nMäry häd ä little lämb\nLittle"]);
934 let split: Vec<&str> = data.rsplitn(2, "lämb").collect();
935 assert_eq!(split, ["\n", "\nMäry häd ä little lämb\nLittle "]);
937 let split: Vec<&str> = data.rsplitn(2, |c: char| c == 'ä').collect();
938 assert_eq!(split, ["mb\n", "\nMäry häd ä little lämb\nLittle l"]);
/// `split_whitespace`: runs of mixed Unicode whitespace act as a single
/// separator and never produce empty items. Reconstructed from a garbled
/// extraction (`#[test]` and closing brace restored).
#[test]
fn test_split_whitespace() {
    let data = "\n \tMäry häd\tä little lämb\nLittle lämb\n";
    let words: Vec<&str> = data.split_whitespace().collect();
    assert_eq!(words, ["Märy", "häd", "ä", "little", "lämb", "Little", "lämb"])
}
// NOTE(review): the nfd/nfkd/nfc/nfkc_chars normalization iterators were
// removed from std before 1.0 (now the unicode-normalization crate). In
// every test below the `macro_rules! t` header, macro closer, #[test]
// attribute, and fn closing brace were lost in extraction; only the macro
// arm and the t!(input, expected) cases remain. Kept byte-identical.
// NFD: canonical decomposition.
950 fn test_nfd_chars() {
952 ($input: expr, $expected: expr) => {
953 assert_eq!($input.nfd_chars().collect::<String>(), $expected);
957 t!("\u{1e0b}\u{1c4}", "d\u{307}\u{1c4}");
958 t!("\u{2026}", "\u{2026}");
959 t!("\u{2126}", "\u{3a9}");
960 t!("\u{1e0b}\u{323}", "d\u{323}\u{307}");
961 t!("\u{1e0d}\u{307}", "d\u{323}\u{307}");
962 t!("a\u{301}", "a\u{301}");
963 t!("\u{301}a", "\u{301}a");
964 t!("\u{d4db}", "\u{1111}\u{1171}\u{11b6}");
965 t!("\u{ac1c}", "\u{1100}\u{1162}");
// NFKD: compatibility decomposition (e.g. U+2026 ellipsis -> "...").
970 fn test_nfkd_chars() {
972 ($input: expr, $expected: expr) => {
973 assert_eq!($input.nfkd_chars().collect::<String>(), $expected);
977 t!("\u{1e0b}\u{1c4}", "d\u{307}DZ\u{30c}");
978 t!("\u{2026}", "...");
979 t!("\u{2126}", "\u{3a9}");
980 t!("\u{1e0b}\u{323}", "d\u{323}\u{307}");
981 t!("\u{1e0d}\u{307}", "d\u{323}\u{307}");
982 t!("a\u{301}", "a\u{301}");
983 t!("\u{301}a", "\u{301}a");
984 t!("\u{d4db}", "\u{1111}\u{1171}\u{11b6}");
985 t!("\u{ac1c}", "\u{1100}\u{1162}");
// NFC: canonical decomposition followed by canonical composition.
990 fn test_nfc_chars() {
992 ($input: expr, $expected: expr) => {
993 assert_eq!($input.nfc_chars().collect::<String>(), $expected);
997 t!("\u{1e0b}\u{1c4}", "\u{1e0b}\u{1c4}");
998 t!("\u{2026}", "\u{2026}");
999 t!("\u{2126}", "\u{3a9}");
1000 t!("\u{1e0b}\u{323}", "\u{1e0d}\u{307}");
1001 t!("\u{1e0d}\u{307}", "\u{1e0d}\u{307}");
1002 t!("a\u{301}", "\u{e1}");
1003 t!("\u{301}a", "\u{301}a");
1004 t!("\u{d4db}", "\u{d4db}");
1005 t!("\u{ac1c}", "\u{ac1c}");
1006 t!("a\u{300}\u{305}\u{315}\u{5ae}b", "\u{e0}\u{5ae}\u{305}\u{315}b");
// NFKC: compatibility decomposition followed by canonical composition.
1009 #[allow(deprecated)]
1011 fn test_nfkc_chars() {
1013 ($input: expr, $expected: expr) => {
1014 assert_eq!($input.nfkc_chars().collect::<String>(), $expected);
1018 t!("\u{1e0b}\u{1c4}", "\u{1e0b}D\u{17d}");
1019 t!("\u{2026}", "...");
1020 t!("\u{2126}", "\u{3a9}");
1021 t!("\u{1e0b}\u{323}", "\u{1e0d}\u{307}");
1022 t!("\u{1e0d}\u{307}", "\u{1e0d}\u{307}");
1023 t!("a\u{301}", "\u{e1}");
1024 t!("\u{301}a", "\u{301}a");
1025 t!("\u{d4db}", "\u{d4db}");
1026 t!("\u{ac1c}", "\u{ac1c}");
1027 t!("a\u{300}\u{305}\u{315}\u{5ae}b", "\u{e0}\u{5ae}\u{305}\u{315}b");
// Body of test_lines (header lost): `lines()` splits on '\n'; an empty
// line yields "" and a missing trailing '\n' changes nothing.
1032 let data = "\nMäry häd ä little lämb\n\nLittle lämb\n";
1033 let lines: Vec<&str> = data.lines().collect();
1034 assert_eq!(lines, ["", "Märy häd ä little lämb", "", "Little lämb"]);
1036 let data = "\nMäry häd ä little lämb\n\nLittle lämb"; // no trailing \n
1037 let lines: Vec<&str> = data.lines().collect();
1038 assert_eq!(lines, ["", "Märy häd ä little lämb", "", "Little lämb"]);
1041 #[allow(deprecated)]
1043 fn test_graphemes() {
1044 use std::iter::order;
1046 // official Unicode test data
1047 // from http://www.unicode.org/Public/UCD/latest/ucd/auxiliary/GraphemeBreakTest.txt
1048 let test_same: [(_, &[_]); 325] = [
1049 ("\u{20}\u{20}", &["\u{20}", "\u{20}"]),
1050 ("\u{20}\u{308}\u{20}", &["\u{20}\u{308}", "\u{20}"]),
1051 ("\u{20}\u{D}", &["\u{20}", "\u{D}"]),
1052 ("\u{20}\u{308}\u{D}", &["\u{20}\u{308}", "\u{D}"]),
1053 ("\u{20}\u{A}", &["\u{20}", "\u{A}"]),
1054 ("\u{20}\u{308}\u{A}", &["\u{20}\u{308}", "\u{A}"]),
1055 ("\u{20}\u{1}", &["\u{20}", "\u{1}"]),
1056 ("\u{20}\u{308}\u{1}", &["\u{20}\u{308}", "\u{1}"]),
1057 ("\u{20}\u{300}", &["\u{20}\u{300}"]),
1058 ("\u{20}\u{308}\u{300}", &["\u{20}\u{308}\u{300}"]),
1059 ("\u{20}\u{1100}", &["\u{20}", "\u{1100}"]),
1060 ("\u{20}\u{308}\u{1100}", &["\u{20}\u{308}", "\u{1100}"]),
1061 ("\u{20}\u{1160}", &["\u{20}", "\u{1160}"]),
1062 ("\u{20}\u{308}\u{1160}", &["\u{20}\u{308}", "\u{1160}"]),
1063 ("\u{20}\u{11A8}", &["\u{20}", "\u{11A8}"]),
1064 ("\u{20}\u{308}\u{11A8}", &["\u{20}\u{308}", "\u{11A8}"]),
1065 ("\u{20}\u{AC00}", &["\u{20}", "\u{AC00}"]),
1066 ("\u{20}\u{308}\u{AC00}", &["\u{20}\u{308}", "\u{AC00}"]),
1067 ("\u{20}\u{AC01}", &["\u{20}", "\u{AC01}"]),
1068 ("\u{20}\u{308}\u{AC01}", &["\u{20}\u{308}", "\u{AC01}"]),
1069 ("\u{20}\u{1F1E6}", &["\u{20}", "\u{1F1E6}"]),
1070 ("\u{20}\u{308}\u{1F1E6}", &["\u{20}\u{308}", "\u{1F1E6}"]),
1071 ("\u{20}\u{378}", &["\u{20}", "\u{378}"]),
1072 ("\u{20}\u{308}\u{378}", &["\u{20}\u{308}", "\u{378}"]),
1073 ("\u{D}\u{20}", &["\u{D}", "\u{20}"]),
1074 ("\u{D}\u{308}\u{20}", &["\u{D}", "\u{308}", "\u{20}"]),
1075 ("\u{D}\u{D}", &["\u{D}", "\u{D}"]),
1076 ("\u{D}\u{308}\u{D}", &["\u{D}", "\u{308}", "\u{D}"]),
1077 ("\u{D}\u{A}", &["\u{D}\u{A}"]),
1078 ("\u{D}\u{308}\u{A}", &["\u{D}", "\u{308}", "\u{A}"]),
1079 ("\u{D}\u{1}", &["\u{D}", "\u{1}"]),
1080 ("\u{D}\u{308}\u{1}", &["\u{D}", "\u{308}", "\u{1}"]),
1081 ("\u{D}\u{300}", &["\u{D}", "\u{300}"]),
1082 ("\u{D}\u{308}\u{300}", &["\u{D}", "\u{308}\u{300}"]),
1083 ("\u{D}\u{903}", &["\u{D}", "\u{903}"]),
1084 ("\u{D}\u{1100}", &["\u{D}", "\u{1100}"]),
1085 ("\u{D}\u{308}\u{1100}", &["\u{D}", "\u{308}", "\u{1100}"]),
1086 ("\u{D}\u{1160}", &["\u{D}", "\u{1160}"]),
1087 ("\u{D}\u{308}\u{1160}", &["\u{D}", "\u{308}", "\u{1160}"]),
1088 ("\u{D}\u{11A8}", &["\u{D}", "\u{11A8}"]),
1089 ("\u{D}\u{308}\u{11A8}", &["\u{D}", "\u{308}", "\u{11A8}"]),
1090 ("\u{D}\u{AC00}", &["\u{D}", "\u{AC00}"]),
1091 ("\u{D}\u{308}\u{AC00}", &["\u{D}", "\u{308}", "\u{AC00}"]),
1092 ("\u{D}\u{AC01}", &["\u{D}", "\u{AC01}"]),
1093 ("\u{D}\u{308}\u{AC01}", &["\u{D}", "\u{308}", "\u{AC01}"]),
1094 ("\u{D}\u{1F1E6}", &["\u{D}", "\u{1F1E6}"]),
1095 ("\u{D}\u{308}\u{1F1E6}", &["\u{D}", "\u{308}", "\u{1F1E6}"]),
1096 ("\u{D}\u{378}", &["\u{D}", "\u{378}"]),
1097 ("\u{D}\u{308}\u{378}", &["\u{D}", "\u{308}", "\u{378}"]),
1098 ("\u{A}\u{20}", &["\u{A}", "\u{20}"]),
1099 ("\u{A}\u{308}\u{20}", &["\u{A}", "\u{308}", "\u{20}"]),
1100 ("\u{A}\u{D}", &["\u{A}", "\u{D}"]),
1101 ("\u{A}\u{308}\u{D}", &["\u{A}", "\u{308}", "\u{D}"]),
1102 ("\u{A}\u{A}", &["\u{A}", "\u{A}"]),
1103 ("\u{A}\u{308}\u{A}", &["\u{A}", "\u{308}", "\u{A}"]),
1104 ("\u{A}\u{1}", &["\u{A}", "\u{1}"]),
1105 ("\u{A}\u{308}\u{1}", &["\u{A}", "\u{308}", "\u{1}"]),
1106 ("\u{A}\u{300}", &["\u{A}", "\u{300}"]),
1107 ("\u{A}\u{308}\u{300}", &["\u{A}", "\u{308}\u{300}"]),
1108 ("\u{A}\u{903}", &["\u{A}", "\u{903}"]),
1109 ("\u{A}\u{1100}", &["\u{A}", "\u{1100}"]),
1110 ("\u{A}\u{308}\u{1100}", &["\u{A}", "\u{308}", "\u{1100}"]),
1111 ("\u{A}\u{1160}", &["\u{A}", "\u{1160}"]),
1112 ("\u{A}\u{308}\u{1160}", &["\u{A}", "\u{308}", "\u{1160}"]),
1113 ("\u{A}\u{11A8}", &["\u{A}", "\u{11A8}"]),
1114 ("\u{A}\u{308}\u{11A8}", &["\u{A}", "\u{308}", "\u{11A8}"]),
1115 ("\u{A}\u{AC00}", &["\u{A}", "\u{AC00}"]),
1116 ("\u{A}\u{308}\u{AC00}", &["\u{A}", "\u{308}", "\u{AC00}"]),
1117 ("\u{A}\u{AC01}", &["\u{A}", "\u{AC01}"]),
1118 ("\u{A}\u{308}\u{AC01}", &["\u{A}", "\u{308}", "\u{AC01}"]),
1119 ("\u{A}\u{1F1E6}", &["\u{A}", "\u{1F1E6}"]),
1120 ("\u{A}\u{308}\u{1F1E6}", &["\u{A}", "\u{308}", "\u{1F1E6}"]),
1121 ("\u{A}\u{378}", &["\u{A}", "\u{378}"]),
1122 ("\u{A}\u{308}\u{378}", &["\u{A}", "\u{308}", "\u{378}"]),
1123 ("\u{1}\u{20}", &["\u{1}", "\u{20}"]),
1124 ("\u{1}\u{308}\u{20}", &["\u{1}", "\u{308}", "\u{20}"]),
1125 ("\u{1}\u{D}", &["\u{1}", "\u{D}"]),
1126 ("\u{1}\u{308}\u{D}", &["\u{1}", "\u{308}", "\u{D}"]),
1127 ("\u{1}\u{A}", &["\u{1}", "\u{A}"]),
1128 ("\u{1}\u{308}\u{A}", &["\u{1}", "\u{308}", "\u{A}"]),
1129 ("\u{1}\u{1}", &["\u{1}", "\u{1}"]),
1130 ("\u{1}\u{308}\u{1}", &["\u{1}", "\u{308}", "\u{1}"]),
1131 ("\u{1}\u{300}", &["\u{1}", "\u{300}"]),
1132 ("\u{1}\u{308}\u{300}", &["\u{1}", "\u{308}\u{300}"]),
1133 ("\u{1}\u{903}", &["\u{1}", "\u{903}"]),
1134 ("\u{1}\u{1100}", &["\u{1}", "\u{1100}"]),
1135 ("\u{1}\u{308}\u{1100}", &["\u{1}", "\u{308}", "\u{1100}"]),
1136 ("\u{1}\u{1160}", &["\u{1}", "\u{1160}"]),
1137 ("\u{1}\u{308}\u{1160}", &["\u{1}", "\u{308}", "\u{1160}"]),
1138 ("\u{1}\u{11A8}", &["\u{1}", "\u{11A8}"]),
1139 ("\u{1}\u{308}\u{11A8}", &["\u{1}", "\u{308}", "\u{11A8}"]),
1140 ("\u{1}\u{AC00}", &["\u{1}", "\u{AC00}"]),
1141 ("\u{1}\u{308}\u{AC00}", &["\u{1}", "\u{308}", "\u{AC00}"]),
1142 ("\u{1}\u{AC01}", &["\u{1}", "\u{AC01}"]),
1143 ("\u{1}\u{308}\u{AC01}", &["\u{1}", "\u{308}", "\u{AC01}"]),
1144 ("\u{1}\u{1F1E6}", &["\u{1}", "\u{1F1E6}"]),
1145 ("\u{1}\u{308}\u{1F1E6}", &["\u{1}", "\u{308}", "\u{1F1E6}"]),
1146 ("\u{1}\u{378}", &["\u{1}", "\u{378}"]),
1147 ("\u{1}\u{308}\u{378}", &["\u{1}", "\u{308}", "\u{378}"]),
1148 ("\u{300}\u{20}", &["\u{300}", "\u{20}"]),
1149 ("\u{300}\u{308}\u{20}", &["\u{300}\u{308}", "\u{20}"]),
1150 ("\u{300}\u{D}", &["\u{300}", "\u{D}"]),
1151 ("\u{300}\u{308}\u{D}", &["\u{300}\u{308}", "\u{D}"]),
1152 ("\u{300}\u{A}", &["\u{300}", "\u{A}"]),
1153 ("\u{300}\u{308}\u{A}", &["\u{300}\u{308}", "\u{A}"]),
1154 ("\u{300}\u{1}", &["\u{300}", "\u{1}"]),
1155 ("\u{300}\u{308}\u{1}", &["\u{300}\u{308}", "\u{1}"]),
1156 ("\u{300}\u{300}", &["\u{300}\u{300}"]),
1157 ("\u{300}\u{308}\u{300}", &["\u{300}\u{308}\u{300}"]),
1158 ("\u{300}\u{1100}", &["\u{300}", "\u{1100}"]),
1159 ("\u{300}\u{308}\u{1100}", &["\u{300}\u{308}", "\u{1100}"]),
1160 ("\u{300}\u{1160}", &["\u{300}", "\u{1160}"]),
1161 ("\u{300}\u{308}\u{1160}", &["\u{300}\u{308}", "\u{1160}"]),
1162 ("\u{300}\u{11A8}", &["\u{300}", "\u{11A8}"]),
1163 ("\u{300}\u{308}\u{11A8}", &["\u{300}\u{308}", "\u{11A8}"]),
1164 ("\u{300}\u{AC00}", &["\u{300}", "\u{AC00}"]),
1165 ("\u{300}\u{308}\u{AC00}", &["\u{300}\u{308}", "\u{AC00}"]),
1166 ("\u{300}\u{AC01}", &["\u{300}", "\u{AC01}"]),
1167 ("\u{300}\u{308}\u{AC01}", &["\u{300}\u{308}", "\u{AC01}"]),
1168 ("\u{300}\u{1F1E6}", &["\u{300}", "\u{1F1E6}"]),
1169 ("\u{300}\u{308}\u{1F1E6}", &["\u{300}\u{308}", "\u{1F1E6}"]),
1170 ("\u{300}\u{378}", &["\u{300}", "\u{378}"]),
1171 ("\u{300}\u{308}\u{378}", &["\u{300}\u{308}", "\u{378}"]),
1172 ("\u{903}\u{20}", &["\u{903}", "\u{20}"]),
1173 ("\u{903}\u{308}\u{20}", &["\u{903}\u{308}", "\u{20}"]),
1174 ("\u{903}\u{D}", &["\u{903}", "\u{D}"]),
1175 ("\u{903}\u{308}\u{D}", &["\u{903}\u{308}", "\u{D}"]),
1176 ("\u{903}\u{A}", &["\u{903}", "\u{A}"]),
1177 ("\u{903}\u{308}\u{A}", &["\u{903}\u{308}", "\u{A}"]),
1178 ("\u{903}\u{1}", &["\u{903}", "\u{1}"]),
1179 ("\u{903}\u{308}\u{1}", &["\u{903}\u{308}", "\u{1}"]),
1180 ("\u{903}\u{300}", &["\u{903}\u{300}"]),
1181 ("\u{903}\u{308}\u{300}", &["\u{903}\u{308}\u{300}"]),
1182 ("\u{903}\u{1100}", &["\u{903}", "\u{1100}"]),
1183 ("\u{903}\u{308}\u{1100}", &["\u{903}\u{308}", "\u{1100}"]),
1184 ("\u{903}\u{1160}", &["\u{903}", "\u{1160}"]),
1185 ("\u{903}\u{308}\u{1160}", &["\u{903}\u{308}", "\u{1160}"]),
1186 ("\u{903}\u{11A8}", &["\u{903}", "\u{11A8}"]),
1187 ("\u{903}\u{308}\u{11A8}", &["\u{903}\u{308}", "\u{11A8}"]),
1188 ("\u{903}\u{AC00}", &["\u{903}", "\u{AC00}"]),
1189 ("\u{903}\u{308}\u{AC00}", &["\u{903}\u{308}", "\u{AC00}"]),
1190 ("\u{903}\u{AC01}", &["\u{903}", "\u{AC01}"]),
1191 ("\u{903}\u{308}\u{AC01}", &["\u{903}\u{308}", "\u{AC01}"]),
1192 ("\u{903}\u{1F1E6}", &["\u{903}", "\u{1F1E6}"]),
1193 ("\u{903}\u{308}\u{1F1E6}", &["\u{903}\u{308}", "\u{1F1E6}"]),
1194 ("\u{903}\u{378}", &["\u{903}", "\u{378}"]),
1195 ("\u{903}\u{308}\u{378}", &["\u{903}\u{308}", "\u{378}"]),
1196 ("\u{1100}\u{20}", &["\u{1100}", "\u{20}"]),
1197 ("\u{1100}\u{308}\u{20}", &["\u{1100}\u{308}", "\u{20}"]),
1198 ("\u{1100}\u{D}", &["\u{1100}", "\u{D}"]),
1199 ("\u{1100}\u{308}\u{D}", &["\u{1100}\u{308}", "\u{D}"]),
1200 ("\u{1100}\u{A}", &["\u{1100}", "\u{A}"]),
1201 ("\u{1100}\u{308}\u{A}", &["\u{1100}\u{308}", "\u{A}"]),
1202 ("\u{1100}\u{1}", &["\u{1100}", "\u{1}"]),
1203 ("\u{1100}\u{308}\u{1}", &["\u{1100}\u{308}", "\u{1}"]),
1204 ("\u{1100}\u{300}", &["\u{1100}\u{300}"]),
1205 ("\u{1100}\u{308}\u{300}", &["\u{1100}\u{308}\u{300}"]),
1206 ("\u{1100}\u{1100}", &["\u{1100}\u{1100}"]),
1207 ("\u{1100}\u{308}\u{1100}", &["\u{1100}\u{308}", "\u{1100}"]),
1208 ("\u{1100}\u{1160}", &["\u{1100}\u{1160}"]),
1209 ("\u{1100}\u{308}\u{1160}", &["\u{1100}\u{308}", "\u{1160}"]),
1210 ("\u{1100}\u{11A8}", &["\u{1100}", "\u{11A8}"]),
1211 ("\u{1100}\u{308}\u{11A8}", &["\u{1100}\u{308}", "\u{11A8}"]),
1212 ("\u{1100}\u{AC00}", &["\u{1100}\u{AC00}"]),
1213 ("\u{1100}\u{308}\u{AC00}", &["\u{1100}\u{308}", "\u{AC00}"]),
1214 ("\u{1100}\u{AC01}", &["\u{1100}\u{AC01}"]),
1215 ("\u{1100}\u{308}\u{AC01}", &["\u{1100}\u{308}", "\u{AC01}"]),
1216 ("\u{1100}\u{1F1E6}", &["\u{1100}", "\u{1F1E6}"]),
1217 ("\u{1100}\u{308}\u{1F1E6}", &["\u{1100}\u{308}", "\u{1F1E6}"]),
1218 ("\u{1100}\u{378}", &["\u{1100}", "\u{378}"]),
1219 ("\u{1100}\u{308}\u{378}", &["\u{1100}\u{308}", "\u{378}"]),
1220 ("\u{1160}\u{20}", &["\u{1160}", "\u{20}"]),
1221 ("\u{1160}\u{308}\u{20}", &["\u{1160}\u{308}", "\u{20}"]),
1222 ("\u{1160}\u{D}", &["\u{1160}", "\u{D}"]),
1223 ("\u{1160}\u{308}\u{D}", &["\u{1160}\u{308}", "\u{D}"]),
1224 ("\u{1160}\u{A}", &["\u{1160}", "\u{A}"]),
1225 ("\u{1160}\u{308}\u{A}", &["\u{1160}\u{308}", "\u{A}"]),
1226 ("\u{1160}\u{1}", &["\u{1160}", "\u{1}"]),
1227 ("\u{1160}\u{308}\u{1}", &["\u{1160}\u{308}", "\u{1}"]),
1228 ("\u{1160}\u{300}", &["\u{1160}\u{300}"]),
1229 ("\u{1160}\u{308}\u{300}", &["\u{1160}\u{308}\u{300}"]),
1230 ("\u{1160}\u{1100}", &["\u{1160}", "\u{1100}"]),
1231 ("\u{1160}\u{308}\u{1100}", &["\u{1160}\u{308}", "\u{1100}"]),
1232 ("\u{1160}\u{1160}", &["\u{1160}\u{1160}"]),
1233 ("\u{1160}\u{308}\u{1160}", &["\u{1160}\u{308}", "\u{1160}"]),
1234 ("\u{1160}\u{11A8}", &["\u{1160}\u{11A8}"]),
1235 ("\u{1160}\u{308}\u{11A8}", &["\u{1160}\u{308}", "\u{11A8}"]),
1236 ("\u{1160}\u{AC00}", &["\u{1160}", "\u{AC00}"]),
1237 ("\u{1160}\u{308}\u{AC00}", &["\u{1160}\u{308}", "\u{AC00}"]),
1238 ("\u{1160}\u{AC01}", &["\u{1160}", "\u{AC01}"]),
1239 ("\u{1160}\u{308}\u{AC01}", &["\u{1160}\u{308}", "\u{AC01}"]),
1240 ("\u{1160}\u{1F1E6}", &["\u{1160}", "\u{1F1E6}"]),
1241 ("\u{1160}\u{308}\u{1F1E6}", &["\u{1160}\u{308}", "\u{1F1E6}"]),
1242 ("\u{1160}\u{378}", &["\u{1160}", "\u{378}"]),
1243 ("\u{1160}\u{308}\u{378}", &["\u{1160}\u{308}", "\u{378}"]),
1244 ("\u{11A8}\u{20}", &["\u{11A8}", "\u{20}"]),
1245 ("\u{11A8}\u{308}\u{20}", &["\u{11A8}\u{308}", "\u{20}"]),
1246 ("\u{11A8}\u{D}", &["\u{11A8}", "\u{D}"]),
1247 ("\u{11A8}\u{308}\u{D}", &["\u{11A8}\u{308}", "\u{D}"]),
1248 ("\u{11A8}\u{A}", &["\u{11A8}", "\u{A}"]),
1249 ("\u{11A8}\u{308}\u{A}", &["\u{11A8}\u{308}", "\u{A}"]),
1250 ("\u{11A8}\u{1}", &["\u{11A8}", "\u{1}"]),
1251 ("\u{11A8}\u{308}\u{1}", &["\u{11A8}\u{308}", "\u{1}"]),
1252 ("\u{11A8}\u{300}", &["\u{11A8}\u{300}"]),
1253 ("\u{11A8}\u{308}\u{300}", &["\u{11A8}\u{308}\u{300}"]),
1254 ("\u{11A8}\u{1100}", &["\u{11A8}", "\u{1100}"]),
1255 ("\u{11A8}\u{308}\u{1100}", &["\u{11A8}\u{308}", "\u{1100}"]),
1256 ("\u{11A8}\u{1160}", &["\u{11A8}", "\u{1160}"]),
1257 ("\u{11A8}\u{308}\u{1160}", &["\u{11A8}\u{308}", "\u{1160}"]),
1258 ("\u{11A8}\u{11A8}", &["\u{11A8}\u{11A8}"]),
1259 ("\u{11A8}\u{308}\u{11A8}", &["\u{11A8}\u{308}", "\u{11A8}"]),
1260 ("\u{11A8}\u{AC00}", &["\u{11A8}", "\u{AC00}"]),
1261 ("\u{11A8}\u{308}\u{AC00}", &["\u{11A8}\u{308}", "\u{AC00}"]),
1262 ("\u{11A8}\u{AC01}", &["\u{11A8}", "\u{AC01}"]),
1263 ("\u{11A8}\u{308}\u{AC01}", &["\u{11A8}\u{308}", "\u{AC01}"]),
1264 ("\u{11A8}\u{1F1E6}", &["\u{11A8}", "\u{1F1E6}"]),
1265 ("\u{11A8}\u{308}\u{1F1E6}", &["\u{11A8}\u{308}", "\u{1F1E6}"]),
1266 ("\u{11A8}\u{378}", &["\u{11A8}", "\u{378}"]),
1267 ("\u{11A8}\u{308}\u{378}", &["\u{11A8}\u{308}", "\u{378}"]),
1268 ("\u{AC00}\u{20}", &["\u{AC00}", "\u{20}"]),
1269 ("\u{AC00}\u{308}\u{20}", &["\u{AC00}\u{308}", "\u{20}"]),
1270 ("\u{AC00}\u{D}", &["\u{AC00}", "\u{D}"]),
1271 ("\u{AC00}\u{308}\u{D}", &["\u{AC00}\u{308}", "\u{D}"]),
1272 ("\u{AC00}\u{A}", &["\u{AC00}", "\u{A}"]),
1273 ("\u{AC00}\u{308}\u{A}", &["\u{AC00}\u{308}", "\u{A}"]),
1274 ("\u{AC00}\u{1}", &["\u{AC00}", "\u{1}"]),
1275 ("\u{AC00}\u{308}\u{1}", &["\u{AC00}\u{308}", "\u{1}"]),
1276 ("\u{AC00}\u{300}", &["\u{AC00}\u{300}"]),
1277 ("\u{AC00}\u{308}\u{300}", &["\u{AC00}\u{308}\u{300}"]),
1278 ("\u{AC00}\u{1100}", &["\u{AC00}", "\u{1100}"]),
1279 ("\u{AC00}\u{308}\u{1100}", &["\u{AC00}\u{308}", "\u{1100}"]),
1280 ("\u{AC00}\u{1160}", &["\u{AC00}\u{1160}"]),
1281 ("\u{AC00}\u{308}\u{1160}", &["\u{AC00}\u{308}", "\u{1160}"]),
1282 ("\u{AC00}\u{11A8}", &["\u{AC00}\u{11A8}"]),
1283 ("\u{AC00}\u{308}\u{11A8}", &["\u{AC00}\u{308}", "\u{11A8}"]),
1284 ("\u{AC00}\u{AC00}", &["\u{AC00}", "\u{AC00}"]),
1285 ("\u{AC00}\u{308}\u{AC00}", &["\u{AC00}\u{308}", "\u{AC00}"]),
1286 ("\u{AC00}\u{AC01}", &["\u{AC00}", "\u{AC01}"]),
1287 ("\u{AC00}\u{308}\u{AC01}", &["\u{AC00}\u{308}", "\u{AC01}"]),
1288 ("\u{AC00}\u{1F1E6}", &["\u{AC00}", "\u{1F1E6}"]),
1289 ("\u{AC00}\u{308}\u{1F1E6}", &["\u{AC00}\u{308}", "\u{1F1E6}"]),
1290 ("\u{AC00}\u{378}", &["\u{AC00}", "\u{378}"]),
1291 ("\u{AC00}\u{308}\u{378}", &["\u{AC00}\u{308}", "\u{378}"]),
1292 ("\u{AC01}\u{20}", &["\u{AC01}", "\u{20}"]),
1293 ("\u{AC01}\u{308}\u{20}", &["\u{AC01}\u{308}", "\u{20}"]),
1294 ("\u{AC01}\u{D}", &["\u{AC01}", "\u{D}"]),
1295 ("\u{AC01}\u{308}\u{D}", &["\u{AC01}\u{308}", "\u{D}"]),
1296 ("\u{AC01}\u{A}", &["\u{AC01}", "\u{A}"]),
1297 ("\u{AC01}\u{308}\u{A}", &["\u{AC01}\u{308}", "\u{A}"]),
1298 ("\u{AC01}\u{1}", &["\u{AC01}", "\u{1}"]),
1299 ("\u{AC01}\u{308}\u{1}", &["\u{AC01}\u{308}", "\u{1}"]),
1300 ("\u{AC01}\u{300}", &["\u{AC01}\u{300}"]),
1301 ("\u{AC01}\u{308}\u{300}", &["\u{AC01}\u{308}\u{300}"]),
1302 ("\u{AC01}\u{1100}", &["\u{AC01}", "\u{1100}"]),
1303 ("\u{AC01}\u{308}\u{1100}", &["\u{AC01}\u{308}", "\u{1100}"]),
1304 ("\u{AC01}\u{1160}", &["\u{AC01}", "\u{1160}"]),
1305 ("\u{AC01}\u{308}\u{1160}", &["\u{AC01}\u{308}", "\u{1160}"]),
1306 ("\u{AC01}\u{11A8}", &["\u{AC01}\u{11A8}"]),
1307 ("\u{AC01}\u{308}\u{11A8}", &["\u{AC01}\u{308}", "\u{11A8}"]),
1308 ("\u{AC01}\u{AC00}", &["\u{AC01}", "\u{AC00}"]),
1309 ("\u{AC01}\u{308}\u{AC00}", &["\u{AC01}\u{308}", "\u{AC00}"]),
1310 ("\u{AC01}\u{AC01}", &["\u{AC01}", "\u{AC01}"]),
1311 ("\u{AC01}\u{308}\u{AC01}", &["\u{AC01}\u{308}", "\u{AC01}"]),
1312 ("\u{AC01}\u{1F1E6}", &["\u{AC01}", "\u{1F1E6}"]),
1313 ("\u{AC01}\u{308}\u{1F1E6}", &["\u{AC01}\u{308}", "\u{1F1E6}"]),
1314 ("\u{AC01}\u{378}", &["\u{AC01}", "\u{378}"]),
1315 ("\u{AC01}\u{308}\u{378}", &["\u{AC01}\u{308}", "\u{378}"]),
1316 ("\u{1F1E6}\u{20}", &["\u{1F1E6}", "\u{20}"]),
1317 ("\u{1F1E6}\u{308}\u{20}", &["\u{1F1E6}\u{308}", "\u{20}"]),
1318 ("\u{1F1E6}\u{D}", &["\u{1F1E6}", "\u{D}"]),
1319 ("\u{1F1E6}\u{308}\u{D}", &["\u{1F1E6}\u{308}", "\u{D}"]),
1320 ("\u{1F1E6}\u{A}", &["\u{1F1E6}", "\u{A}"]),
1321 ("\u{1F1E6}\u{308}\u{A}", &["\u{1F1E6}\u{308}", "\u{A}"]),
1322 ("\u{1F1E6}\u{1}", &["\u{1F1E6}", "\u{1}"]),
1323 ("\u{1F1E6}\u{308}\u{1}", &["\u{1F1E6}\u{308}", "\u{1}"]),
1324 ("\u{1F1E6}\u{300}", &["\u{1F1E6}\u{300}"]),
1325 ("\u{1F1E6}\u{308}\u{300}", &["\u{1F1E6}\u{308}\u{300}"]),
1326 ("\u{1F1E6}\u{1100}", &["\u{1F1E6}", "\u{1100}"]),
1327 ("\u{1F1E6}\u{308}\u{1100}", &["\u{1F1E6}\u{308}", "\u{1100}"]),
1328 ("\u{1F1E6}\u{1160}", &["\u{1F1E6}", "\u{1160}"]),
1329 ("\u{1F1E6}\u{308}\u{1160}", &["\u{1F1E6}\u{308}", "\u{1160}"]),
1330 ("\u{1F1E6}\u{11A8}", &["\u{1F1E6}", "\u{11A8}"]),
1331 ("\u{1F1E6}\u{308}\u{11A8}", &["\u{1F1E6}\u{308}", "\u{11A8}"]),
1332 ("\u{1F1E6}\u{AC00}", &["\u{1F1E6}", "\u{AC00}"]),
1333 ("\u{1F1E6}\u{308}\u{AC00}", &["\u{1F1E6}\u{308}", "\u{AC00}"]),
1334 ("\u{1F1E6}\u{AC01}", &["\u{1F1E6}", "\u{AC01}"]),
1335 ("\u{1F1E6}\u{308}\u{AC01}", &["\u{1F1E6}\u{308}", "\u{AC01}"]),
1336 ("\u{1F1E6}\u{1F1E6}", &["\u{1F1E6}\u{1F1E6}"]),
1337 ("\u{1F1E6}\u{308}\u{1F1E6}", &["\u{1F1E6}\u{308}", "\u{1F1E6}"]),
1338 ("\u{1F1E6}\u{378}", &["\u{1F1E6}", "\u{378}"]),
1339 ("\u{1F1E6}\u{308}\u{378}", &["\u{1F1E6}\u{308}", "\u{378}"]),
1340 ("\u{378}\u{20}", &["\u{378}", "\u{20}"]),
1341 ("\u{378}\u{308}\u{20}", &["\u{378}\u{308}", "\u{20}"]),
1342 ("\u{378}\u{D}", &["\u{378}", "\u{D}"]),
1343 ("\u{378}\u{308}\u{D}", &["\u{378}\u{308}", "\u{D}"]),
1344 ("\u{378}\u{A}", &["\u{378}", "\u{A}"]),
1345 ("\u{378}\u{308}\u{A}", &["\u{378}\u{308}", "\u{A}"]),
1346 ("\u{378}\u{1}", &["\u{378}", "\u{1}"]),
1347 ("\u{378}\u{308}\u{1}", &["\u{378}\u{308}", "\u{1}"]),
1348 ("\u{378}\u{300}", &["\u{378}\u{300}"]),
1349 ("\u{378}\u{308}\u{300}", &["\u{378}\u{308}\u{300}"]),
1350 ("\u{378}\u{1100}", &["\u{378}", "\u{1100}"]),
1351 ("\u{378}\u{308}\u{1100}", &["\u{378}\u{308}", "\u{1100}"]),
1352 ("\u{378}\u{1160}", &["\u{378}", "\u{1160}"]),
1353 ("\u{378}\u{308}\u{1160}", &["\u{378}\u{308}", "\u{1160}"]),
1354 ("\u{378}\u{11A8}", &["\u{378}", "\u{11A8}"]),
1355 ("\u{378}\u{308}\u{11A8}", &["\u{378}\u{308}", "\u{11A8}"]),
1356 ("\u{378}\u{AC00}", &["\u{378}", "\u{AC00}"]),
1357 ("\u{378}\u{308}\u{AC00}", &["\u{378}\u{308}", "\u{AC00}"]),
1358 ("\u{378}\u{AC01}", &["\u{378}", "\u{AC01}"]),
1359 ("\u{378}\u{308}\u{AC01}", &["\u{378}\u{308}", "\u{AC01}"]),
1360 ("\u{378}\u{1F1E6}", &["\u{378}", "\u{1F1E6}"]),
1361 ("\u{378}\u{308}\u{1F1E6}", &["\u{378}\u{308}", "\u{1F1E6}"]),
1362 ("\u{378}\u{378}", &["\u{378}", "\u{378}"]),
1363 ("\u{378}\u{308}\u{378}", &["\u{378}\u{308}", "\u{378}"]),
1364 ("\u{61}\u{1F1E6}\u{62}", &["\u{61}", "\u{1F1E6}", "\u{62}"]),
1365 ("\u{1F1F7}\u{1F1FA}", &["\u{1F1F7}\u{1F1FA}"]),
1366 ("\u{1F1F7}\u{1F1FA}\u{1F1F8}", &["\u{1F1F7}\u{1F1FA}\u{1F1F8}"]),
1367 ("\u{1F1F7}\u{1F1FA}\u{1F1F8}\u{1F1EA}",
1368 &["\u{1F1F7}\u{1F1FA}\u{1F1F8}\u{1F1EA}"]),
1369 ("\u{1F1F7}\u{1F1FA}\u{200B}\u{1F1F8}\u{1F1EA}",
1370 &["\u{1F1F7}\u{1F1FA}", "\u{200B}", "\u{1F1F8}\u{1F1EA}"]),
1371 ("\u{1F1E6}\u{1F1E7}\u{1F1E8}", &["\u{1F1E6}\u{1F1E7}\u{1F1E8}"]),
1372 ("\u{1F1E6}\u{200D}\u{1F1E7}\u{1F1E8}", &["\u{1F1E6}\u{200D}",
1373 "\u{1F1E7}\u{1F1E8}"]),
1374 ("\u{1F1E6}\u{1F1E7}\u{200D}\u{1F1E8}",
1375 &["\u{1F1E6}\u{1F1E7}\u{200D}", "\u{1F1E8}"]),
1376 ("\u{20}\u{200D}\u{646}", &["\u{20}\u{200D}", "\u{646}"]),
1377 ("\u{646}\u{200D}\u{20}", &["\u{646}\u{200D}", "\u{20}"]),
1380 let test_diff: [(_, &[_], &[_]); 23] = [
1381 ("\u{20}\u{903}", &["\u{20}\u{903}"], &["\u{20}", "\u{903}"]), ("\u{20}\u{308}\u{903}",
1382 &["\u{20}\u{308}\u{903}"], &["\u{20}\u{308}", "\u{903}"]), ("\u{D}\u{308}\u{903}",
1383 &["\u{D}", "\u{308}\u{903}"], &["\u{D}", "\u{308}", "\u{903}"]), ("\u{A}\u{308}\u{903}",
1384 &["\u{A}", "\u{308}\u{903}"], &["\u{A}", "\u{308}", "\u{903}"]), ("\u{1}\u{308}\u{903}",
1385 &["\u{1}", "\u{308}\u{903}"], &["\u{1}", "\u{308}", "\u{903}"]), ("\u{300}\u{903}",
1386 &["\u{300}\u{903}"], &["\u{300}", "\u{903}"]), ("\u{300}\u{308}\u{903}",
1387 &["\u{300}\u{308}\u{903}"], &["\u{300}\u{308}", "\u{903}"]), ("\u{903}\u{903}",
1388 &["\u{903}\u{903}"], &["\u{903}", "\u{903}"]), ("\u{903}\u{308}\u{903}",
1389 &["\u{903}\u{308}\u{903}"], &["\u{903}\u{308}", "\u{903}"]), ("\u{1100}\u{903}",
1390 &["\u{1100}\u{903}"], &["\u{1100}", "\u{903}"]), ("\u{1100}\u{308}\u{903}",
1391 &["\u{1100}\u{308}\u{903}"], &["\u{1100}\u{308}", "\u{903}"]), ("\u{1160}\u{903}",
1392 &["\u{1160}\u{903}"], &["\u{1160}", "\u{903}"]), ("\u{1160}\u{308}\u{903}",
1393 &["\u{1160}\u{308}\u{903}"], &["\u{1160}\u{308}", "\u{903}"]), ("\u{11A8}\u{903}",
1394 &["\u{11A8}\u{903}"], &["\u{11A8}", "\u{903}"]), ("\u{11A8}\u{308}\u{903}",
1395 &["\u{11A8}\u{308}\u{903}"], &["\u{11A8}\u{308}", "\u{903}"]), ("\u{AC00}\u{903}",
1396 &["\u{AC00}\u{903}"], &["\u{AC00}", "\u{903}"]), ("\u{AC00}\u{308}\u{903}",
1397 &["\u{AC00}\u{308}\u{903}"], &["\u{AC00}\u{308}", "\u{903}"]), ("\u{AC01}\u{903}",
1398 &["\u{AC01}\u{903}"], &["\u{AC01}", "\u{903}"]), ("\u{AC01}\u{308}\u{903}",
1399 &["\u{AC01}\u{308}\u{903}"], &["\u{AC01}\u{308}", "\u{903}"]), ("\u{1F1E6}\u{903}",
1400 &["\u{1F1E6}\u{903}"], &["\u{1F1E6}", "\u{903}"]), ("\u{1F1E6}\u{308}\u{903}",
1401 &["\u{1F1E6}\u{308}\u{903}"], &["\u{1F1E6}\u{308}", "\u{903}"]), ("\u{378}\u{903}",
1402 &["\u{378}\u{903}"], &["\u{378}", "\u{903}"]), ("\u{378}\u{308}\u{903}",
1403 &["\u{378}\u{308}\u{903}"], &["\u{378}\u{308}", "\u{903}"]),
1406 for &(s, g) in &test_same[..] {
1407 // test forward iterator
1408 assert!(order::equals(s.graphemes(true), g.iter().cloned()));
1409 assert!(order::equals(s.graphemes(false), g.iter().cloned()));
1411 // test reverse iterator
1412 assert!(order::equals(s.graphemes(true).rev(), g.iter().rev().cloned()));
1413 assert!(order::equals(s.graphemes(false).rev(), g.iter().rev().cloned()));
1416 for &(s, gt, gf) in &test_diff {
1417 // test forward iterator
1418 assert!(order::equals(s.graphemes(true), gt.iter().cloned()));
1419 assert!(order::equals(s.graphemes(false), gf.iter().cloned()));
1421 // test reverse iterator
1422 assert!(order::equals(s.graphemes(true).rev(), gt.iter().rev().cloned()));
1423 assert!(order::equals(s.graphemes(false).rev(), gf.iter().rev().cloned()));
1426 // test the indices iterators
1427 let s = "a̐éö̲\r\n";
1428 let gr_inds = s.grapheme_indices(true).collect::<Vec<(usize, &str)>>();
1429 let b: &[_] = &[(0, "a̐"), (3, "é"), (6, "ö̲"), (11, "\r\n")];
1430 assert_eq!(gr_inds, b);
1431 let gr_inds = s.grapheme_indices(true).rev().collect::<Vec<(usize, &str)>>();
1432 let b: &[_] = &[(11, "\r\n"), (6, "ö̲"), (3, "é"), (0, "a̐")];
1433 assert_eq!(gr_inds, b);
1434 let mut gr_inds_iter = s.grapheme_indices(true);
1436 let gr_inds = gr_inds_iter.by_ref();
1437 let e1 = gr_inds.size_hint();
1438 assert_eq!(e1, (1, Some(13)));
1439 let c = gr_inds.count();
1442 let e2 = gr_inds_iter.size_hint();
1443 assert_eq!(e2, (0, Some(0)));
1445 // make sure the reverse iterator does the right thing with "\n" at beginning of string
1447 let gr = s.graphemes(true).rev().collect::<Vec<&str>>();
1448 let b: &[_] = &["\r", "\r\n", "\n"];
fn test_splitator() {
    /// Assert that splitting `s` on `sep` yields exactly the pieces in `u`.
    fn t(s: &str, sep: &str, u: &[&str]) {
        let v: Vec<&str> = s.split(sep).collect();
        // NOTE(review): this assertion was missing; the helper collected the
        // pieces but verified nothing.
        assert_eq!(v, u);
    }
    // A needle that never matches leaves the haystack whole.
    t("--1233345--", "12345", &["--1233345--"]);
    t("abc::hello::there", "::", &["abc", "hello", "there"]);
    // Leading/trailing separators produce empty leading/trailing pieces.
    t("::hello::there", "::", &["", "hello", "there"]);
    t("hello::there::", "::", &["hello", "there", ""]);
    t("::hello::there::", "::", &["", "hello", "there", ""]);
    // Multi-byte separator inside multi-byte text.
    t("ประเทศไทย中华Việt Nam", "中华", &["ประเทศไทย", "Việt Nam"]);
    t("zzXXXzzYYYzz", "zz", &["", "XXX", "YYY", ""]);
    t("zzXXXzYYYz", "XXX", &["zz", "zYYYz"]);
    t(".XXX.YYY.", ".", &["", "XXX", "YYY", ""]);
    // The empty haystack splits into a single empty piece.
    t("", ".", &[""]);
    // Overlapping-looking separators: matches are non-overlapping, scanned
    // left to right.
    t("zz", "zz", &["", ""]);
    t("ok", "z", &["ok"]);
    t("zzz", "zz", &["", "z"]);
    t("zzzzz", "zz", &["", "", "z"]);
}
fn test_str_default() {
    use std::default::Default;

    /// `Default` for a string-like type must produce the empty string.
    fn t<S: Default + AsRef<str>>() {
        let s: S = Default::default();
        assert_eq!(s.as_ref(), "");
    }

    // NOTE(review): the helper was defined but never invoked in the visible
    // code, so the test checked nothing. Exercise it for both the borrowed
    // and the owned string type.
    t::<&str>();
    t::<String>();
}
fn test_str_container() {
    /// Sum the byte lengths of all string slices in `v`.
    fn sum_len(v: &[&str]) -> usize {
        v.iter().map(|x| x.len()).sum()
    }

    let s = String::from("01234");
    assert_eq!(5, sum_len(&["012", "", "34"]));
    // NOTE(review): the `"2"` element was missing from the visible code, so
    // the pieces summed to 4 and the assertion of 5 could never hold.
    // `&String` coerces to `&str` element-wise in this array literal.
    assert_eq!(5, sum_len(&[&String::from("01"),
                            &String::from("2"),
                            &String::from("34"),
                            &String::from("")]));
    assert_eq!(5, sum_len(&[&s]));
}
fn test_str_from_utf8() {
    // NOTE(review): `xs` was used before being bound in the visible code;
    // restore the plain-ASCII case.
    let xs = b"hello";
    assert_eq!(from_utf8(xs), Ok("hello"));

    // Multi-byte UTF-8 round-trips unchanged.
    let xs = "ศไทย中华Việt Nam".as_bytes();
    assert_eq!(from_utf8(xs), Ok("ศไทย中华Việt Nam"));

    // A lone 0xFF byte can never appear in well-formed UTF-8.
    let xs = b"hello\xFF";
    assert!(from_utf8(xs).is_err());
}
fn test_pattern_deref_forward() {
    // The same needle must be found no matter whether it is passed as
    // `&str`, `&&str`, or `&String` -- all of them act as the same pattern.
    let haystack = "aabcdaa";
    assert!(haystack.contains("bcd"));
    assert!(haystack.contains(&"bcd"));
    assert!(haystack.contains(&"bcd".to_string()));
}
// Matching the empty pattern reports a match at every char boundary.
fn test_empty_match_indices() {
    // NOTE(review): the binding of `data` is on a line elided from this
    // excerpt. The expected indices (0, 1, 3, 6, 7) imply a 7-byte string
    // whose char boundaries sit at exactly those byte offsets -- confirm
    // against the full source.
    let vec: Vec<_> = data.match_indices("").collect();
    // NOTE(review): the `(start, end)` pairs reflect the pre-1.0
    // `match_indices` API; the modern API yields `(usize, &str)` instead.
    assert_eq!(vec, [(0, 0), (1, 1), (3, 3), (6, 6), (7, 7)]);
fn test_bool_from_str() {
    // `FromStr` for `bool` accepts exactly the strings "true" and "false";
    // anything else is a parse error.
    assert_eq!("true".parse::<bool>().ok(), Some(true));
    assert_eq!("false".parse::<bool>().ok(), Some(false));
    assert_eq!("not even a boolean".parse::<bool>().ok(), None);
}
// Exhaustively verify that `s` contains every one of its own substrings,
// including the empty one. (Slicing assumes `s` has no multi-byte chars at
// the probed offsets; callers pass ASCII-safe inputs.)
fn check_contains_all_substrings(s: &str) {
    assert!(s.contains(""));
    for start in 0..s.len() {
        for end in start + 1..s.len() + 1 {
            assert!(s.contains(&s[start..end]));
        }
    }
}
1546 fn strslice_issue_16589() {
1547 assert!("bananas".contains("nana"));
1549 // prior to the fix for #16589, x.contains("abcdabcd") returned false
1550 // test all substrings for good measure
1551 check_contains_all_substrings("012345678901234567890123456789bcdabcdabcd");
fn strslice_issue_16878() {
    // Regression check for #16878: needles that almost match at many
    // positions must still be reported as absent.
    for &(haystack, needle) in &[
        ("1234567ah012345678901ah", "hah"),
        ("00abc01234567890123456789abc", "bcabc"),
    ] {
        assert!(!haystack.contains(needle));
    }
}
1562 fn test_strslice_contains() {
1563 let x = "There are moments, Jeeves, when one asks oneself, 'Do trousers matter?'";
1564 check_contains_all_substrings(x);
fn test_rsplitn_char_iterator() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    // NOTE(review): `rsplitn` yields pieces right-to-left, so each collected
    // vector must be reversed before comparing against the left-to-right
    // expectation. The `reverse()` calls were missing from the visible code,
    // so every assertion below would have failed.
    let mut split: Vec<&str> = data.rsplitn(4, ' ').collect();
    split.reverse();
    assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);

    // Same split expressed with a closure pattern.
    let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == ' ').collect();
    split.reverse();
    assert_eq!(split, ["\nMäry häd ä", "little", "lämb\nLittle", "lämb\n"]);

    // Split on a multi-byte char; the leftover piece keeps its inner 'ä's.
    let mut split: Vec<&str> = data.rsplitn(4, 'ä').collect();
    split.reverse();
    assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);

    let mut split: Vec<&str> = data.rsplitn(4, |c: char| c == 'ä').collect();
    split.reverse();
    assert_eq!(split, ["\nMäry häd ", " little l", "mb\nLittle l", "mb\n"]);
}
fn test_split_char_iterator() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    let split: Vec<&str> = data.split(' ').collect();
    assert_eq!(split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);

    // NOTE(review): the reversed iterator yields the pieces right-to-left,
    // so the collected vector must be reversed before comparison. The
    // `reverse()` calls were missing from the visible code, so the reversed
    // cases below would have failed.
    let mut rsplit: Vec<&str> = data.split(' ').rev().collect();
    rsplit.reverse();
    assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);

    // Same split with a closure pattern.
    let split: Vec<&str> = data.split(|c: char| c == ' ').collect();
    assert_eq!(split, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);

    let mut rsplit: Vec<&str> = data.split(|c: char| c == ' ').rev().collect();
    rsplit.reverse();
    assert_eq!(rsplit, ["\nMäry", "häd", "ä", "little", "lämb\nLittle", "lämb\n"]);

    // Split on a multi-byte char.
    let split: Vec<&str> = data.split('ä').collect();
    assert_eq!(split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);

    let mut rsplit: Vec<&str> = data.split('ä').rev().collect();
    rsplit.reverse();
    assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);

    let split: Vec<&str> = data.split(|c: char| c == 'ä').collect();
    assert_eq!(split, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);

    let mut rsplit: Vec<&str> = data.split(|c: char| c == 'ä').rev().collect();
    rsplit.reverse();
    assert_eq!(rsplit, ["\nM", "ry h", "d ", " little l", "mb\nLittle l", "mb\n"]);
}
fn test_rev_split_char_iterator_no_trailing() {
    let data = "\nMäry häd ä little lämb\nLittle lämb\n";

    // NOTE(review): the reversed iterator yields pieces right-to-left, so
    // the collected vector must be reversed before comparison; the
    // `reverse()` calls were missing from the visible code.
    let mut split: Vec<&str> = data.split('\n').rev().collect();
    split.reverse();
    assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb", ""]);

    // `split_terminator` drops the empty piece after a trailing separator,
    // even when iterated from the back.
    let mut split: Vec<&str> = data.split_terminator('\n').rev().collect();
    split.reverse();
    assert_eq!(split, ["", "Märy häd ä little lämb", "Little lämb"]);
}
1637 fn test_utf16_code_units() {
1638 use rustc_unicode::str::Utf16Encoder;
1639 assert_eq!(Utf16Encoder::new(vec!['é', '\u{1F4A9}'].into_iter()).collect::<Vec<u16>>(),
1640 [0xE9, 0xD83D, 0xDCA9])
fn starts_with_in_unicode() {
    // A multi-byte leading char must not be mistaken for an ASCII prefix.
    let line = "├── Cargo.toml";
    assert!(!line.starts_with("# "));
}
fn starts_short_long() {
    // A prefix longer than the haystack can never match; equal-or-shorter
    // prefixes match iff the leading bytes agree.
    assert!(!"".starts_with("##"));
    assert!(!"##".starts_with("####"));
    assert!("####".starts_with("##"));
    assert!(!"##ä".starts_with("####"));
    assert!("####ä".starts_with("##"));
    assert!(!"##".starts_with("####ä"));
    assert!("##ä##".starts_with("##ä"));

    // The empty prefix matches every string, with or without multi-byte
    // chars anywhere in it.
    for s in &["", "ä", "#ä", "##ä", "ä###", "#ä##", "##ä#"] {
        assert!(s.starts_with(""));
    }
}
fn contains_weird_cases() {
    // Char patterns against a short haystack with whitespace and symbols.
    let haystack = "* \t";
    assert!(haystack.contains(' '));
    assert!(!haystack.contains('?'));
    assert!(!haystack.contains('\u{1F4A9}'));
}
// Fragment of a whitespace-trimming test: the enclosing `fn` header and the
// expected-value argument of every `assert_eq!` fall on lines elided from
// this excerpt.
// Left-, right-, and both-sided trims of "a" padded with whitespace
// (presumably expecting "a \t ", " \t a", and "a" -- TODO confirm against
// the full source).
assert_eq!(" \t a \t ".trim_left_matches(|c: char| c.is_whitespace()),
assert_eq!(" \t a \t ".trim_right_matches(|c: char| c.is_whitespace()),
assert_eq!(" \t a \t ".trim_matches(|c: char| c.is_whitespace()),
// An all-whitespace input presumably trims to "" in every direction --
// TODO confirm.
assert_eq!(" \t \t ".trim_left_matches(|c: char| c.is_whitespace()),
assert_eq!(" \t \t ".trim_right_matches(|c: char| c.is_whitespace()),
assert_eq!(" \t \t ".trim_matches(|c: char| c.is_whitespace()),
1691 use std::str::pattern::Pattern;
1692 use std::str::pattern::{Searcher, ReverseSearcher};
1693 use std::str::pattern::SearchStep::{self, Match, Reject, Done};
// Generates a pair of searcher tests for pattern `$p` against haystack `$h`:
// one drives the searcher forward, one in reverse; both are compared against
// the expected `SearchStep` sequence `$e…`.
// NOTE(review): the `mod $name`/`#[test] fn` wrappers emitted around these
// statements are on lines elided from this excerpt.
macro_rules! make_test {
    ($name:ident, $p:expr, $h:expr, [$($e:expr,)*]) => {
        use std::str::pattern::SearchStep::{Match, Reject};
        use super::{cmp_search_to_vec};
        // Forward traversal must produce the expected steps in order...
        cmp_search_to_vec(false, $p, $h, vec![$($e),*]);
        // ...and reverse traversal must produce them back-to-front.
        cmp_search_to_vec(true, $p, $h, vec![$($e),*]);
// Drives `pat`'s searcher over `haystack` -- forward via `next()` when
// `rev` is false, backward via `next_back()` otherwise -- collecting every
// `Match`/`Reject` step, then compares the collected sequence with `right`
// and validates that the reported ranges tile the haystack without gaps.
// NOTE(review): several lines of this function (the opening brace, the
// collection loop around the `match`, its `Done` arm, and part of the
// coverage bookkeeping) are elided from this excerpt.
fn cmp_search_to_vec<'a, P: Pattern<'a>>(rev: bool, pat: P, haystack: &'a str,
                                         right: Vec<SearchStep>)
    where P::Searcher: ReverseSearcher<'a>
    let mut searcher = pat.into_searcher(haystack);
    // Pull one step from whichever end is under test.
    match if !rev {searcher.next()} else {searcher.next_back()} {
        Match(a, b) => v.push(Match(a, b)),
        Reject(a, b) => v.push(Reject(a, b)),
    // Walk the expected steps checking each range starts exactly where the
    // previous one ended (i.e. the steps are contiguous from offset 0).
    let mut first_index = 0;
    for (i, e) in right.iter().enumerate() {
        Match(a, b) | Reject(a, b)
            if a <= b && a == first_index => {
    if let Some(err) = err {
        panic!("Input skipped range at {}", err);
    // The final step must reach the end of the haystack.
    if first_index != haystack.len() {
        panic!("Did not cover whole input");
    assert_eq!(v, right);
// Instantiations of `make_test!` covering str and char patterns against
// ASCII, multi-byte, and empty haystacks.
// NOTE(review): the expected `Match`/`Reject` step lists between each `[`
// and its closing `]);` are on lines elided from this excerpt. "mulibyte"
// in two test names looks like a typo for "multibyte" -- left as-is here
// since renaming would change the generated test names.
make_test!(str_searcher_ascii_haystack, "bb", "abbcbbd", [
// Empty needle against a non-empty haystack.
make_test!(str_searcher_empty_needle_ascii_haystack, "", "abbcbbd", [
// Space needle in a haystack of 3-byte box-drawing chars.
make_test!(str_searcher_mulibyte_haystack, " ", "├──", [
make_test!(str_searcher_empty_needle_mulibyte_haystack, "", "├──", [
// Both needle and haystack empty.
make_test!(str_searcher_empty_needle_empty_haystack, "", "", [
make_test!(str_searcher_nonempty_needle_empty_haystack, "├", "", [
// The same shapes with char patterns instead of str patterns.
make_test!(char_searcher_ascii_haystack, 'b', "abbcbbd", [
make_test!(char_searcher_mulibyte_haystack, ' ', "├──", [
make_test!(char_searcher_short_haystack, '\u{1F4A9}', "* \t", [
// Test generator for string iterators. Two arms:
//  * the first takes `with $fwd, $bwd` and checks both the forward
//    constructor and the backward one against the same expected items
//    (for double-ended iterators);
//  * the second checks a forward-only constructor.
// NOTE(review): the `#[test] fn $name` wrappers, arm delimiters, and --
// judging by `bwd_vec` being `mut` before an equality check against the
// forward expectation -- a `bwd_vec.reverse();` line are all elided from
// this excerpt; confirm against the full source.
macro_rules! generate_iterator_test {
        ($($arg:expr),*) -> [$($t:tt)*];
        with $fwd:expr, $bwd:expr;
            let res = vec![$($t)*];
            // Forward iteration must yield exactly the expected items.
            let fwd_vec: Vec<_> = ($fwd)($($arg),*).collect();
            assert_eq!(fwd_vec, res);
            // Backward iteration is compared against the same expectation.
            let mut bwd_vec: Vec<_> = ($bwd)($($arg),*).collect();
            assert_eq!(bwd_vec, res);
        ($($arg:expr),*) -> [$($t:tt)*];
            let res = vec![$($t)*];
            let fwd_vec: Vec<_> = ($fwd)($($arg),*).collect();
            assert_eq!(fwd_vec, res);
// Instantiations of `generate_iterator_test!`: each block lists inputs,
// expected items, and the iterator constructor(s) to exercise.
// NOTE(review): closing braces and the `with str::splitn;` /
// `with str::rsplitn;` clauses of the last two blocks are on lines elided
// from this excerpt.
generate_iterator_test! {
    double_ended_split {
        ("foo.bar.baz", '.') -> ["foo", "bar", "baz"];
        ("foo::bar::baz", "::") -> ["foo", "bar", "baz"];
    with str::split, str::rsplit;
generate_iterator_test! {
    double_ended_split_terminator {
        // Trailing terminator yields no trailing empty piece.
        ("foo;bar;baz;", ';') -> ["foo", "bar", "baz"];
    with str::split_terminator, str::rsplit_terminator;
generate_iterator_test! {
    double_ended_matches {
        ("a1b2c3", char::is_numeric) -> ["1", "2", "3"];
    with str::matches, str::rmatches;
generate_iterator_test! {
    double_ended_match_indices {
        // (start, end) byte ranges of each digit match.
        ("a1b2c3", char::is_numeric) -> [(1, 2), (3, 4), (5, 6)];
    with str::match_indices, str::rmatch_indices;
// splitn/rsplitn are not double-ended: only the forward arm is generated.
generate_iterator_test! {
    not_double_ended_splitn {
        ("foo::bar::baz", 2, "::") -> ["foo", "bar::baz"];
generate_iterator_test! {
    not_double_ended_rsplitn {
        ("foo::bar::baz", 2, "::") -> ["baz", "foo::bar"];
1912 use test::{Bencher, black_box};
1915 fn char_iterator(b: &mut Bencher) {
1916 let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
1918 b.iter(|| s.chars().count());
1922 fn char_iterator_for(b: &mut Bencher) {
1923 let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
1926 for ch in s.chars() { black_box(ch); }
// Pure-ASCII counterpart of `char_iterator`, over a longer multi-line
// literal (the literal's own line breaks are part of the string).
fn char_iterator_ascii(b: &mut Bencher) {
    let s = "Mary had a little lamb, Little lamb
Mary had a little lamb, Little lamb
Mary had a little lamb, Little lamb
Mary had a little lamb, Little lamb
Mary had a little lamb, Little lamb
Mary had a little lamb, Little lamb";
    b.iter(|| s.chars().count());
1943 fn char_iterator_rev(b: &mut Bencher) {
1944 let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
1946 b.iter(|| s.chars().rev().count());
1950 fn char_iterator_rev_for(b: &mut Bencher) {
1951 let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
1954 for ch in s.chars().rev() { black_box(ch); }
1959 fn char_indicesator(b: &mut Bencher) {
1960 let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
1961 let len = s.chars().count();
1963 b.iter(|| assert_eq!(s.char_indices().count(), len));
1967 fn char_indicesator_rev(b: &mut Bencher) {
1968 let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
1969 let len = s.chars().count();
1971 b.iter(|| assert_eq!(s.char_indices().rev().count(), len));
1975 fn split_unicode_ascii(b: &mut Bencher) {
1976 let s = "ประเทศไทย中华Việt Namประเทศไทย中华Việt Nam";
1978 b.iter(|| assert_eq!(s.split('V').count(), 3));
1982 fn split_ascii(b: &mut Bencher) {
1983 let s = "Mary had a little lamb, Little lamb, little-lamb.";
1984 let len = s.split(' ').count();
1986 b.iter(|| assert_eq!(s.split(' ').count(), len));
1990 fn split_extern_fn(b: &mut Bencher) {
1991 let s = "Mary had a little lamb, Little lamb, little-lamb.";
1992 let len = s.split(' ').count();
1993 fn pred(c: char) -> bool { c == ' ' }
1995 b.iter(|| assert_eq!(s.split(pred).count(), len));
1999 fn split_closure(b: &mut Bencher) {
2000 let s = "Mary had a little lamb, Little lamb, little-lamb.";
2001 let len = s.split(' ').count();
2003 b.iter(|| assert_eq!(s.split(|c: char| c == ' ').count(), len));
2007 fn split_slice(b: &mut Bencher) {
2008 let s = "Mary had a little lamb, Little lamb, little-lamb.";
2009 let len = s.split(' ').count();
2011 let c: &[char] = &[' '];
2012 b.iter(|| assert_eq!(s.split(c).count(), len));
// Benchmark joining ten copies of a mixed-script string with a separator.
fn bench_connect(b: &mut Bencher) {
    let s = "ศไทย中华Việt Nam; Mary had a little lamb, Little lamb";
    // NOTE(review): the binding of `sep` and the `b.iter(|| ...)` wrapper
    // are on lines elided from this excerpt. The assertion implies
    // `connect` emits 10 copies of `s` joined by 9 copies of `sep`.
    let v = vec![s, s, s, s, s, s, s, s, s, s];
    assert_eq!(v.connect(sep).len(), s.len() * 10 + sep.len() * 9);
// Benchmark a short needle found inside a short haystack.
fn bench_contains_short_short(b: &mut Bencher) {
    let haystack = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.";
    // NOTE(review): the binding of `needle` and the `b.iter(|| ...)`
    // wrapper are on lines elided from this excerpt.
    assert!(haystack.contains(needle));
// Benchmark searching for an absent short needle in several paragraphs of
// Latin filler text.
// NOTE(review): the `let haystack = "\` line opening the literal, one
// interior line of it, and the `b.iter(|| ...)` wrapper are elided from
// this excerpt; the lines below are the literal's content, verbatim.
fn bench_contains_short_long(b: &mut Bencher) {
Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \
sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \
tempus vel, gravida nec quam.
In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \
sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \
diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \
lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \
eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \
interdum. Curabitur ut nisi justo.
Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \
mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \
lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \
est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \
felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \
ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \
feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \
Aliquam sit amet placerat lorem.
Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \
mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \
Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \
lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \
suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \
Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \
feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
malesuada sollicitudin quam eu fermentum.";
    // "english" never occurs in the Latin haystack, so this is the
    // all-misses search path.
    let needle = "english";
    assert!(!haystack.contains(needle));
// Benchmark: adversarial input for naive substring search — a haystack that
// is one long run of 'a' and a needle that mismatches only on its final
// byte, so a naive algorithm backtracks at nearly every position.
// NOTE(review): this listing elides lines 2083-2084 and 2086-2089 — the
// `#[bench]` attribute and the `b.iter(|| { ... })` wrapper around the
// assert appear to be missing here; confirm against the complete source.
2080 fn bench_contains_bad_naive(b: &mut Bencher) {
2081 let haystack = "aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa";
// Needle is all 'a' except the trailing 'b'; the haystack has no 'b',
// so the search must examine the whole string before failing.
2082 let needle = "aaaaaaaab";
2085 assert!(!haystack.contains(needle));
// Benchmark: needle identical to the haystack — exercises the full-length
// exact-match (successful) path of `str::contains`.
// NOTE(review): this listing elides lines 2093-2094 and 2096-2098 — the
// `#[bench]` attribute and the `b.iter(|| { ... })` wrapper around the
// assert appear to be missing here; confirm against the complete source.
2090 fn bench_contains_equal(b: &mut Bencher) {
2091 let haystack = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.";
// Same string as the haystack: the match succeeds only at offset 0 and
// must compare every byte.
2092 let needle = "Lorem ipsum dolor sit amet, consectetur adipiscing elit.";
2095 assert!(haystack.contains(needle));
// Helper macro: expands to a single benchmark function named `$name` that
// binds the fixture string `$str` to the identifier `$s` and times the
// expression `$code` via `bencher.iter`.
// NOTE(review): lines 2101, 2103-2104, and 2106-2109 are elided from this
// listing — presumably the `#[bench]` attribute, the `let $s = $str;`
// binding, and the closing braces of the function, the rule arm, and the
// macro; confirm against the complete source.
2099 macro_rules! make_test_inner {
2100 ($s:ident, $code:expr, $name:ident, $str:expr) => {
2102 fn $name(bencher: &mut Bencher) {
// The closure re-evaluates `$code` on every benchmark iteration.
2105 bencher.iter(|| $code);
// Benchmark-suite macro: given a suite name, an identifier `$s` for the
// input string, and an expression `$code` over it, generates one benchmark
// per fixture string (via `make_test_inner!`): short ASCII, short
// mixed-script (Thai/Chinese/Vietnamese + ASCII), short emoji-only, and a
// long multi-paragraph Lorem Ipsum text.
// NOTE(review): lines 2112-2113 and the closing braces after 2157 are
// elided from this listing — presumably `mod $name {` and
// `use test::Bencher;` plus the module/rule/macro terminators; confirm
// against the complete source.
2110 macro_rules! make_test {
2111 ($name:ident, $s:ident, $code:expr) => {
2114 use test::black_box;
2116 // Short strings: 65 bytes each
// All-ASCII fixture: every char is one byte.
2117 make_test_inner!($s, $code, short_ascii,
2118 "Mary had a little lamb, Little lamb Mary had a littl lamb, lamb!");
// Mixed-script fixture: multi-byte UTF-8 interleaved with ASCII.
2119 make_test_inner!($s, $code, short_mixed,
2120 "ศไทย中华Việt Nam; Mary had a little lamb, Little lam!");
// Emoji fixture: every char (except '!') is a 4-byte code point.
2121 make_test_inner!($s, $code, short_pile_of_poo,
2122 "💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩💩!");
// Long fixture: multi-paragraph text built from backslash-continued
// literal lines; note it ends with '!' so bang-search benchmarks match
// only at the very end.
2123 make_test_inner!($s, $code, long_lorem_ipsum,"\
2124 Lorem ipsum dolor sit amet, consectetur adipiscing elit. Suspendisse quis lorem sit amet dolor \
2125 ultricies condimentum. Praesent iaculis purus elit, ac malesuada quam malesuada in. Duis sed orci \
2126 eros. Suspendisse sit amet magna mollis, mollis nunc luctus, imperdiet mi. Integer fringilla non \
2127 sem ut lacinia. Fusce varius tortor a risus porttitor hendrerit. Morbi mauris dui, ultricies nec \
2128 tempus vel, gravida nec quam.
2130 In est dui, tincidunt sed tempus interdum, adipiscing laoreet ante. Etiam tempor, tellus quis \
2131 sagittis interdum, nulla purus mattis sem, quis auctor erat odio ac tellus. In nec nunc sit amet \
2132 diam volutpat molestie at sed ipsum. Vestibulum laoreet consequat vulputate. Integer accumsan \
2133 lorem ac dignissim placerat. Suspendisse convallis faucibus lorem. Aliquam erat volutpat. In vel \
2134 eleifend felis. Sed suscipit nulla lorem, sed mollis est sollicitudin et. Nam fermentum egestas \
2135 interdum. Curabitur ut nisi justo.
2137 Sed sollicitudin ipsum tellus, ut condimentum leo eleifend nec. Cras ut velit ante. Phasellus nec \
2138 mollis odio. Mauris molestie erat in arcu mattis, at aliquet dolor vehicula. Quisque malesuada \
2139 lectus sit amet nisi pretium, a condimentum ipsum porta. Morbi at dapibus diam. Praesent egestas \
2140 est sed risus elementum, eu rutrum metus ultrices. Etiam fermentum consectetur magna, id rutrum \
2141 felis accumsan a. Aliquam ut pellentesque libero. Sed mi nulla, lobortis eu tortor id, suscipit \
2142 ultricies neque. Morbi iaculis sit amet risus at iaculis. Praesent eget ligula quis turpis \
2143 feugiat suscipit vel non arcu. Interdum et malesuada fames ac ante ipsum primis in faucibus. \
2144 Aliquam sit amet placerat lorem.
2146 Cras a lacus vel ante posuere elementum. Nunc est leo, bibendum ut facilisis vel, bibendum at \
2147 mauris. Nullam adipiscing diam vel odio ornare, luctus adipiscing mi luctus. Nulla facilisi. \
2148 Mauris adipiscing bibendum neque, quis adipiscing lectus tempus et. Sed feugiat erat et nisl \
2149 lobortis pharetra. Donec vitae erat enim. Nullam sit amet felis et quam lacinia tincidunt. Aliquam \
2150 suscipit dapibus urna. Sed volutpat urna in magna pulvinar volutpat. Phasellus nec tellus ac diam \
2153 Nam lectus enim, dapibus non nisi tempor, consectetur convallis massa. Maecenas eleifend dictum \
2154 feugiat. Etiam quis mauris vel risus luctus mattis a a nunc. Nullam orci quam, imperdiet id \
2155 vehicula in, porttitor ut nibh. Duis sagittis adipiscing nisl vitae congue. Donec mollis risus eu \
2156 leo suscipit, varius porttitor nulla porta. Pellentesque ut sem nec nisi euismod vehicula. Nulla \
2157 malesuada sollicitudin quam eu fermentum!");
// Benchmark instantiations: each `make_test!` line expands into a suite of
// benchmarks running the given expression against all four fixture strings
// (short ASCII, short mixed-script, emoji, long Lorem Ipsum).
// NOTE(review): the closing `});` lines of the three trim blocks (original
// lines 2174, 2178, 2182) are elided from this listing; confirm against
// the complete source.
// -- char counting and containment --
2162 make_test!(chars_count, s, s.chars().count());
2164 make_test!(contains_bang_str, s, s.contains("!"));
2165 make_test!(contains_bang_char, s, s.contains('!'));
// -- match/split on a single-char str pattern --
2167 make_test!(match_indices_a_str, s, s.match_indices("a").count());
2169 make_test!(split_a_str, s, s.split("a").count());
// -- trimming with a char-predicate pattern (closure matching any ASCII) --
2171 make_test!(trim_ascii_char, s, {
2172 use std::ascii::AsciiExt;
2173 s.trim_matches(|c: char| c.is_ascii())
2175 make_test!(trim_left_ascii_char, s, {
2176 use std::ascii::AsciiExt;
2177 s.trim_left_matches(|c: char| c.is_ascii())
2179 make_test!(trim_right_ascii_char, s, {
2180 use std::ascii::AsciiExt;
2181 s.trim_right_matches(|c: char| c.is_ascii())
// -- find/rfind: char pattern vs. str pattern, ASCII needle --
2184 make_test!(find_underscore_char, s, s.find('_'));
2185 make_test!(rfind_underscore_char, s, s.rfind('_'));
2186 make_test!(find_underscore_str, s, s.find("_"));
// -- find/rfind with a 4-byte (emoji) needle --
2188 make_test!(find_zzz_char, s, s.find('\u{1F4A4}'));
2189 make_test!(rfind_zzz_char, s, s.rfind('\u{1F4A4}'));
2190 make_test!(find_zzz_str, s, s.find("\u{1F4A4}"));
// -- splitting variants on a space separator --
2192 make_test!(split_space_char, s, s.split(' ').count());
2193 make_test!(split_terminator_space_char, s, s.split_terminator(' ').count());
2195 make_test!(splitn_space_char, s, s.splitn(10, ' ').count());
2196 make_test!(rsplitn_space_char, s, s.rsplitn(10, ' ').count());
2198 make_test!(split_space_str, s, s.split(" ").count());
// -- splitting on a multi-byte str separator --
2199 make_test!(split_ad_str, s, s.split("ad").count());