//! This module adds real-world mbe examples for benchmark tests

use rustc_hash::FxHashMap;
use syntax::{
    ast::{self, NameOwner},
    AstNode, SmolStr,
};
use test_utils::{bench, bench_fixture, skip_slow_tests};

use crate::{
    ast_to_token_tree,
    parser::{Op, RepeatKind, Separator},
    MacroRules,
};

#[test]
fn benchmark_parse_macro_rules() {
    if skip_slow_tests() {
        return;
    }
    let rules = macro_rules_fixtures_tt();
    let hash: usize = {
        let _pt = bench("mbe parse macro rules");
        rules.values().map(|it| MacroRules::parse(it).unwrap().rules.len()).sum()
    };
    assert_eq!(hash, 1144);
}

#[test]
fn benchmark_expand_macro_rules() {
    if skip_slow_tests() {
        return;
    }
    let rules = macro_rules_fixtures();
    let invocations = invocation_fixtures(&rules);

    let hash: usize = {
        let _pt = bench("mbe expand macro rules");
        invocations
            .into_iter()
            .map(|(id, tt)| {
                let res = rules[&id].expand(&tt);
                assert!(res.err.is_none());
                res.value.token_trees.len()
            })
            .sum()
    };
    assert_eq!(hash, 69413);
}

fn macro_rules_fixtures() -> FxHashMap<String, MacroRules> {
    macro_rules_fixtures_tt()
        .into_iter()
        .map(|(id, tt)| (id, MacroRules::parse(&tt).unwrap()))
        .collect()
}

fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
    let fixture = bench_fixture::numerous_macro_rules();
    let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();

    source_file
        .syntax()
        .descendants()
        .filter_map(ast::MacroRules::cast)
        .map(|rule| {
            let id = rule.name().unwrap().to_string();
            let (def_tt, _) = ast_to_token_tree(&rule.token_tree().unwrap());
            (id, def_tt)
        })
        .collect()
}

/// Generate random invocation fixtures from rules
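/// (deterministic: a fixed seed drives the pseudo-random choices below, so the fixtures are reproducible)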
fn invocation_fixtures(rules: &FxHashMap<String, MacroRules>) -> Vec<(String, tt::Subtree)> {
    let mut seed = 123456789;
    let mut res = Vec::new();

    for (name, it) in rules {
        for rule in &it.rules {
            // Generate two invocations per rule
            for _ in 0..2 {
                // The inputs are generated by filling each `Op` with random tokens.
                // However, some of the generated inputs are ambiguous to expand, for example:
                // ```rust
                // macro_rules! m {
                //    ($($t:ident),* as $ty:ident) => {}
                // }
                // m!(as u32);  // error: local ambiguity: multiple parsing options: built-in NTs ident ('t') or 1 other option.
                // ```
                //
                // So we just skip any erroring cases and try again.
                let mut try_cnt = 0;
                loop {
                    let mut subtree = tt::Subtree::default();
                    for op in rule.lhs.iter() {
                        collect_from_op(op, &mut subtree, &mut seed);
                    }
                    if it.expand(&subtree).err.is_none() {
                        res.push((name.clone(), subtree));
                        break;
                    }
                    try_cnt += 1;
                    if try_cnt > 100 {
                        panic!("invocation fixture {} cannot be generated.\n", name);
                    }
                }
            }
        }
    }
    return res;

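    // Emits representative tokens for a single `Op` into `parent`: fixed placeholder
    // tokens for fragment variables, the leaf itself for `Op::Leaf`, and a seeded-random
    // repetition count for `Op::Repeat`.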
    fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
        return match op {
            Op::Var { kind, .. } => match kind.as_ref().map(|it| it.as_str()) {
                Some("ident") => parent.token_trees.push(make_ident("foo")),
                Some("ty") => parent.token_trees.push(make_ident("Foo")),
                Some("tt") => parent.token_trees.push(make_ident("foo")),
                Some("vis") => parent.token_trees.push(make_ident("pub")),
                Some("pat") => parent.token_trees.push(make_ident("foo")),
                Some("path") => parent.token_trees.push(make_ident("foo")),
                Some("literal") => parent.token_trees.push(make_literal("1")),
                Some("expr") => parent.token_trees.push(make_ident("foo")),
                Some("lifetime") => {
                    parent.token_trees.push(make_punct('\''));
                    parent.token_trees.push(make_ident("a"));
                }
                Some("block") => {
                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None))
                }
                Some("item") => {
                    parent.token_trees.push(make_ident("fn"));
                    parent.token_trees.push(make_ident("foo"));
                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None));
                }
                Some("meta") => {
                    parent.token_trees.push(make_ident("foo"));
                    parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
                }

                None => (),
                Some(kind) => panic!("Unhandled kind {}", kind),
            },
            Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()),
            Op::Repeat { tokens, kind, separator } => {
                let max = 10;
                let cnt = match kind {
                    RepeatKind::ZeroOrMore => rand(seed) % max,
                    RepeatKind::OneOrMore => 1 + rand(seed) % max,
                    RepeatKind::ZeroOrOne => rand(seed) % 2,
                };
                for i in 0..cnt {
                    for it in tokens.iter() {
                        collect_from_op(it, parent, seed);
                    }
                    if i + 1 != cnt {
                        if let Some(sep) = separator {
                            match sep {
                                Separator::Literal(it) => {
                                    parent.token_trees.push(tt::Leaf::Literal(it.clone()).into())
                                }
                                Separator::Ident(it) => {
                                    parent.token_trees.push(tt::Leaf::Ident(it.clone()).into())
                                }
                                Separator::Puncts(puncts) => {
                                    for it in puncts {
                                        parent.token_trees.push(tt::Leaf::Punct(*it).into())
                                    }
                                }
                            };
                        }
                    }
                }
            }
            Op::Subtree { tokens, delimiter } => {
                let mut subtree = tt::Subtree { delimiter: *delimiter, token_trees: Vec::new() };
                tokens.iter().for_each(|it| {
                    collect_from_op(it, &mut subtree, seed);
                });
                parent.token_trees.push(subtree.into());
            }
        };

        // Simple linear congruential generator for deterministic results
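        // (a = 1664525 and c = 1013904223 are the classic Numerical Recipes LCG constants,
        // applied here with wrapping `usize` arithmetic.)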
        fn rand(seed: &mut usize) -> usize {
            let a = 1664525;
            let c = 1013904223;
            *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
            *seed
        }
        fn make_ident(ident: &str) -> tt::TokenTree {
            tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })
                .into()
        }
        fn make_punct(char: char) -> tt::TokenTree {
            tt::Leaf::Punct(tt::Punct {
                id: tt::TokenId::unspecified(),
                char,
                spacing: tt::Spacing::Alone,
            })
            .into()
        }
        fn make_literal(lit: &str) -> tt::TokenTree {
            tt::Leaf::Literal(tt::Literal {
                id: tt::TokenId::unspecified(),
                text: SmolStr::new(lit),
            })
            .into()
        }
        fn make_subtree(
            kind: tt::DelimiterKind,
            token_trees: Option<Vec<tt::TokenTree>>,
        ) -> tt::TokenTree {
            tt::Subtree {
                delimiter: Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }),
                token_trees: token_trees.unwrap_or_default(),
            }
            .into()
        }
    }
}