]> git.lizzy.rs Git - rust.git/blob - src/tools/rust-analyzer/crates/mbe/src/benchmark.rs
Rollup merge of #101151 - jethrogb:jb/sgx-platform, r=JohnTitor
[rust.git] / src / tools / rust-analyzer / crates / mbe / src / benchmark.rs
//! This module adds real-world `macro_rules!` examples for benchmark tests.
2
3 use rustc_hash::FxHashMap;
4 use syntax::{
5     ast::{self, HasName},
6     AstNode, SmolStr,
7 };
8 use test_utils::{bench, bench_fixture, skip_slow_tests};
9
10 use crate::{
11     parser::{Op, RepeatKind, Separator},
12     syntax_node_to_token_tree, DeclarativeMacro,
13 };
14
#[test]
fn benchmark_parse_macro_rules() {
    // Benchmarks are opt-in; bail out unless slow tests are enabled.
    if skip_slow_tests() {
        return;
    }
    let definitions = macro_rules_fixtures_tt();
    // Parse every fixture definition under the bench timer and count the
    // total number of rules produced; the fixed total guards against the
    // parser silently dropping rules.
    let total_rules: usize = {
        let _pt = bench("mbe parse macro rules");
        let mut total = 0;
        for tt in definitions.values() {
            total += DeclarativeMacro::parse_macro_rules(tt).unwrap().rules.len();
        }
        total
    };
    assert_eq!(total_rules, 1144);
}
27
#[test]
fn benchmark_expand_macro_rules() {
    // Benchmarks are opt-in; bail out unless slow tests are enabled.
    if skip_slow_tests() {
        return;
    }
    let rules = macro_rules_fixtures();
    let invocations = invocation_fixtures(&rules);

    // Expand every generated invocation under the bench timer and count the
    // total number of produced token trees; the fixed total guards against
    // expansion regressions.
    let token_tree_total: usize = {
        let _pt = bench("mbe expand macro rules");
        let mut total = 0;
        for (id, tt) in invocations {
            let expansion = rules[&id].expand(&tt);
            assert!(expansion.err.is_none());
            total += expansion.value.token_trees.len();
        }
        total
    };
    assert_eq!(token_tree_total, 69413);
}
49
50 fn macro_rules_fixtures() -> FxHashMap<String, DeclarativeMacro> {
51     macro_rules_fixtures_tt()
52         .into_iter()
53         .map(|(id, tt)| (id, DeclarativeMacro::parse_macro_rules(&tt).unwrap()))
54         .collect()
55 }
56
57 fn macro_rules_fixtures_tt() -> FxHashMap<String, tt::Subtree> {
58     let fixture = bench_fixture::numerous_macro_rules();
59     let source_file = ast::SourceFile::parse(&fixture).ok().unwrap();
60
61     source_file
62         .syntax()
63         .descendants()
64         .filter_map(ast::MacroRules::cast)
65         .map(|rule| {
66             let id = rule.name().unwrap().to_string();
67             let (def_tt, _) = syntax_node_to_token_tree(rule.token_tree().unwrap().syntax());
68             (id, def_tt)
69         })
70         .collect()
71 }
72
73 /// Generate random invocation fixtures from rules
74 fn invocation_fixtures(rules: &FxHashMap<String, DeclarativeMacro>) -> Vec<(String, tt::Subtree)> {
75     let mut seed = 123456789;
76     let mut res = Vec::new();
77
78     for (name, it) in rules {
79         for rule in &it.rules {
80             // Generate twice
81             for _ in 0..2 {
82                 // The input are generated by filling the `Op` randomly.
83                 // However, there are some cases generated are ambiguous for expanding, for example:
84                 // ```rust
85                 // macro_rules! m {
86                 //    ($($t:ident),* as $ty:ident) => {}
87                 // }
88                 // m!(as u32);  // error: local ambiguity: multiple parsing options: built-in NTs ident ('t') or 1 other option.
89                 // ```
90                 //
91                 // So we just skip any error cases and try again
92                 let mut try_cnt = 0;
93                 loop {
94                     let mut subtree = tt::Subtree::default();
95                     for op in rule.lhs.iter() {
96                         collect_from_op(op, &mut subtree, &mut seed);
97                     }
98                     if it.expand(&subtree).err.is_none() {
99                         res.push((name.clone(), subtree));
100                         break;
101                     }
102                     try_cnt += 1;
103                     if try_cnt > 100 {
104                         panic!("invocaton fixture {} cannot be generated.\n", name);
105                     }
106                 }
107             }
108         }
109     }
110     return res;
111
112     fn collect_from_op(op: &Op, parent: &mut tt::Subtree, seed: &mut usize) {
113         return match op {
114             Op::Var { kind, .. } => match kind.as_ref().map(|it| it.as_str()) {
115                 Some("ident") => parent.token_trees.push(make_ident("foo")),
116                 Some("ty") => parent.token_trees.push(make_ident("Foo")),
117                 Some("tt") => parent.token_trees.push(make_ident("foo")),
118                 Some("vis") => parent.token_trees.push(make_ident("pub")),
119                 Some("pat") => parent.token_trees.push(make_ident("foo")),
120                 Some("path") => parent.token_trees.push(make_ident("foo")),
121                 Some("literal") => parent.token_trees.push(make_literal("1")),
122                 Some("expr") => parent.token_trees.push(make_ident("foo")),
123                 Some("lifetime") => {
124                     parent.token_trees.push(make_punct('\''));
125                     parent.token_trees.push(make_ident("a"));
126                 }
127                 Some("block") => {
128                     parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None))
129                 }
130                 Some("item") => {
131                     parent.token_trees.push(make_ident("fn"));
132                     parent.token_trees.push(make_ident("foo"));
133                     parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
134                     parent.token_trees.push(make_subtree(tt::DelimiterKind::Brace, None));
135                 }
136                 Some("meta") => {
137                     parent.token_trees.push(make_ident("foo"));
138                     parent.token_trees.push(make_subtree(tt::DelimiterKind::Parenthesis, None));
139                 }
140
141                 None => (),
142                 Some(kind) => panic!("Unhandled kind {}", kind),
143             },
144             Op::Leaf(leaf) => parent.token_trees.push(leaf.clone().into()),
145             Op::Repeat { tokens, kind, separator } => {
146                 let max = 10;
147                 let cnt = match kind {
148                     RepeatKind::ZeroOrMore => rand(seed) % max,
149                     RepeatKind::OneOrMore => 1 + rand(seed) % max,
150                     RepeatKind::ZeroOrOne => rand(seed) % 2,
151                 };
152                 for i in 0..cnt {
153                     for it in tokens.iter() {
154                         collect_from_op(it, parent, seed);
155                     }
156                     if i + 1 != cnt {
157                         if let Some(sep) = separator {
158                             match sep {
159                                 Separator::Literal(it) => {
160                                     parent.token_trees.push(tt::Leaf::Literal(it.clone()).into())
161                                 }
162                                 Separator::Ident(it) => {
163                                     parent.token_trees.push(tt::Leaf::Ident(it.clone()).into())
164                                 }
165                                 Separator::Puncts(puncts) => {
166                                     for it in puncts {
167                                         parent.token_trees.push(tt::Leaf::Punct(*it).into())
168                                     }
169                                 }
170                             };
171                         }
172                     }
173                 }
174             }
175             Op::Subtree { tokens, delimiter } => {
176                 let mut subtree = tt::Subtree { delimiter: *delimiter, token_trees: Vec::new() };
177                 tokens.iter().for_each(|it| {
178                     collect_from_op(it, &mut subtree, seed);
179                 });
180                 parent.token_trees.push(subtree.into());
181             }
182             Op::Ignore { .. } | Op::Index { .. } => {}
183         };
184
185         // Simple linear congruential generator for determistic result
186         fn rand(seed: &mut usize) -> usize {
187             let a = 1664525;
188             let c = 1013904223;
189             *seed = usize::wrapping_add(usize::wrapping_mul(*seed, a), c);
190             *seed
191         }
192         fn make_ident(ident: &str) -> tt::TokenTree {
193             tt::Leaf::Ident(tt::Ident { id: tt::TokenId::unspecified(), text: SmolStr::new(ident) })
194                 .into()
195         }
196         fn make_punct(char: char) -> tt::TokenTree {
197             tt::Leaf::Punct(tt::Punct {
198                 id: tt::TokenId::unspecified(),
199                 char,
200                 spacing: tt::Spacing::Alone,
201             })
202             .into()
203         }
204         fn make_literal(lit: &str) -> tt::TokenTree {
205             tt::Leaf::Literal(tt::Literal {
206                 id: tt::TokenId::unspecified(),
207                 text: SmolStr::new(lit),
208             })
209             .into()
210         }
211         fn make_subtree(
212             kind: tt::DelimiterKind,
213             token_trees: Option<Vec<tt::TokenTree>>,
214         ) -> tt::TokenTree {
215             tt::Subtree {
216                 delimiter: Some(tt::Delimiter { id: tt::TokenId::unspecified(), kind }),
217                 token_trees: token_trees.unwrap_or_default(),
218             }
219             .into()
220         }
221     }
222 }