// Source: src/test/ui/proc-macro/auxiliary/expand-expr.rs (rust.git)
1 // force-host
2 // no-prefer-dynamic
3
4 #![crate_type = "proc-macro"]
5 #![deny(warnings)]
6 #![feature(proc_macro_expand, proc_macro_span)]
7
8 extern crate proc_macro;
9
10 use proc_macro::*;
11 use std::str::FromStr;
12
13 // Flatten the TokenStream, removing any toplevel `Delimiter::None`s for
14 // comparison.
// Recursively flatten a TokenStream, splicing the contents of any
// toplevel `Delimiter::None` groups into the result so streams can be
// compared structurally.
fn flatten(ts: TokenStream) -> Vec<TokenTree> {
    let mut out = Vec::new();
    for tt in ts {
        match tt {
            TokenTree::Group(ref group) if group.delimiter() == Delimiter::None => {
                out.extend(flatten(group.stream()));
            }
            other => out.push(other),
        }
    }
    out
}
25
26 // Assert that two TokenStream values are roughly equal to one-another.
27 fn assert_ts_eq(lhs: &TokenStream, rhs: &TokenStream) {
28     let ltts = flatten(lhs.clone());
29     let rtts = flatten(rhs.clone());
30
31     if ltts.len() != rtts.len() {
32         panic!(
33             "expected the same number of tts ({} == {})\nlhs:\n{:#?}\nrhs:\n{:#?}",
34             ltts.len(),
35             rtts.len(),
36             lhs,
37             rhs
38         )
39     }
40
41     for (ltt, rtt) in ltts.iter().zip(&rtts) {
42         match (ltt, rtt) {
43             (TokenTree::Group(l), TokenTree::Group(r)) => {
44                 assert_eq!(
45                     l.delimiter(),
46                     r.delimiter(),
47                     "expected delimiters to match for {:?} and {:?}",
48                     l,
49                     r
50                 );
51                 assert_ts_eq(&l.stream(), &r.stream());
52             }
53             (TokenTree::Punct(l), TokenTree::Punct(r)) => assert_eq!(
54                 (l.as_char(), l.spacing()),
55                 (r.as_char(), r.spacing()),
56                 "expected punct to match for {:?} and {:?}",
57                 l,
58                 r
59             ),
60             (TokenTree::Ident(l), TokenTree::Ident(r)) => assert_eq!(
61                 l.to_string(),
62                 r.to_string(),
63                 "expected ident to match for {:?} and {:?}",
64                 l,
65                 r
66             ),
67             (TokenTree::Literal(l), TokenTree::Literal(r)) => assert_eq!(
68                 l.to_string(),
69                 r.to_string(),
70                 "expected literal to match for {:?} and {:?}",
71                 l,
72                 r
73             ),
74             (l, r) => panic!("expected type to match for {:?} and {:?}", l, r),
75         }
76     }
77 }
78
79 #[proc_macro]
80 pub fn expand_expr_is(input: TokenStream) -> TokenStream {
81     let mut iter = input.into_iter();
82     let mut expected_tts = Vec::new();
83     loop {
84         match iter.next() {
85             Some(TokenTree::Punct(ref p)) if p.as_char() == ',' => break,
86             Some(tt) => expected_tts.push(tt),
87             None => panic!("expected comma"),
88         }
89     }
90
91     let expected = expected_tts.into_iter().collect::<TokenStream>();
92     let expanded = iter.collect::<TokenStream>().expand_expr().expect("expand_expr failed");
93     assert!(
94         expected.to_string() == expanded.to_string(),
95         "assert failed\nexpected: `{}`\nexpanded: `{}`",
96         expected.to_string(),
97         expanded.to_string()
98     );
99
100     // Also compare the raw tts to make sure they line up.
101     assert_ts_eq(&expected, &expanded);
102
103     TokenStream::new()
104 }
105
106 #[proc_macro]
107 pub fn expand_expr_fail(input: TokenStream) -> TokenStream {
108     match input.expand_expr() {
109         Ok(ts) => panic!("expand_expr unexpectedly succeeded: `{}`", ts),
110         Err(_) => TokenStream::new(),
111     }
112 }
113
114 #[proc_macro]
115 pub fn check_expand_expr_file(ts: TokenStream) -> TokenStream {
116     // Check that the passed in `file!()` invocation and a parsed `file!`
117     // invocation expand to the same literal.
118     let input_t = ts.expand_expr().expect("expand_expr failed on macro input").to_string();
119     let parse_t = TokenStream::from_str("file!{}")
120         .unwrap()
121         .expand_expr()
122         .expect("expand_expr failed on internal macro")
123         .to_string();
124     assert_eq!(input_t, parse_t);
125
126     // Check that the literal matches `Span::call_site().source_file().path()`
127     let expect_t =
128         Literal::string(&Span::call_site().source_file().path().to_string_lossy()).to_string();
129     assert_eq!(input_t, expect_t);
130
131     TokenStream::new()
132 }
133
134 #[proc_macro]
135 pub fn recursive_expand(_: TokenStream) -> TokenStream {
136     // Recursively call until we hit the recursion limit and get an error.
137     //
138     // NOTE: This doesn't panic if expansion fails because that'll cause a very
139     // large number of errors to fill the output.
140     TokenStream::from_str("recursive_expand!{}")
141         .unwrap()
142         .expand_expr()
143         .unwrap_or(std::iter::once(TokenTree::Literal(Literal::u32_suffixed(0))).collect())
144 }
145
/// Identity proc-macro: echoes its input token stream back unchanged.
#[proc_macro]
pub fn echo_pm(input: TokenStream) -> TokenStream {
    input
}