]> git.lizzy.rs Git - rust.git/blob - src/libsyntax/ext/tt/macro_rules.rs
complete openbsd support for `std::env`
[rust.git] / src / libsyntax / ext / tt / macro_rules.rs
1 // Copyright 2015 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10
11 use ast::{self, TokenTree, TtDelimited, TtSequence, TtToken};
12 use codemap::{Span, DUMMY_SP};
13 use ext::base::{ExtCtxt, MacResult, SyntaxExtension};
14 use ext::base::{NormalTT, TTMacroExpander};
15 use ext::tt::macro_parser::{Success, Error, Failure};
16 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
17 use ext::tt::macro_parser::{parse, parse_or_else};
18 use parse::lexer::{new_tt_reader, new_tt_reader_with_doc_flag};
19 use parse::parser::Parser;
20 use parse::attr::ParserAttr;
21 use parse::token::{self, special_idents, gensym_ident, NtTT, Token};
22 use parse::token::Token::*;
23 use print;
24 use ptr::P;
25
26 use util::small_vector::SmallVector;
27
28 use std::cell::RefCell;
29 use std::rc::Rc;
30
/// A `MacResult` backed by a `Parser` positioned at the start of the
/// transcribed (expanded) token stream; each `make_*` method drives this
/// parser to produce the requested kind of AST fragment.
struct ParserAnyMacro<'a> {
    // `RefCell` because the `make_*` methods receive `Box<Self>` but only
    // shared access through `self.parser`, yet must advance the parser.
    parser: RefCell<Parser<'a>>,
}
34
impl<'a> ParserAnyMacro<'a> {
    /// Make sure we don't have any tokens left to parse, so we don't
    /// silently drop anything. `allow_semi` is so that "optional"
    /// semicolons at the end of normal expressions aren't complained
    /// about e.g. the semicolon in `macro_rules! kapow { () => {
    /// panic!(); } }` doesn't get picked up by .parse_expr(), but it's
    /// allowed to be there.
    fn ensure_complete_parse(&self, allow_semi: bool) {
        let mut parser = self.parser.borrow_mut();
        // Swallow a single trailing `;` when the caller says it is optional.
        if allow_semi && parser.token == token::Semi {
            parser.bump()
        }
        // Anything short of Eof here would be silently discarded by the
        // expansion, so surface it as an error at the leftover token's span.
        if parser.token != token::Eof {
            let token_str = parser.this_token_to_string();
            let msg = format!("macro expansion ignores token `{}` and any \
                               following",
                              token_str);
            let span = parser.span;
            parser.span_err(span, &msg[]);
        }
    }
}
57
impl<'a> MacResult for ParserAnyMacro<'a> {
    /// Parse the expansion as a single expression.
    /// `allow_semi: true` — a trailing `;` after the expression is tolerated.
    fn make_expr(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Expr>> {
        let ret = self.parser.borrow_mut().parse_expr();
        self.ensure_complete_parse(true);
        Some(ret)
    }
    /// Parse the expansion as a single pattern (no trailing `;` allowed).
    fn make_pat(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Pat>> {
        let ret = self.parser.borrow_mut().parse_pat();
        self.ensure_complete_parse(false);
        Some(ret)
    }
    /// Parse the expansion as zero or more items, collecting until the
    /// item parser yields `None`.
    fn make_items(self: Box<ParserAnyMacro<'a>>) -> Option<SmallVector<P<ast::Item>>> {
        let mut ret = SmallVector::zero();
        loop {
            // Borrow is re-taken each iteration so it is released before
            // `ensure_complete_parse` borrows again below.
            let mut parser = self.parser.borrow_mut();
            // so... do outer attributes attached to the macro invocation
            // just disappear? This question applies to make_methods, as
            // well.
            match parser.parse_item_with_outer_attributes() {
                Some(item) => ret.push(item),
                None => break
            }
        }
        self.ensure_complete_parse(false);
        Some(ret)
    }

    /// Parse the expansion as zero or more methods (with their outer
    /// attributes), stopping at Eof.
    fn make_methods(self: Box<ParserAnyMacro<'a>>) -> Option<SmallVector<P<ast::Method>>> {
        let mut ret = SmallVector::zero();
        loop {
            let mut parser = self.parser.borrow_mut();
            match parser.token {
                token::Eof => break,
                _ => {
                    let attrs = parser.parse_outer_attributes();
                    // ast::Inherited: methods from macro expansion get
                    // default (non-pub) visibility here.
                    ret.push(parser.parse_method(attrs, ast::Inherited))
                }
            }
        }
        self.ensure_complete_parse(false);
        Some(ret)
    }

    /// Parse the expansion as a single statement; a trailing `;` is
    /// tolerated (`allow_semi: true`).
    fn make_stmt(self: Box<ParserAnyMacro<'a>>) -> Option<P<ast::Stmt>> {
        // Two sequential borrow_mut() calls: each borrow ends at the end of
        // its own statement, so they never overlap.
        let attrs = self.parser.borrow_mut().parse_outer_attributes();
        let ret = self.parser.borrow_mut().parse_stmt(attrs);
        self.ensure_complete_parse(true);
        Some(ret)
    }
}
108
/// The runtime representation of a compiled `macro_rules!` macro: its name,
/// where it was imported from (if anywhere), and the parsed lhs/rhs token
/// trees of each arm, kept as `NamedMatch`es from the macro_rules grammar.
struct MacroRulesMacroExpander {
    name: ast::Ident,
    imported_from: Option<ast::Ident>,
    // lhses[i] pairs with rhses[i]; both are MatchedNonterminal(NtTT(..)).
    lhses: Vec<Rc<NamedMatch>>,
    rhses: Vec<Rc<NamedMatch>>,
}
115
impl TTMacroExpander for MacroRulesMacroExpander {
    /// Expand one invocation of this macro: delegates to `generic_extension`
    /// with this macro's stored arms.
    fn expand<'cx>(&self,
                   cx: &'cx mut ExtCtxt,
                   sp: Span,
                   arg: &[ast::TokenTree])
                   -> Box<MacResult+'cx> {
        generic_extension(cx,
                          sp,
                          self.name,
                          self.imported_from,
                          arg,
                          &self.lhses[],
                          &self.rhses[])
    }
}
131
/// Given `lhses` and `rhses`, this is the new macro we create
///
/// Tries each arm's lhs matcher in order against the invocation's token
/// trees `arg`.  On the first success, transcribes the matching arm's rhs
/// (with the captured `named_matches` interpolated) and returns a
/// `ParserAnyMacro` over the result, letting the call site decide what AST
/// kind to parse it as.  If no arm matches, reports the failure message
/// from whichever arm got furthest through the input.
fn generic_extension<'cx>(cx: &'cx ExtCtxt,
                          sp: Span,
                          name: ast::Ident,
                          imported_from: Option<ast::Ident>,
                          arg: &[ast::TokenTree],
                          lhses: &[Rc<NamedMatch>],
                          rhses: &[Rc<NamedMatch>])
                          -> Box<MacResult+'cx> {
    // `--pretty expanded` style tracing of each invocation.
    if cx.trace_macros() {
        println!("{}! {{ {} }}",
                 token::get_ident(name),
                 print::pprust::tts_to_string(arg));
    }

    // Which arm's failure should we report? (the one furthest along)
    let mut best_fail_spot = DUMMY_SP;
    let mut best_fail_msg = "internal error: ran no matchers".to_string();

    for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers
        match **lhs {
          MatchedNonterminal(NtTT(ref lhs_tt)) => {
            // The lhs was captured as a single tt; strip its delimiters to
            // get the actual matcher token trees.
            let lhs_tt = match **lhs_tt {
                TtDelimited(_, ref delim) => &delim.tts[],
                _ => cx.span_fatal(sp, "malformed macro lhs")
            };
            // `None` is because we're not interpolating
            let arg_rdr = new_tt_reader_with_doc_flag(&cx.parse_sess().span_diagnostic,
                                                      None,
                                                      None,
                                                      arg.iter()
                                                         .map(|x| (*x).clone())
                                                         .collect(),
                                                      true);
            match parse(cx.parse_sess(), cx.cfg(), arg_rdr, lhs_tt) {
              Success(named_matches) => {
                let rhs = match *rhses[i] {
                    // okay, what's your transcriber?
                    MatchedNonterminal(NtTT(ref tt)) => {
                        match **tt {
                            // ignore delimiters
                            TtDelimited(_, ref delimed) => delimed.tts.clone(),
                            _ => cx.span_fatal(sp, "macro rhs must be delimited"),
                        }
                    },
                    _ => cx.span_bug(sp, "bad thing in rhs")
                };
                // rhs has holes ( `$id` and `$(...)` that need filled)
                let trncbr = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                           Some(named_matches),
                                           imported_from,
                                           rhs);
                let mut p = Parser::new(cx.parse_sess(), cx.cfg(), box trncbr);
                p.check_unknown_macro_variable();
                // Let the context choose how to interpret the result.
                // Weird, but useful for X-macros.
                return box ParserAnyMacro {
                    parser: RefCell::new(p),
                } as Box<MacResult+'cx>
              }
              // Keep only the failure that consumed the most input, so the
              // eventual diagnostic points at the most plausible arm.
              Failure(sp, ref msg) => if sp.lo >= best_fail_spot.lo {
                best_fail_spot = sp;
                best_fail_msg = (*msg).clone();
              },
              // Hard parse errors abort immediately rather than trying
              // further arms.
              Error(sp, ref msg) => cx.span_fatal(sp, &msg[])
            }
          }
          _ => cx.bug("non-matcher found in parsed lhses")
        }
    }
    cx.span_fatal(best_fail_spot, &best_fail_msg[]);
}
204
205 // Note that macro-by-example's input is also matched against a token tree:
206 //                   $( $lhs:tt => $rhs:tt );+
207 //
208 // Holy self-referential!
209
/// Converts a `macro_rules!` invocation into a syntax extension.
///
/// Builds (by hand) the matcher grammar `$( $lhs:tt => $rhs:tt );+`,
/// parses the macro definition's body against it, extracts the per-arm
/// lhs/rhs captures, runs the FOLLOW-set check on each lhs, and packages
/// everything into a `NormalTT` extension backed by
/// `MacroRulesMacroExpander`.
pub fn compile<'cx>(cx: &'cx mut ExtCtxt,
                    def: &ast::MacroDef) -> SyntaxExtension {

    // Gensym'd so the capture names can't collide with user identifiers.
    let lhs_nm =  gensym_ident("lhs");
    let rhs_nm =  gensym_ident("rhs");

    // The pattern that macro_rules matches.
    // The grammar for macro_rules! is:
    // $( $lhs:tt => $rhs:tt );+
    // ...quasiquoting this would be nice.
    // These spans won't matter, anyways
    let match_lhs_tok = MatchNt(lhs_nm, special_idents::tt, token::Plain, token::Plain);
    let match_rhs_tok = MatchNt(rhs_nm, special_idents::tt, token::Plain, token::Plain);
    let argument_gram = vec!(
        // One-or-more `$lhs:tt => $rhs:tt` groups, semicolon-separated.
        TtSequence(DUMMY_SP,
                   Rc::new(ast::SequenceRepetition {
                       tts: vec![
                           TtToken(DUMMY_SP, match_lhs_tok),
                           TtToken(DUMMY_SP, token::FatArrow),
                           TtToken(DUMMY_SP, match_rhs_tok)],
                       separator: Some(token::Semi),
                       op: ast::OneOrMore,
                       num_captures: 2
                   })),
        //to phase into semicolon-termination instead of
        //semicolon-separation
        TtSequence(DUMMY_SP,
                   Rc::new(ast::SequenceRepetition {
                       tts: vec![TtToken(DUMMY_SP, token::Semi)],
                       separator: None,
                       op: ast::ZeroOrMore,
                       num_captures: 0
                   })));


    // Parse the macro_rules! invocation (`none` is for no interpolations):
    let arg_reader = new_tt_reader(&cx.parse_sess().span_diagnostic,
                                   None,
                                   None,
                                   def.body.clone());
    // parse_or_else aborts on failure, so argument_map is always complete.
    let argument_map = parse_or_else(cx.parse_sess(),
                                     cx.cfg(),
                                     arg_reader,
                                     argument_gram);

    // Extract the arguments:
    let lhses = match *argument_map[lhs_nm] {
        MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
        _ => cx.span_bug(def.span, "wrong-structured lhs")
    };

    // Emit FOLLOW-set diagnostics for each arm's matcher up front, at
    // definition time rather than at each use.
    for lhs in &lhses {
        check_lhs_nt_follows(cx, &**lhs, def.span);
    }

    let rhses = match *argument_map[rhs_nm] {
        MatchedSeq(ref s, _) => /* FIXME (#2543) */ (*s).clone(),
        _ => cx.span_bug(def.span, "wrong-structured rhs")
    };

    let exp = box MacroRulesMacroExpander {
        name: def.ident,
        imported_from: def.imported_from,
        lhses: lhses,
        rhses: rhses,
    };

    NormalTT(exp, Some(def.span))
}
280
/// Unwraps one arm's captured lhs and runs the FOLLOW-set check
/// (`check_matcher`) on its contents, with `Eof` as the follow token since
/// nothing comes after the whole matcher.  Errors are reported through `cx`;
/// this never aborts on a rejection.
fn check_lhs_nt_follows(cx: &mut ExtCtxt, lhs: &NamedMatch, sp: Span) {
    // lhs is going to be like MatchedNonterminal(NtTT(TtDelimited(...))), where the entire lhs is
    // those tts. Or, it can be a "bare sequence", not wrapped in parens.
    match lhs {
        &MatchedNonterminal(NtTT(ref inner)) => match &**inner {
            &TtDelimited(_, ref tts) => {
                check_matcher(cx, tts.tts.iter(), &Eof);
            },
            tt @ &TtSequence(..) => {
                // A bare sequence: check it as a one-element matcher.
                check_matcher(cx, Some(tt).into_iter(), &Eof);
            },
            _ => cx.span_bug(sp, "wrong-structured lhs for follow check (didn't find \
            a TtDelimited or TtSequence)")
        },
        _ => cx.span_bug(sp, "wrong-structured lhs for follow check (didn't find a \
           MatchedNonterminal)")
    };
    // we don't abort on errors on rejection, the driver will do that for us
    // after parsing/expansion. we can report every error in every macro this way.
}
301
// returns the last token that was checked, for TtSequence. this gets used later on.
//
// Walks `matcher`'s token trees and, for every simple NT (`$name:frag`),
// verifies that the token following it is in FOLLOW(frag) via
// `is_in_follow`.  `follow` stands in for "whatever comes after this
// matcher" when we run off the end.  Sequences recurse with the
// appropriate follow token; delimited groups recurse with their own close
// delimiter.  All rejections are reported through `cx` as errors; the walk
// continues so every problem in the macro is reported.
fn check_matcher<'a, I>(cx: &mut ExtCtxt, matcher: I, follow: &Token)
-> Option<(Span, Token)> where I: Iterator<Item=&'a TokenTree> {
    use print::pprust::token_to_string;

    let mut last = None;

    // 2. For each token T in M:
    let mut tokens = matcher.peekable();
    while let Some(token) = tokens.next() {
        last = match *token {
            TtToken(sp, MatchNt(ref name, ref frag_spec, _, _)) => {
                // ii. If T is a simple NT, look ahead to the next token T' in
                // M.
                let next_token = match tokens.peek() {
                    // If T' closes a complex NT, replace T' with F
                    Some(&&TtToken(_, CloseDelim(_))) => follow.clone(),
                    Some(&&TtToken(_, ref tok)) => tok.clone(),
                    Some(&&TtSequence(sp, _)) => {
                        // A sequence right after a simple NT is never allowed:
                        // its FIRST set is unknowable here.
                        cx.span_err(sp,
                                    format!("`${0}:{1}` is followed by a \
                                             sequence repetition, which is not \
                                             allowed for `{1}` fragments",
                                            name.as_str(), frag_spec.as_str())
                                        .as_slice());
                        Eof
                    },
                    // die next iteration
                    Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
                    // else, we're at the end of the macro or sequence
                    None => follow.clone()
                };

                let tok = if let TtToken(_, ref tok) = *token { tok } else { unreachable!() };
                // If T' is in the set FOLLOW(NT), continue. Else, reject.
                match (&next_token, is_in_follow(cx, &next_token, frag_spec.as_str())) {
                    // End of this (sub-)matcher: hand the NT back to the
                    // caller so the enclosing sequence can re-check it
                    // against the real follow token.
                    (&Eof, _) => return Some((sp, tok.clone())),
                    (_, Ok(true)) => continue,
                    (next, Ok(false)) => {
                        cx.span_err(sp, format!("`${0}:{1}` is followed by `{2}`, which \
                                                 is not allowed for `{1}` fragments",
                                                 name.as_str(), frag_spec.as_str(),
                                                 token_to_string(next)).as_slice());
                        continue
                    },
                    // Err: invalid fragment specifier, reported as-is.
                    (_, Err(msg)) => {
                        cx.span_err(sp, msg.as_slice());
                        continue
                    }
                }
            },
            TtSequence(sp, ref seq) => {
                // iii. Else, T is a complex NT.
                match seq.separator {
                    // If T has the form $(...)U+ or $(...)U* for some token U,
                    // run the algorithm on the contents with F set to U. If it
                    // accepts, continue, else, reject.
                    Some(ref u) => {
                        // NOTE: this `last` shadows the outer accumulator on
                        // purpose; the match arm's value is what updates it.
                        let last = check_matcher(cx, seq.tts.iter(), u);
                        match last {
                            // Since the delimiter isn't required after the last
                            // repetition, make sure that the *next* token is
                            // sane. This doesn't actually compute the FIRST of
                            // the rest of the matcher yet, it only considers
                            // single tokens and simple NTs. This is imprecise,
                            // but conservatively correct.
                            Some((span, tok)) => {
                                let fol = match tokens.peek() {
                                    Some(&&TtToken(_, ref tok)) => tok.clone(),
                                    Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
                                    Some(_) => {
                                        cx.span_err(sp, "sequence repetition followed by \
                                                another sequence repetition, which is not allowed");
                                        Eof
                                    },
                                    None => Eof
                                };
                                // Re-check just the trailing NT against the
                                // token that actually follows the sequence.
                                check_matcher(cx, Some(&TtToken(span, tok.clone())).into_iter(),
                                              &fol)
                            },
                            None => last,
                        }
                    },
                    // If T has the form $(...)+ or $(...)*, run the algorithm
                    // on the contents with F set to the token following the
                    // sequence. If it accepts, continue, else, reject.
                    None => {
                        let fol = match tokens.peek() {
                            Some(&&TtToken(_, ref tok)) => tok.clone(),
                            Some(&&TtDelimited(_, ref delim)) => delim.close_token(),
                            Some(_) => {
                                cx.span_err(sp, "sequence repetition followed by another \
                                             sequence repetition, which is not allowed");
                                Eof
                            },
                            None => Eof
                        };
                        check_matcher(cx, seq.tts.iter(), &fol)
                    }
                }
            },
            TtToken(..) => {
                // i. If T is not an NT, continue.
                continue
            },
            TtDelimited(_, ref tts) => {
                // if we don't pass in that close delimiter, we'll incorrectly consider the matcher
                // `{ $foo:ty }` as having a follow that isn't `RBrace`
                check_matcher(cx, tts.tts.iter(), &tts.close_token())
            }
        }
    }
    last
}
416
417 fn is_in_follow(_: &ExtCtxt, tok: &Token, frag: &str) -> Result<bool, String> {
418     if let &CloseDelim(_) = tok {
419         Ok(true)
420     } else {
421         match frag {
422             "item" => {
423                 // since items *must* be followed by either a `;` or a `}`, we can
424                 // accept anything after them
425                 Ok(true)
426             },
427             "block" => {
428                 // anything can follow block, the braces provide a easy boundary to
429                 // maintain
430                 Ok(true)
431             },
432             "stmt" | "expr"  => {
433                 match *tok {
434                     FatArrow | Comma | Semi => Ok(true),
435                     _ => Ok(false)
436                 }
437             },
438             "pat" => {
439                 match *tok {
440                     FatArrow | Comma | Eq => Ok(true),
441                     _ => Ok(false)
442                 }
443             },
444             "path" | "ty" => {
445                 match *tok {
446                     Comma | FatArrow | Colon | Eq | Gt => Ok(true),
447                     Ident(i, _) if i.as_str() == "as" => Ok(true),
448                     _ => Ok(false)
449                 }
450             },
451             "ident" => {
452                 // being a single token, idents are harmless
453                 Ok(true)
454             },
455             "meta" | "tt" => {
456                 // being either a single token or a delimited sequence, tt is
457                 // harmless
458                 Ok(true)
459             },
460             _ => Err(format!("invalid fragment specifier `{}`", frag))
461         }
462     }
463 }