]> git.lizzy.rs Git - rust.git/blob - src/libsyntax/ext/tt/transcribe.rs
rollup merge of #20482: kmcallister/macro-reform
[rust.git] / src / libsyntax / ext / tt / transcribe.rs
1 // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
4 //
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10 use self::LockstepIterSize::*;
11
12 use ast;
13 use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident};
14 use codemap::{Span, DUMMY_SP};
15 use diagnostic::SpanHandler;
16 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
17 use parse::token::{Eof, DocComment, Interpolated, MatchNt, SubstNt};
18 use parse::token::{Token, NtIdent, SpecialMacroVar};
19 use parse::token;
20 use parse::lexer::TokenAndSpan;
21
22 use std::rc::Rc;
23 use std::ops::Add;
24 use std::collections::HashMap;
25
/// An unzipping of `TokenTree`s: one level of the tree being walked,
/// plus the position reached within it.
#[derive(Clone)]
struct TtFrame {
    /// The token tree this frame is walking.
    forest: TokenTree,
    /// Index of the next sub-tree of `forest` to visit.
    idx: uint,
    /// True if this frame was pushed for a `TtSequence` repetition,
    /// i.e. it has entries in the reader's `repeat_idx`/`repeat_len`.
    dotdotdoted: bool,
    /// Separator token to emit between repetitions, if any.
    sep: Option<Token>,
}
34
/// A reader that produces tokens by walking `TokenTree`s, performing
/// Macro-By-Example substitution along the way.
#[derive(Clone)]
pub struct TtReader<'a> {
    pub sp_diag: &'a SpanHandler,
    /// the unzipped tree: a stack of frames, innermost last
    stack: Vec<TtFrame>,
    /* for MBE-style macro transcription */
    interpolations: HashMap<Ident, Rc<NamedMatch>>,
    imported_from: Option<Ident>,

    // Some => return imported_from as the next token
    crate_name_next: Option<Span>,
    /// Current index within each enclosing repetition, innermost last.
    repeat_idx: Vec<uint>,
    /// Total length of each enclosing repetition; parallel to `repeat_idx`.
    repeat_len: Vec<uint>,
    /* cached: */
    pub cur_tok: Token,
    pub cur_span: Span,
    /// Transform doc comments. Only useful in macro invocations
    pub desugar_doc_comments: bool,
}
54
55 /// This can do Macro-By-Example transcription. On the other hand, if
56 /// `src` contains no `TtSequence`s and `TtNonterminal`s, `interp` can (and
57 /// should) be none.
58 pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
59                          interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
60                          imported_from: Option<Ident>,
61                          src: Vec<ast::TokenTree> )
62                          -> TtReader<'a> {
63     let mut r = TtReader {
64         sp_diag: sp_diag,
65         stack: vec!(TtFrame {
66             forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
67                 tts: src,
68                 // doesn't matter. This merely holds the root unzipping.
69                 separator: None, op: ast::ZeroOrMore, num_captures: 0
70             })),
71             idx: 0,
72             dotdotdoted: false,
73             sep: None,
74         }),
75         interpolations: match interp { /* just a convenience */
76             None => HashMap::new(),
77             Some(x) => x,
78         },
79         imported_from: imported_from,
80         crate_name_next: None,
81         repeat_idx: Vec::new(),
82         repeat_len: Vec::new(),
83         desugar_doc_comments: false,
84         /* dummy values, never read: */
85         cur_tok: token::Eof,
86         cur_span: DUMMY_SP,
87     };
88     tt_next_token(&mut r); /* get cur_tok and cur_span set up */
89     r
90 }
91
92 fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> {
93     r.repeat_idx.iter().fold(start, |ad, idx| {
94         match *ad {
95             MatchedNonterminal(_) => {
96                 // end of the line; duplicate henceforth
97                 ad.clone()
98             }
99             MatchedSeq(ref ads, _) => ads[*idx].clone()
100         }
101     })
102 }
103
104 fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
105     let matched_opt = r.interpolations.get(&name).cloned();
106     matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
107 }
108
/// The repetition-length constraint accumulated while measuring the
/// fragments inside a `TtSequence`.
#[derive(Clone)]
enum LockstepIterSize {
    /// No constraint yet: no repeating syntax variable encountered.
    LisUnconstrained,
    /// Exactly this many repetitions, as demanded by the named variable.
    LisConstraint(uint, Ident),
    /// Two variables demanded different lengths; the message explains which.
    LisContradiction(String),
}
115
116 impl Add for LockstepIterSize {
117     type Output = LockstepIterSize;
118
119     fn add(self, other: LockstepIterSize) -> LockstepIterSize {
120         match self {
121             LisUnconstrained => other,
122             LisContradiction(_) => self,
123             LisConstraint(l_len, ref l_id) => match other {
124                 LisUnconstrained => self.clone(),
125                 LisContradiction(_) => other,
126                 LisConstraint(r_len, _) if l_len == r_len => self.clone(),
127                 LisConstraint(r_len, r_id) => {
128                     let l_n = token::get_ident(l_id.clone());
129                     let r_n = token::get_ident(r_id);
130                     LisContradiction(format!("inconsistent lockstep iteration: \
131                                               '{}' has {} items, but '{}' has {}",
132                                               l_n, l_len, r_n, r_len).to_string())
133                 }
134             },
135         }
136     }
137 }
138
139 fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
140     match *t {
141         TtDelimited(_, ref delimed) => {
142             delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
143                 size + lockstep_iter_size(tt, r)
144             })
145         },
146         TtSequence(_, ref seq) => {
147             seq.tts.iter().fold(LisUnconstrained, |size, tt| {
148                 size + lockstep_iter_size(tt, r)
149             })
150         },
151         TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) =>
152             match lookup_cur_matched(r, name) {
153                 Some(matched) => match *matched {
154                     MatchedNonterminal(_) => LisUnconstrained,
155                     MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name),
156                 },
157                 _ => LisUnconstrained
158             },
159         TtToken(..) => LisUnconstrained,
160     }
161 }
162
/// Return the next token from the TtReader.
/// EFFECT: advances the reader's token field
///
/// Note: the token RETURNED is the one cached in `r` on entry; this call
/// computes the FOLLOWING token and caches it in `r.cur_tok`/`r.cur_span`
/// for the next call.
pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
    // FIXME(pcwalton): Bad copy?
    let ret_val = TokenAndSpan {
        tok: r.cur_tok.clone(),
        sp: r.cur_span.clone(),
    };
    // Phase 1: unwind finished frames until the top frame has a tree left
    // to yield (break), or the stack empties (Eof).
    loop {
        // A pending `$crate` expansion: `ModSep` was already emitted, now
        // emit the imported crate's name.
        match r.crate_name_next.take() {
            None => (),
            Some(sp) => {
                r.cur_span = sp;
                r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain);
                return ret_val;
            },
        }
        let should_pop = match r.stack.last() {
            None => {
                assert_eq!(ret_val.tok, token::Eof);
                return ret_val;
            }
            Some(frame) => {
                if frame.idx < frame.forest.len() {
                    // Frame still has trees to yield; go to phase 2.
                    break;
                }
                // Frame exhausted: pop unless it is a repetition with
                // iterations remaining.
                !frame.dotdotdoted ||
                    *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
            }
        };

        /* done with this set; pop or repeat? */
        if should_pop {
            let prev = r.stack.pop().unwrap();
            match r.stack.last_mut() {
                None => {
                    // Root frame exhausted: signal end of input.
                    r.cur_tok = token::Eof;
                    return ret_val;
                }
                Some(frame) => {
                    // Advance the parent past the tree we just finished.
                    frame.idx += 1;
                }
            }
            if prev.dotdotdoted {
                // Leaving a repetition: drop its depth bookkeeping.
                r.repeat_idx.pop();
                r.repeat_len.pop();
            }
        } else { /* repeat */
            // Start the next iteration of the repetition from the top.
            *r.repeat_idx.last_mut().unwrap() += 1u;
            r.stack.last_mut().unwrap().idx = 0;
            match r.stack.last().unwrap().sep.clone() {
                Some(tk) => {
                    r.cur_tok = tk; /* repeat same span, I guess */
                    return ret_val;
                }
                None => {}
            }
        }
    }
    // Phase 2: descend into the current tree until a plain token is reached.
    loop { /* because it's easiest, this handles `TtDelimited` not starting
              with a `TtToken`, even though it won't happen */
        let t = {
            let frame = r.stack.last().unwrap();
            // FIXME(pcwalton): Bad copy.
            frame.forest.get_tt(frame.idx)
        };
        match t {
            TtSequence(sp, seq) => {
                // Determine how many times this `$(...)` should repeat by
                // measuring the variables it contains.
                // FIXME(pcwalton): Bad copy.
                match lockstep_iter_size(&TtSequence(sp, seq.clone()),
                                         r) {
                    LisUnconstrained => {
                        r.sp_diag.span_fatal(
                            sp.clone(), /* blame macro writer */
                            "attempted to repeat an expression \
                             containing no syntax \
                             variables matched as repeating at this depth");
                    }
                    LisContradiction(ref msg) => {
                        // FIXME #2887 blame macro invoker instead
                        r.sp_diag.span_fatal(sp.clone(), msg[]);
                    }
                    LisConstraint(len, _) => {
                        if len == 0 {
                            if seq.op == ast::OneOrMore {
                                // FIXME #2887 blame invoker
                                r.sp_diag.span_fatal(sp.clone(),
                                                     "this must repeat at least once");
                            }

                            // Zero repetitions: skip the sequence entirely
                            // and restart token selection.
                            r.stack.last_mut().unwrap().idx += 1;
                            return tt_next_token(r);
                        }
                        // Enter the repetition: record its length and push
                        // a frame that walks the sequence body.
                        r.repeat_len.push(len);
                        r.repeat_idx.push(0);
                        r.stack.push(TtFrame {
                            idx: 0,
                            dotdotdoted: true,
                            sep: seq.separator.clone(),
                            forest: TtSequence(sp, seq),
                        });
                    }
                }
            }
            // FIXME #2887: think about span stuff here
            TtToken(sp, SubstNt(ident, namep)) => {
                match lookup_cur_matched(r, ident) {
                    None => {
                        // `$ident` names no binding: emit it literally by
                        // unzipping the token itself.
                        r.stack.push(TtFrame {
                            forest: TtToken(sp, SubstNt(ident, namep)),
                            idx: 0,
                            dotdotdoted: false,
                            sep: None
                        });
                        // this can't be 0 length, just like TtDelimited
                    }
                    Some(cur_matched) => {
                        r.stack.last_mut().unwrap().idx += 1;
                        match *cur_matched {
                            // sidestep the interpolation tricks for ident because
                            // (a) idents can be in lots of places, so it'd be a pain
                            // (b) we actually can, since it's a token.
                            MatchedNonterminal(NtIdent(box sn, b)) => {
                                r.cur_span = sp;
                                r.cur_tok = token::Ident(sn, b);
                                return ret_val;
                            }
                            MatchedNonterminal(ref other_whole_nt) => {
                                // FIXME(pcwalton): Bad copy.
                                r.cur_span = sp;
                                r.cur_tok = token::Interpolated((*other_whole_nt).clone());
                                return ret_val;
                            }
                            MatchedSeq(..) => {
                                // The variable is still inside a repetition
                                // at this depth: the writer forgot a `$(...)`.
                                r.sp_diag.span_fatal(
                                    r.cur_span, /* blame the macro writer */
                                    format!("variable '{}' is still repeating at this depth",
                                            token::get_ident(ident))[]);
                            }
                        }
                    }
                }
            }
            // TtDelimited or any token that can be unzipped
            seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => {
                // do not advance the idx yet
                r.stack.push(TtFrame {
                   forest: seq,
                   idx: 0,
                   dotdotdoted: false,
                   sep: None
                });
                // if this could be 0-length, we'd need to potentially recur here
            }
            TtToken(sp, DocComment(name)) if r.desugar_doc_comments => {
                // Desugar the doc comment by unzipping it into its
                // attribute-form tokens.
                r.stack.push(TtFrame {
                   forest: TtToken(sp, DocComment(name)),
                   idx: 0,
                   dotdotdoted: false,
                   sep: None
                });
            }
            TtToken(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
                r.stack.last_mut().unwrap().idx += 1;

                if r.imported_from.is_some() {
                    // `$crate` in an imported macro: emit `::` now and
                    // schedule the crate name for the next call.
                    r.cur_span = sp;
                    r.cur_tok = token::ModSep;
                    r.crate_name_next = Some(sp);
                    return ret_val;
                }

                // otherwise emit nothing and proceed to the next token
            }
            TtToken(sp, tok) => {
                // Ordinary token: cache it and yield the previous one.
                r.cur_span = sp;
                r.cur_tok = tok;
                r.stack.last_mut().unwrap().idx += 1;
                return ret_val;
            }
        }
    }
}