// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use self::LockstepIterSize::*;

use ast;
use ast::{TokenTree, TtDelimited, TtToken, TtSequence, Ident};
use codemap::{Span, DUMMY_SP};
use diagnostic::SpanHandler;
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use parse::token::{Eof, DocComment, Interpolated, MatchNt, SubstNt};
use parse::token::{Token, NtIdent, SpecialMacroVar};
use parse::token;
use parse::lexer::TokenAndSpan;

use std::rc::Rc;
use std::ops::Add;
use std::collections::HashMap;

/// An unzipping of `TokenTree`s: one frame of the stack the reader walks
/// while flattening a tree of token trees into a stream of tokens.
#[derive(Clone)]
struct TtFrame {
    /// The token tree (or sequence) currently being walked.
    forest: TokenTree,
    /// Index of the next element of `forest` to visit.
    idx: usize,
    /// Whether this frame was pushed for a `$(...)` repetition.
    dotdotdoted: bool,
    /// Separator to emit between repetitions, if any.
    sep: Option<Token>,
}
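
// A sketch of the unzipping (hypothetical input, not code from this file):
// while transcribing `a ( b c ) d`, the reader holds a root frame for the
// whole token-tree list plus a child frame for the parenthesized group;
// tokens inside the group come from the child frame, and once it is
// exhausted the reader pops back to the root frame and resumes at `d`.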

#[derive(Clone)]
pub struct TtReader<'a> {
    pub sp_diag: &'a SpanHandler,
    /// The unzipped tree:
    stack: Vec<TtFrame>,
    /* for MBE-style macro transcription */
    interpolations: HashMap<Ident, Rc<NamedMatch>>,
    imported_from: Option<Ident>,

    // If `Some`, emit `imported_from` as the next token (used when expanding `$crate`).
    crate_name_next: Option<Span>,
    // Current index within, and total length of, each enclosing repetition.
    repeat_idx: Vec<usize>,
    repeat_len: Vec<usize>,
    /* cached: */
    pub cur_tok: Token,
    pub cur_span: Span,
    /// Transform doc comments into equivalent `#[doc = "..."]` attributes.
    /// Only useful in macro invocations.
    pub desugar_doc_comments: bool,
}

/// This can do Macro-By-Example transcription. If `src` contains no
/// `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can (and should) be
/// `None`.
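///
/// For example (an illustrative macro, not taken from this crate), when
/// transcribing the right-hand side of
///
/// ```ignore
/// macro_rules! pair { ($e:expr) => { ($e, $e) } }
/// ```
///
/// `interp` must map `e` to the expression it matched so that `$e` can be
/// substituted, whereas a `src` made only of plain tokens such as `{ 1 + 2 }`
/// needs no interpolations at all.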
pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
                         interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                         imported_from: Option<Ident>,
                         src: Vec<ast::TokenTree>)
                         -> TtReader<'a> {
    new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false)
}

/// The extra `desugar_doc_comments` flag enables reading doc comments
/// like any other attribute, i.e. as a `meta` item surrounded by `#[` `]` tokens.
///
/// This can do Macro-By-Example transcription. If `src` contains no
/// `TtSequence`s, `MatchNt`s or `SubstNt`s, `interp` can (and should) be
/// `None`.
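///
/// For instance (purely illustrative), with the flag set a doc comment such as
///
/// ```ignore
/// /// Adds two numbers.
/// ```
///
/// is handed back as the equivalent `#[doc = "Adds two numbers."]` attribute
/// tokens instead of as a single `DocComment` token.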
pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
                                       interp: Option<HashMap<Ident, Rc<NamedMatch>>>,
                                       imported_from: Option<Ident>,
                                       src: Vec<ast::TokenTree>,
                                       desugar_doc_comments: bool)
                                       -> TtReader<'a> {
    let mut r = TtReader {
        sp_diag: sp_diag,
        stack: vec!(TtFrame {
            forest: TtSequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
                tts: src,
                // doesn't matter. This merely holds the root unzipping.
                separator: None, op: ast::ZeroOrMore, num_captures: 0
            })),
            idx: 0,
            dotdotdoted: false,
            sep: None,
        }),
        interpolations: match interp { /* just a convenience */
            None => HashMap::new(),
            Some(x) => x,
        },
        imported_from: imported_from,
        crate_name_next: None,
        repeat_idx: Vec::new(),
        repeat_len: Vec::new(),
        desugar_doc_comments: desugar_doc_comments,
        /* dummy values, never read: */
        cur_tok: token::Eof,
        cur_span: DUMMY_SP,
    };
    tt_next_token(&mut r); /* get cur_tok and cur_span set up */
    r
}

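/// Descends into `start` following the reader's current repetition indices:
/// each entry of `repeat_idx` selects one element of a `MatchedSeq`, and once
/// a `MatchedNonterminal` is reached it is reused for the remaining levels
/// (a variable bound outside a repetition is duplicated on every iteration).
/// As an illustrative example (not code from this file): while transcribing
/// the body of `$( $( $x )* )*` with `repeat_idx == [1, 0]`, this returns the
/// match for the first `$x` of the second outer repetition.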
fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> {
    r.repeat_idx.iter().fold(start, |ad, idx| {
        match *ad {
            MatchedNonterminal(_) => {
                // end of the line; duplicate henceforth
                ad.clone()
            }
            MatchedSeq(ref ads, _) => ads[*idx].clone()
        }
    })
}

fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
    let matched_opt = r.interpolations.get(&name).cloned();
    matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
}

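/// How many times a `$(...)*` sequence should repeat during transcription:
/// every `$name` inside it that was matched as a sequence must agree on its
/// length (`LisConstraint`); disagreement yields `LisContradiction`, and a
/// body with no such variables is `LisUnconstrained`.
///
/// A sketch of the idea with a hypothetical macro (not code from this file):
///
/// ```ignore
/// macro_rules! zip {
///     ($($a:expr),* ; $($b:expr),*) => { [ $( ($a, $b) ),* ] }
/// }
/// ```
///
/// `zip!(1, 2 ; 3, 4)` transcribes with a lockstep size of 2, while
/// `zip!(1 ; 3, 4)` triggers the "inconsistent lockstep iteration" error below.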
#[derive(Clone)]
enum LockstepIterSize {
    LisUnconstrained,
    LisConstraint(usize, Ident),
    LisContradiction(String),
}

impl Add for LockstepIterSize {
    type Output = LockstepIterSize;

    fn add(self, other: LockstepIterSize) -> LockstepIterSize {
        match self {
            LisUnconstrained => other,
            LisContradiction(_) => self,
            LisConstraint(l_len, ref l_id) => match other {
                LisUnconstrained => self.clone(),
                LisContradiction(_) => other,
                LisConstraint(r_len, _) if l_len == r_len => self.clone(),
                LisConstraint(r_len, r_id) => {
                    let l_n = token::get_ident(l_id.clone());
                    let r_n = token::get_ident(r_id);
                    LisContradiction(format!("inconsistent lockstep iteration: \
                                              '{:?}' has {} items, but '{:?}' has {}",
                                             l_n, l_len, r_n, r_len))
                }
            },
        }
    }
}

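/// Computes the lockstep size of a whole token tree by folding the sizes of
/// every `$name` it contains with the `Add` impl above. As an illustration
/// (values are hypothetical): a sequence body containing `$a` and `$b`, both
/// matched with 2 items, folds to `LisConstraint(2, ..)`; if `$b` had matched
/// 3 items instead, the fold would collapse to a `LisContradiction`.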
fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
    match *t {
        TtDelimited(_, ref delimed) => {
            delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
                size + lockstep_iter_size(tt, r)
            })
        },
        TtSequence(_, ref seq) => {
            seq.tts.iter().fold(LisUnconstrained, |size, tt| {
                size + lockstep_iter_size(tt, r)
            })
        },
        TtToken(_, SubstNt(name, _)) | TtToken(_, MatchNt(name, _, _, _)) =>
            match lookup_cur_matched(r, name) {
                Some(matched) => match *matched {
                    MatchedNonterminal(_) => LisUnconstrained,
                    MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name),
                },
                _ => LisUnconstrained
            },
        TtToken(..) => LisUnconstrained,
    }
}

/// Returns the next token from the `TtReader`.
/// EFFECT: advances the reader's token fields (`cur_tok`, `cur_span`).
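///
/// A caller-side sketch (a hypothetical driver loop, not code from this
/// crate) of how the reader is typically drained:
///
/// ```ignore
/// let mut reader = new_tt_reader(sp_diag, Some(interpolations), None, rhs_tts);
/// loop {
///     let TokenAndSpan { tok, sp } = tt_next_token(&mut reader);
///     if tok == token::Eof { break; }
///     // hand `tok` and `sp` to the parser...
/// }
/// ```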
pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
    // FIXME(pcwalton): Bad copy?
    let ret_val = TokenAndSpan {
        tok: r.cur_tok.clone(),
        sp: r.cur_span.clone(),
    };
    loop {
        match r.crate_name_next.take() {
            None => (),
            Some(sp) => {
                r.cur_span = sp;
                r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain);
                return ret_val;
            },
        }
        let should_pop = match r.stack.last() {
            None => {
                assert_eq!(ret_val.tok, token::Eof);
                return ret_val;
            }
            Some(frame) => {
                if frame.idx < frame.forest.len() {
                    break;
                }
                !frame.dotdotdoted ||
                    *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
            }
        };

        /* done with this set; pop or repeat? */
        if should_pop {
            let prev = r.stack.pop().unwrap();
            match r.stack.last_mut() {
                None => {
                    r.cur_tok = token::Eof;
                    return ret_val;
                }
                Some(frame) => {
                    frame.idx += 1;
                }
            }
            if prev.dotdotdoted {
                r.repeat_idx.pop();
                r.repeat_len.pop();
            }
        } else { /* repeat */
            *r.repeat_idx.last_mut().unwrap() += 1;
            r.stack.last_mut().unwrap().idx = 0;
            match r.stack.last().unwrap().sep.clone() {
                Some(tk) => {
                    r.cur_tok = tk; /* repeat same span, I guess */
                    return ret_val;
                }
                None => {}
            }
        }
    }
    loop { /* for simplicity, this handles a `TtDelimited` that does not start
              with a `TtToken`, even though that cannot happen */
        let t = {
            let frame = r.stack.last().unwrap();
            // FIXME(pcwalton): Bad copy.
            frame.forest.get_tt(frame.idx)
        };
        match t {
            TtSequence(sp, seq) => {
                // FIXME(pcwalton): Bad copy.
                match lockstep_iter_size(&TtSequence(sp, seq.clone()),
                                         r) {
                    LisUnconstrained => {
                        r.sp_diag.span_fatal(
                            sp.clone(), /* blame macro writer */
                            "attempted to repeat an expression \
                             containing no syntax \
                             variables matched as repeating at this depth");
                    }
                    LisContradiction(ref msg) => {
                        // FIXME #2887 blame macro invoker instead
                        r.sp_diag.span_fatal(sp.clone(), &msg[..]);
                    }
                    LisConstraint(len, _) => {
                        if len == 0 {
                            if seq.op == ast::OneOrMore {
                                // FIXME #2887 blame invoker
                                r.sp_diag.span_fatal(sp.clone(),
                                                     "this must repeat at least once");
                            }

                            r.stack.last_mut().unwrap().idx += 1;
                            return tt_next_token(r);
                        }
                        r.repeat_len.push(len);
                        r.repeat_idx.push(0);
                        r.stack.push(TtFrame {
                            idx: 0,
                            dotdotdoted: true,
                            sep: seq.separator.clone(),
                            forest: TtSequence(sp, seq),
                        });
                    }
                }
            }
            // FIXME #2887: think about span stuff here
            TtToken(sp, SubstNt(ident, namep)) => {
                r.stack.last_mut().unwrap().idx += 1;
                match lookup_cur_matched(r, ident) {
                    None => {
                        r.cur_span = sp;
                        r.cur_tok = SubstNt(ident, namep);
                        return ret_val;
                        // this can't be 0 length, just like TtDelimited
                    }
                    Some(cur_matched) => {
                        match *cur_matched {
                            // sidestep the interpolation tricks for ident because
                            // (a) idents can be in lots of places, so it'd be a pain
                            // (b) we actually can, since it's a token.
                            MatchedNonterminal(NtIdent(box sn, b)) => {
                                r.cur_span = sp;
                                r.cur_tok = token::Ident(sn, b);
                                return ret_val;
                            }
                            MatchedNonterminal(ref other_whole_nt) => {
                                // FIXME(pcwalton): Bad copy.
                                r.cur_span = sp;
                                r.cur_tok = token::Interpolated((*other_whole_nt).clone());
                                return ret_val;
                            }
                            MatchedSeq(..) => {
                                r.sp_diag.span_fatal(
                                    r.cur_span, /* blame the macro writer */
                                    &format!("variable '{:?}' is still repeating at this depth",
                                             token::get_ident(ident)));
                            }
                        }
                    }
                }
            }
            // TtDelimited or any token that can be unzipped
            seq @ TtDelimited(..) | seq @ TtToken(_, MatchNt(..)) => {
                // do not advance the idx yet
                r.stack.push(TtFrame {
                    forest: seq,
                    idx: 0,
                    dotdotdoted: false,
                    sep: None
                });
                // if this could be 0-length, we'd need to potentially recur here
            }
            TtToken(sp, DocComment(name)) if r.desugar_doc_comments => {
                r.stack.push(TtFrame {
                    forest: TtToken(sp, DocComment(name)),
                    idx: 0,
                    dotdotdoted: false,
                    sep: None
                });
            }
            TtToken(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
                r.stack.last_mut().unwrap().idx += 1;

                if r.imported_from.is_some() {
                    r.cur_span = sp;
                    r.cur_tok = token::ModSep;
                    r.crate_name_next = Some(sp);
                    return ret_val;
                }

                // otherwise emit nothing and proceed to the next token
            }
            TtToken(sp, tok) => {
                r.cur_span = sp;
                r.cur_tok = tok;
                r.stack.last_mut().unwrap().idx += 1;
                return ret_val;
            }
        }
    }
}