// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use self::LockstepIterSize::*;

use ast::{Ident, Name};
use syntax_pos::{Span, DUMMY_SP};
use errors::{Handler, DiagnosticBuilder};
use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
use parse::token::{DocComment, MatchNt, SubstNt};
use parse::token::{Token, Interpolated, NtIdent, NtTT, SpecialMacroVar};
use parse::token;
use parse::lexer::TokenAndSpan;
use tokenstream::{self, TokenTree};

use std::rc::Rc;
use std::ops::Add;
use std::collections::HashMap;
26 ///an unzipping of `TokenTree`s
36 pub struct TtReader<'a> {
37 pub sp_diag: &'a Handler,
38 /// the unzipped tree:
40 /* for MBE-style macro transcription */
41 interpolations: HashMap<Name, Rc<NamedMatch>>,
42 imported_from: Option<Ident>,
44 // Some => return imported_from as the next token
45 crate_name_next: Option<Span>,
46 repeat_idx: Vec<usize>,
47 repeat_len: Vec<usize>,
51 /// Transform doc comments. Only useful in macro invocations
52 pub desugar_doc_comments: bool,
53 pub fatal_errs: Vec<DiagnosticBuilder<'a>>,
56 /// This can do Macro-By-Example transcription. On the other hand, if
57 /// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
58 /// (and should) be None.
59 pub fn new_tt_reader(sp_diag: &Handler,
60 interp: Option<HashMap<Name, Rc<NamedMatch>>>,
61 imported_from: Option<Ident>,
62 src: Vec<tokenstream::TokenTree>)
64 new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false)
67 /// The extra `desugar_doc_comments` flag enables reading doc comments
68 /// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
70 /// This can do Macro-By-Example transcription. On the other hand, if
71 /// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
72 /// (and should) be None.
73 pub fn new_tt_reader_with_doc_flag(sp_diag: &Handler,
74 interp: Option<HashMap<Name, Rc<NamedMatch>>>,
75 imported_from: Option<Ident>,
76 src: Vec<tokenstream::TokenTree>,
77 desugar_doc_comments: bool)
79 let mut r = TtReader {
82 forest: TokenTree::Sequence(DUMMY_SP, Rc::new(tokenstream::SequenceRepetition {
84 // doesn't matter. This merely holds the root unzipping.
85 separator: None, op: tokenstream::KleeneOp::ZeroOrMore, num_captures: 0
91 interpolations: match interp { /* just a convenience */
92 None => HashMap::new(),
95 imported_from: imported_from,
96 crate_name_next: None,
97 repeat_idx: Vec::new(),
98 repeat_len: Vec::new(),
99 desugar_doc_comments: desugar_doc_comments,
100 /* dummy values, never read: */
103 fatal_errs: Vec::new(),
105 tt_next_token(&mut r); /* get cur_tok and cur_span set up */
109 fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> {
110 r.repeat_idx.iter().fold(start, |ad, idx| {
112 MatchedNonterminal(_) => {
113 // end of the line; duplicate henceforth
116 MatchedSeq(ref ads, _) => ads[*idx].clone()
121 fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
122 let matched_opt = r.interpolations.get(&name.name).cloned();
123 matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
127 enum LockstepIterSize {
129 LisConstraint(usize, Ident),
130 LisContradiction(String),
133 impl Add for LockstepIterSize {
134 type Output = LockstepIterSize;
136 fn add(self, other: LockstepIterSize) -> LockstepIterSize {
138 LisUnconstrained => other,
139 LisContradiction(_) => self,
140 LisConstraint(l_len, ref l_id) => match other {
141 LisUnconstrained => self.clone(),
142 LisContradiction(_) => other,
143 LisConstraint(r_len, _) if l_len == r_len => self.clone(),
144 LisConstraint(r_len, r_id) => {
145 LisContradiction(format!("inconsistent lockstep iteration: \
146 '{}' has {} items, but '{}' has {}",
147 l_id, l_len, r_id, r_len))
154 fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
156 TokenTree::Delimited(_, ref delimed) => {
157 delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
158 size + lockstep_iter_size(tt, r)
161 TokenTree::Sequence(_, ref seq) => {
162 seq.tts.iter().fold(LisUnconstrained, |size, tt| {
163 size + lockstep_iter_size(tt, r)
166 TokenTree::Token(_, SubstNt(name)) | TokenTree::Token(_, MatchNt(name, _)) =>
167 match lookup_cur_matched(r, name) {
168 Some(matched) => match *matched {
169 MatchedNonterminal(_) => LisUnconstrained,
170 MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name),
172 _ => LisUnconstrained
174 TokenTree::Token(..) => LisUnconstrained,
178 /// Return the next token from the TtReader.
179 /// EFFECT: advances the reader's token field
// NOTE(review): this listing is a sampled fragment — the embedded original
// line numbers jump (e.g. 184 -> 187, 297 -> 300), so interior lines of this
// function (match arms, frame initializers, closing braces) are missing from
// view. Code is left byte-identical; only comments are added.
180 pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
181 // FIXME(pcwalton): Bad copy?
// The cached token/span pair is what this call returns; the rest of the
// function computes the *next* cur_tok/cur_span.
182 let ret_val = TokenAndSpan {
183 tok: r.cur_tok.clone(),
184 sp: r.cur_span.clone(),
// If a span was queued in crate_name_next (by the $crate arm below), emit the
// imported crate name as the next token.
187 match r.crate_name_next.take() {
191 r.cur_tok = token::Ident(r.imported_from.unwrap());
// Decide whether the current frame is exhausted and, for a `...`-repeated
// frame, whether its final repetition has been produced.
195 let should_pop = match r.stack.last() {
197 assert_eq!(ret_val.tok, token::Eof);
201 if frame.idx < frame.forest.len() {
204 !frame.dotdotdoted ||
205 *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
209 /* done with this set; pop or repeat? */
211 let prev = r.stack.pop().unwrap();
212 match r.stack.last_mut() {
// Stack empty after popping: the whole tree is consumed; hand out Eof.
214 r.cur_tok = token::Eof;
// A popped repetition frame owns one entry in repeat_idx/repeat_len —
// presumably both are popped here (interior lines missing); confirm upstream.
221 if prev.dotdotdoted {
225 } else { /* repeat */
// Next repetition: bump the repeat index, rewind the frame, and emit the
// separator token (if any) before the repeated body.
226 *r.repeat_idx.last_mut().unwrap() += 1;
227 r.stack.last_mut().unwrap().idx = 0;
228 if let Some(tk) = r.stack.last().unwrap().sep.clone() {
229 r.cur_tok = tk; // repeat same span, I guess
234 loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting
235 with a `TokenTree::Token`, even though it won't happen */
237 let frame = r.stack.last().unwrap();
238 // FIXME(pcwalton): Bad copy.
239 frame.forest.get_tt(frame.idx)
// `$(...)` sequence: validate lockstep sizes before pushing a repetition frame.
242 TokenTree::Sequence(sp, seq) => {
243 // FIXME(pcwalton): Bad copy.
244 match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()),
246 LisUnconstrained => {
247 panic!(r.sp_diag.span_fatal(
248 sp.clone(), /* blame macro writer */
249 "attempted to repeat an expression \
250 containing no syntax \
251 variables matched as repeating at this depth"));
253 LisContradiction(ref msg) => {
254 // FIXME #2887 blame macro invoker instead
255 panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..]));
257 LisConstraint(len, _) => {
// NOTE(review): presumably guarded by a `len == 0` check on the missing line
// 258 — a `+` repetition with zero matches is a hard error, while the
// skip-and-recurse below handles the empty `*` case.
259 if seq.op == tokenstream::KleeneOp::OneOrMore {
260 // FIXME #2887 blame invoker
261 panic!(r.sp_diag.span_fatal(sp.clone(),
262 "this must repeat at least once"));
265 r.stack.last_mut().unwrap().idx += 1;
266 return tt_next_token(r);
// Non-empty sequence: record its length and push a repetition frame.
268 r.repeat_len.push(len);
269 r.repeat_idx.push(0);
270 r.stack.push(TtFrame {
273 sep: seq.separator.clone(),
274 forest: TokenTree::Sequence(sp, seq),
279 // FIXME #2887: think about span stuff here
// `$ident` substitution: splice in the matched nonterminal, if any.
280 TokenTree::Token(sp, SubstNt(ident)) => {
281 match lookup_cur_matched(r, ident) {
// No binding of this name: emit the literal `$ident` token unchanged.
283 r.stack.last_mut().unwrap().idx += 1;
285 r.cur_tok = SubstNt(ident);
287 // this can't be 0 length, just like TokenTree::Delimited
289 Some(cur_matched) => {
291 // sidestep the interpolation tricks for ident because
292 // (a) idents can be in lots of places, so it'd be a pain
293 // (b) we actually can, since it's a token.
294 MatchedNonterminal(NtIdent(ref sn)) => {
295 r.stack.last_mut().unwrap().idx += 1;
296 r.cur_span = sn.span;
297 r.cur_tok = token::Ident(sn.node);
// A matched token tree becomes a new frame instead of a returned token.
300 MatchedNonterminal(NtTT(ref tt)) => {
301 r.stack.push(TtFrame {
302 forest: TokenTree::Token(sp, Interpolated(NtTT(tt.clone()))),
// Any other nonterminal is handed out wrapped as an Interpolated token.
308 MatchedNonterminal(ref other_whole_nt) => {
309 r.stack.last_mut().unwrap().idx += 1;
310 // FIXME(pcwalton): Bad copy.
312 r.cur_tok = Interpolated((*other_whole_nt).clone());
// Variable still inside a MatchedSeq at this depth: the transcriber is
// missing a `$(...)` around it.
316 panic!(r.sp_diag.span_fatal(
317 sp, /* blame the macro writer */
318 &format!("variable '{}' is still repeating at this depth",
325 // TokenTree::Delimited or any token that can be unzipped
326 seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, MatchNt(..)) => {
327 // do not advance the idx yet
328 r.stack.push(TtFrame {
334 // if this could be 0-length, we'd need to potentially recur here
// Doc comments get re-read via a pushed frame only when the reader was built
// with desugar_doc_comments (see the flag on TtReader).
336 TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
337 r.stack.push(TtFrame {
338 forest: TokenTree::Token(sp, DocComment(name)),
// `$crate`: when the macro was imported from another crate, emit `::` now and
// queue the crate-name span so the next call emits the crate name; otherwise
// emit nothing and move on.
344 TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
345 r.stack.last_mut().unwrap().idx += 1;
347 if r.imported_from.is_some() {
349 r.cur_tok = token::ModSep;
350 r.crate_name_next = Some(sp);
354 // otherwise emit nothing and proceed to the next token
// Plain token: cache it for the next call and advance the frame.
356 TokenTree::Token(sp, tok) => {
359 r.stack.last_mut().unwrap().idx += 1;