1 // Copyright 2012 The Rust Project Developers. See the COPYRIGHT
2 // file at the top-level directory of this distribution and at
3 // http://rust-lang.org/COPYRIGHT.
5 // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
6 // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
7 // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
8 // option. This file may not be copied, modified, or distributed
9 // except according to those terms.
10 use self::LockstepIterSize::*;
13 use ast::{TokenTree, Ident, Name};
14 use codemap::{Span, DUMMY_SP};
15 use diagnostic::SpanHandler;
16 use ext::tt::macro_parser::{NamedMatch, MatchedSeq, MatchedNonterminal};
17 use parse::token::{Eof, DocComment, Interpolated, MatchNt, SubstNt};
18 use parse::token::{Token, NtIdent, SpecialMacroVar};
20 use parse::lexer::TokenAndSpan;
24 use std::collections::HashMap;
26 /// An unzipping of `TokenTree`s: the full state of macro transcription.
//
// NOTE(review): this listing is elided — several fields (including the
// unzip stack and the current token/span that `tt_next_token` reads and
// writes) fall on source lines not shown here; confirm against the full
// file before relying on this field list being complete.
36 pub struct TtReader<'a> {
/// Handler used to report span-attached fatal errors during transcription.
37 pub sp_diag: &'a SpanHandler,
38 /// the unzipped tree:
40 /* for MBE-style macro transcription */
/// Maps a metavariable `Name` to the `NamedMatch` the macro parser
/// captured for it; normalized to an empty map when none was supplied.
41 interpolations: HashMap<Name, Rc<NamedMatch>>,
/// Crate this macro was imported from, if any; used when expanding the
/// `$crate` special variable.
42 imported_from: Option<Ident>,
44 // Some => return imported_from as the next token
45 crate_name_next: Option<Span>,
/// Current iteration index of each active `$(...)` repetition, innermost last.
46 repeat_idx: Vec<usize>,
/// Total repetition count paired with each entry of `repeat_idx`.
47 repeat_len: Vec<usize>,
51 /// Transform doc comments. Only useful in macro invocations
52 pub desugar_doc_comments: bool,
55 /// This can do Macro-By-Example transcription. On the other hand, if
56 /// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
57 /// (and should) be None.
///
/// Convenience wrapper: delegates to `new_tt_reader_with_doc_flag` with
/// doc-comment desugaring disabled.
58 pub fn new_tt_reader<'a>(sp_diag: &'a SpanHandler,
59 interp: Option<HashMap<Name, Rc<NamedMatch>>>,
60 imported_from: Option<Ident>,
61 src: Vec<ast::TokenTree>)
// `false`: leave doc comments as plain DocComment tokens instead of
// desugaring them into attribute-shaped token streams.
63 new_tt_reader_with_doc_flag(sp_diag, interp, imported_from, src, false)
66 /// The extra `desugar_doc_comments` flag enables reading doc comments
67 /// like any other attribute which consists of `meta` and surrounding #[ ] tokens.
69 /// This can do Macro-By-Example transcription. On the other hand, if
70 /// `src` contains no `TokenTree::Sequence`s, `MatchNt`s or `SubstNt`s, `interp` can
71 /// (and should) be None.
//
// NOTE(review): the initializer below is elided in this listing (the
// return type, the `stack` field, and the sequence's `tts` are on source
// lines not shown); comments describe only the visible parts.
72 pub fn new_tt_reader_with_doc_flag<'a>(sp_diag: &'a SpanHandler,
73 interp: Option<HashMap<Name, Rc<NamedMatch>>>,
74 imported_from: Option<Ident>,
75 src: Vec<ast::TokenTree>,
76 desugar_doc_comments: bool)
78 let mut r = TtReader {
// Wrap `src` in a synthetic zero-or-more sequence so the whole input is
// driven by the same unzipping machinery as a real `$(...)` repetition.
81 forest: TokenTree::Sequence(DUMMY_SP, Rc::new(ast::SequenceRepetition {
83 // doesn't matter. This merely holds the root unzipping.
84 separator: None, op: ast::ZeroOrMore, num_captures: 0
// An absent interpolation map is normalized to an empty one so lookups
// never need to branch on Option.
90 interpolations: match interp { /* just a convenience */
91 None => HashMap::new(),
94 imported_from: imported_from,
95 crate_name_next: None,
// No repetition is active yet; tt_next_token pushes/pops these in step
// with `$(...)` frames.
96 repeat_idx: Vec::new(),
97 repeat_len: Vec::new(),
98 desugar_doc_comments: desugar_doc_comments,
99 /* dummy values, never read: */
// Prime the reader: after this call cur_tok/cur_span hold the first real
// token, ready for the caller's first advance.
103 tt_next_token(&mut r); /* get cur_tok and cur_span set up */
// Narrow `start` down to the current repetition position: index into each
// nested MatchedSeq with the corresponding entry of `r.repeat_idx`.
107 fn lookup_cur_matched_by_matched(r: &TtReader, start: Rc<NamedMatch>) -> Rc<NamedMatch> {
108 r.repeat_idx.iter().fold(start, |ad, idx| {
// A nonterminal has no deeper structure: keep returning it unchanged for
// any remaining repetition levels.
110 MatchedNonterminal(_) => {
111 // end of the line; duplicate henceforth
// Descend one repetition level by selecting this level's current element.
114 MatchedSeq(ref ads, _) => ads[*idx].clone()
// Look up the match bound to metavariable `name`, already narrowed to the
// current repetition position; `None` if the name was never captured.
119 fn lookup_cur_matched(r: &TtReader, name: Ident) -> Option<Rc<NamedMatch>> {
120 let matched_opt = r.interpolations.get(&name.name).cloned();
121 matched_opt.map(|s| lookup_cur_matched_by_matched(r, s))
// How many times a `$(...)` sequence must repeat, as constrained by the
// metavariables appearing inside it.
//
// NOTE(review): the `LisUnconstrained` variant (used throughout below) is
// on an elided source line of this listing.
125 enum LockstepIterSize {
/// A variable (the `Ident`) fixes the repeat count to exactly `usize` items.
127 LisConstraint(usize, Ident),
/// Two variables demanded different counts; the message names both.
128 LisContradiction(String),
// Combine two constraints: unconstrained is the identity, a contradiction
// is absorbing, and two concrete constraints agree only when their lengths
// are equal.
131 impl Add for LockstepIterSize {
132 type Output = LockstepIterSize;
134 fn add(self, other: LockstepIterSize) -> LockstepIterSize {
136 LisUnconstrained => other,
// A contradiction already recorded wins; keep the first message.
137 LisContradiction(_) => self,
138 LisConstraint(l_len, ref l_id) => match other {
139 LisUnconstrained => self.clone(),
140 LisContradiction(_) => other,
141 LisConstraint(r_len, _) if l_len == r_len => self.clone(),
// Differing lengths: report both variables and their item counts.
142 LisConstraint(r_len, r_id) => {
143 LisContradiction(format!("inconsistent lockstep iteration: \
144 '{}' has {} items, but '{}' has {}",
145 l_id, l_len, r_id, r_len))
// Compute the repeat count implied by token tree `t`: fold together the
// constraints of every metavariable occurring anywhere inside it.
152 fn lockstep_iter_size(t: &TokenTree, r: &TtReader) -> LockstepIterSize {
154 TokenTree::Delimited(_, ref delimed) => {
155 delimed.tts.iter().fold(LisUnconstrained, |size, tt| {
156 size + lockstep_iter_size(tt, r)
159 TokenTree::Sequence(_, ref seq) => {
160 seq.tts.iter().fold(LisUnconstrained, |size, tt| {
161 size + lockstep_iter_size(tt, r)
// A metavariable constrains the count only when it is bound to a sequence
// at the current depth; a plain nonterminal imposes no constraint.
164 TokenTree::Token(_, SubstNt(name, _)) | TokenTree::Token(_, MatchNt(name, _, _, _)) =>
165 match lookup_cur_matched(r, name) {
166 Some(matched) => match *matched {
167 MatchedNonterminal(_) => LisUnconstrained,
168 MatchedSeq(ref ads, _) => LisConstraint(ads.len(), name),
170 _ => LisUnconstrained
172 TokenTree::Token(..) => LisUnconstrained,
176 /// Return the next token from the TtReader.
177 /// EFFECT: advances the reader's token field
//
// NOTE(review): this listing is elided — many statements (match headers,
// arm bodies, closing braces, span assignments) fall on source lines not
// shown here; the comments below describe only what is visible.
178 pub fn tt_next_token(r: &mut TtReader) -> TokenAndSpan {
179 // FIXME(pcwalton): Bad copy?
// The returned value is the token prepared by the PREVIOUS call; this
// call's job is to leave the NEXT token in r.cur_tok / r.cur_span.
180 let ret_val = TokenAndSpan {
181 tok: r.cur_tok.clone(),
182 sp: r.cur_span.clone(),
// A pending `$crate`: the previous call emitted `::` and stashed the
// span; now emit the imported crate name itself at that span.
185 match r.crate_name_next.take() {
189 r.cur_tok = token::Ident(r.imported_from.unwrap(), token::Plain);
// Unwind every stack frame whose token trees are exhausted.
193 let should_pop = match r.stack.last() {
// An empty stack means transcription already finished.
195 assert_eq!(ret_val.tok, token::Eof);
199 if frame.idx < frame.forest.len() {
// A `...`-repeated frame is only done when its final iteration is done.
202 !frame.dotdotdoted ||
203 *r.repeat_idx.last().unwrap() == *r.repeat_len.last().unwrap() - 1
207 /* done with this set; pop or repeat? */
209 let prev = r.stack.pop().unwrap()
210 match r.stack.last_mut() {
// Popped the root frame: the stream is exhausted, report Eof.
212 r.cur_tok = token::Eof;
// Leaving a repetition frame entirely: its iteration bookkeeping in
// repeat_idx/repeat_len is dropped (on an elided line).
219 if prev.dotdotdoted {
223 } else { /* repeat */
// More iterations remain: advance the iteration counter, rewind the
// frame, and emit the separator (if any) before the next pass.
224 *r.repeat_idx.last_mut().unwrap() += 1;
225 r.stack.last_mut().unwrap().idx = 0;
226 match r.stack.last().unwrap().sep.clone() {
228 r.cur_tok = tk; /* repeat same span, I guess */
235 loop { /* because it's easiest, this handles `TokenTree::Delimited` not starting
236 with a `TokenTree::Token`, even though it won't happen */
238 let frame = r.stack.last().unwrap();
239 // FIXME(pcwalton): Bad copy.
240 frame.forest.get_tt(frame.idx)
// A `$(...)` sequence: derive its repeat count from the variables inside
// it, then either skip it (zero reps) or push a frame to expand it.
243 TokenTree::Sequence(sp, seq) => {
244 // FIXME(pcwalton): Bad copy.
245 match lockstep_iter_size(&TokenTree::Sequence(sp, seq.clone()),
// No variable inside fixes a count, so repetition is meaningless.
247 LisUnconstrained => {
248 panic!(r.sp_diag.span_fatal(
249 sp.clone(), /* blame macro writer */
250 "attempted to repeat an expression \
251 containing no syntax \
252 variables matched as repeating at this depth"));
254 LisContradiction(ref msg) => {
255 // FIXME #2887 blame macro invoker instead
256 panic!(r.sp_diag.span_fatal(sp.clone(), &msg[..]));
258 LisConstraint(len, _) => {
// Zero repetitions: a `+` (OneOrMore) sequence must run at least
// once; otherwise step past the sequence and retry.
260 if seq.op == ast::OneOrMore {
261 // FIXME #2887 blame invoker
262 panic!(r.sp_diag.span_fatal(sp.clone(),
263 "this must repeat at least once"));
266 r.stack.last_mut().unwrap().idx += 1;
267 return tt_next_token(r);
// Begin iteration 0 of `len` and descend into the sequence body.
269 r.repeat_len.push(len);
270 r.repeat_idx.push(0);
271 r.stack.push(TtFrame {
274 sep: seq.separator.clone(),
275 forest: TokenTree::Sequence(sp, seq),
280 // FIXME #2887: think about span stuff here
// `$name`: splice in whatever the macro parser captured for `name`.
281 TokenTree::Token(sp, SubstNt(ident, namep)) => {
282 r.stack.last_mut().unwrap().idx += 1;
283 match lookup_cur_matched(r, ident) {
// Unknown variable: emit the `$name` token itself unchanged.
286 r.cur_tok = SubstNt(ident, namep);
288 // this can't be 0 length, just like TokenTree::Delimited
290 Some(cur_matched) => {
292 // sidestep the interpolation tricks for ident because
293 // (a) idents can be in lots of places, so it'd be a pain
294 // (b) we actually can, since it's a token.
295 MatchedNonterminal(NtIdent(ref sn, b)) => {
296 r.cur_span = sn.span;
297 r.cur_tok = token::Ident(sn.node, b);
// Any other nonterminal is emitted as a single Interpolated token.
300 MatchedNonterminal(ref other_whole_nt) => {
301 // FIXME(pcwalton): Bad copy.
303 r.cur_tok = token::Interpolated((*other_whole_nt).clone());
// Reaching here, the binding is still a MatchedSeq: `$name` was used
// without enough surrounding `$(...)` repetition.
307 panic!(r.sp_diag.span_fatal(
308 sp, /* blame the macro writer */
309 &format!("variable '{}' is still repeating at this depth",
316 // TokenTree::Delimited or any token that can be unzipped
317 seq @ TokenTree::Delimited(..) | seq @ TokenTree::Token(_, MatchNt(..)) => {
318 // do not advance the idx yet
319 r.stack.push(TtFrame {
325 // if this could be 0-length, we'd need to potentially recur here
// Doc comment with desugaring enabled: unzip it through a new frame so it
// comes back out as attribute-shaped tokens.
327 TokenTree::Token(sp, DocComment(name)) if r.desugar_doc_comments => {
328 r.stack.push(TtFrame {
329 forest: TokenTree::Token(sp, DocComment(name)),
// `$crate`: for an imported macro emit `::` now and schedule the crate
// name for the next call; otherwise emit nothing and keep going.
335 TokenTree::Token(sp, token::SpecialVarNt(SpecialMacroVar::CrateMacroVar)) => {
336 r.stack.last_mut().unwrap().idx += 1;
338 if r.imported_from.is_some() {
340 r.cur_tok = token::ModSep;
341 r.crate_name_next = Some(sp);
345 // otherwise emit nothing and proceed to the next token
// An ordinary token: it becomes the next cur_tok verbatim.
347 TokenTree::Token(sp, tok) => {
350 r.stack.last_mut().unwrap().idx += 1;