1 use crate::base::ExtCtxt;
3 use crate::mbe::macro_parser::{MatchedNonterminal, MatchedSeq, NamedMatch};
5 use rustc_ast::mut_visit::{self, MutVisitor};
6 use rustc_ast::token::{self, NtTT, Token};
7 use rustc_ast::tokenstream::{DelimSpan, TokenStream, TokenTree, TreeAndSpacing};
8 use rustc_data_structures::fx::FxHashMap;
9 use rustc_data_structures::sync::Lrc;
10 use rustc_errors::{pluralize, PResult};
11 use rustc_span::hygiene::{LocalExpnId, Transparency};
12 use rustc_span::symbol::MacroRulesNormalizedIdent;
15 use smallvec::{smallvec, SmallVec};
// A Marker adds the given mark to the syntax context.
// Field 0 is the expansion id to mark spans with; field 1 is the hygiene
// transparency used when applying that mark (see `apply_mark` in the
// `MutVisitor` impl for `Marker`).
struct Marker(LocalExpnId, Transparency);
// Applies the marker's expansion mark to every span it visits.
// NOTE(review): several lines of this impl (the body of
// `token_visiting_enabled`, closing braces) are missing from this excerpt —
// confirm against the original source before editing.
impl MutVisitor for Marker {
fn token_visiting_enabled(&self) -> bool {
// Re-mark the span with this expansion's id and transparency.
fn visit_span(&mut self, span: &mut Span) {
*span = span.apply_mark(self.0.to_expn_id(), self.1)
/// An iterator over the token trees in a delimited token tree (`{ ... }`) or a sequence (`$(...)`).
// NOTE(review): the `enum Frame {` header line is missing from this excerpt.
// A frame tracks a cursor (`idx`) into the token trees of either a delimited
// group (with its span) or a sequence repetition (with its optional separator).
Delimited { forest: Lrc<mbe::Delimited>, idx: usize, span: DelimSpan },
Sequence { forest: Lrc<mbe::SequenceRepetition>, idx: usize, sep: Option<Token> },
/// Construct a new frame around the delimited set of tokens.
///
/// Wraps `tts` in an invisible (`NoDelim`) delimiter with a dummy span, so the
/// whole RHS can be driven by the same `Delimited` machinery as real groups.
// NOTE(review): the enclosing `impl Frame {` header and the closing brace of
// this function are missing from this excerpt.
fn new(tts: Vec<mbe::TokenTree>) -> Frame {
let forest = Lrc::new(mbe::Delimited { delim: token::NoDelim, tts });
Frame::Delimited { forest, idx: 0, span: DelimSpan::dummy() }
// Yields the token trees of the frame one at a time, advancing the cursor.
impl Iterator for Frame {
type Item = mbe::TokenTree;

fn next(&mut self) -> Option<mbe::TokenTree> {
Frame::Delimited { ref forest, ref mut idx, .. } => {
// `*idx - 1` implies `idx` is incremented before this read;
// NOTE(review): the increment lines are missing from this excerpt — confirm.
forest.tts.get(*idx - 1).cloned()
Frame::Sequence { ref forest, ref mut idx, .. } => {
forest.tts.get(*idx - 1).cloned()
/// This can do Macro-By-Example transcription.
/// - `interp` is a map of meta-variables to the tokens (non-terminals) they matched in the
/// invocation. We are assuming we already know there is a match.
/// - `src` is the RHS of the MBE, that is, the "example" we are filling in.
/// macro_rules! foo {
/// ($id:ident) => { println!("{}", stringify!($id)); }
/// `interp` would contain `$id => bar` and `src` would contain `println!("{}", stringify!($id));`.
/// `transcribe` would return a `TokenStream` containing `println!("{}", stringify!(bar));`.
/// Along the way, we do some additional error checking.
// NOTE(review): this excerpt is missing many lines of `transcribe` (loop
// headers, `if` conditions, closing braces, several match-arm bodies) —
// consult the original source before making behavioral changes.
pub(super) fn transcribe<'a>(
interp: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
src: Vec<mbe::TokenTree>,
transparency: Transparency,
) -> PResult<'a, TokenStream> {
// Nothing for us to transcribe...
// (presumably guarded by an `src.is_empty()` check — the condition line is
// missing from this excerpt; confirm.)
return Ok(TokenStream::default());
// We descend into the RHS (`src`), expanding things as we go. This stack contains the things
// we have yet to expand/are still expanding. We start the stack off with the whole RHS.
let mut stack: SmallVec<[Frame; 1]> = smallvec![Frame::new(src)];
// As we descend in the RHS, we will need to be able to match nested sequences of matchers.
// `repeats` keeps track of where we are in matching at each level, with the last element being
// the most deeply nested sequence. This is used as a stack.
let mut repeats = Vec::new();
// `result` contains resulting token stream from the TokenTree we just finished processing. At
// the end, this will contain the full result of transcription, but at arbitrary points during
// `transcribe`, `result` will contain subsets of the final result.
// Specifically, as we descend into each TokenTree, we will push the existing results onto the
// `result_stack` and clear `results`. We will then produce the results of transcribing the
// TokenTree into `results`. Then, as we unwind back out of the `TokenTree`, we will pop the
// `result_stack` and append `results` to it to produce the new `results` up to that point.
// Thus, if we try to pop the `result_stack` and it is empty, we have reached the top-level
// again, and we are done transcribing.
let mut result: Vec<TreeAndSpacing> = Vec::new();
let mut result_stack = Vec::new();
let mut marker = Marker(cx.current_expansion.id, transparency);
// Look at the last frame on the stack.
let tree = if let Some(tree) = stack.last_mut().unwrap().next() {
// If it still has a TokenTree we have not looked at yet, use that tree.
// This else-case never produces a value for `tree` (it `continue`s or `return`s).
// Otherwise, if we have just reached the end of a sequence and we can keep repeating,
// go back to the beginning of the sequence.
if let Frame::Sequence { idx, sep, .. } = stack.last_mut().unwrap() {
let (repeat_idx, repeat_len) = repeats.last_mut().unwrap();
if repeat_idx < repeat_len {
// Emit the separator (if any) between repetitions.
if let Some(sep) = sep {
result.push(TokenTree::Token(sep.clone()).into());
// We are done with the top of the stack. Pop it. Depending on what it was, we do
// different things. Note that the outermost item must be the delimited, wrapped RHS
// that was passed in originally to `transcribe`.
match stack.pop().unwrap() {
// Done with a sequence. Pop from repeats.
Frame::Sequence { .. } => {
// We are done processing a Delimited. If this is the top-level delimited, we are
// done. Otherwise, we unwind the result_stack to append what we have produced to
// any previous results.
Frame::Delimited { forest, span, .. } => {
if result_stack.is_empty() {
// No results left to compute! We are back at the top-level.
return Ok(TokenStream::new(result));
// Step back into the parent Delimited.
let tree = TokenTree::Delimited(span, forest.delim, TokenStream::new(result));
result = result_stack.pop().unwrap();
result.push(tree.into());
// At this point, we know we are in the middle of a TokenTree (the last one on `stack`).
// `tree` contains the next `TokenTree` to be processed.
// We are descending into a sequence. We first make sure that the matchers in the RHS
// and the matches in `interp` have the same shape. Otherwise, either the caller or the
// macro writer has made a mistake.
seq @ mbe::TokenTree::Sequence(..) => {
match lockstep_iter_size(&seq, interp, &repeats) {
LockstepIterSize::Unconstrained => {
return Err(cx.struct_span_err(
seq.span(), /* blame macro writer */
"attempted to repeat an expression containing no syntax variables \
matched as repeating at this depth",
LockstepIterSize::Contradiction(ref msg) => {
// FIXME: this really ought to be caught at macro definition time... It
// happens when two meta-variables are used in the same repetition in a
// sequence, but they come from different sequence matchers and repeat
// different amounts.
return Err(cx.struct_span_err(seq.span(), &msg[..]));
LockstepIterSize::Constraint(len, _) => {
// We do this to avoid an extra clone above. We know that this is a
let (sp, seq) = if let mbe::TokenTree::Sequence(sp, seq) = seq {
// Is the repetition empty?
if seq.kleene.op == mbe::KleeneOp::OneOrMore {
// FIXME: this really ought to be caught at macro definition
// time... It happens when the Kleene operator in the matcher and
// the body for the same meta-variable do not match.
return Err(cx.struct_span_err(
"this must repeat at least once",
// 0 is the initial counter (we have done 0 repetitions so far). `len`
// is the total number of repetitions we should generate.
repeats.push((0, len));
// The first time we encounter the sequence we push it to the stack. It
// then gets reused (see the beginning of the loop) until we are done
stack.push(Frame::Sequence {
sep: seq.separator.clone(),
// Replace the meta-var with the matched token tree from the invocation.
// NOTE(review): `orignal_ident` is misspelled ("original") here and at its
// three other uses below — rename candidate for a follow-up change.
mbe::TokenTree::MetaVar(mut sp, mut orignal_ident) => {
// Find the matched nonterminal from the macro invocation, and use it to replace
let ident = MacroRulesNormalizedIdent::new(orignal_ident);
if let Some(cur_matched) = lookup_cur_matched(ident, interp, &repeats) {
if let MatchedNonterminal(nt) = cur_matched {
let token = if let NtTT(tt) = &**nt {
// `tt`s are emitted into the output stream directly as "raw tokens",
// without wrapping them into groups.
// Other variables are emitted into the output stream as groups with
// `Delimiter::None` to maintain parsing priorities.
// `Interpolated` is currently used for such groups in rustc parser.
marker.visit_span(&mut sp);
TokenTree::token(token::Interpolated(nt.clone()), sp)
result.push(token.into());
// We were unable to descend far enough. This is an error.
return Err(cx.struct_span_err(
sp, /* blame the macro writer */
&format!("variable '{}' is still repeating at this depth", ident),
// If we aren't able to match the meta-var, we push it back into the result but
// with modified syntax context. (I believe this supports nested macros).
marker.visit_span(&mut sp);
marker.visit_ident(&mut orignal_ident);
result.push(TokenTree::token(token::Dollar, sp).into());
result.push(TokenTree::Token(Token::from_ast_ident(orignal_ident)).into());
// If we are entering a new delimiter, we push its contents to the `stack` to be
// processed, and we push all of the currently produced results to the `result_stack`.
// We will produce all of the results of the inside of the `Delimited` and then we will
// jump back out of the Delimited, pop the result_stack and add the new results back to
// the previous results (from outside the Delimited).
mbe::TokenTree::Delimited(mut span, delimited) => {
mut_visit::visit_delim_span(&mut span, &mut marker);
stack.push(Frame::Delimited { forest: delimited, idx: 0, span });
result_stack.push(mem::take(&mut result));
// Nothing much to do here. Just push the token to the result, being careful to
// preserve syntax context.
mbe::TokenTree::Token(token) => {
let mut tt = TokenTree::Token(token);
mut_visit::visit_tt(&mut tt, &mut marker);
result.push(tt.into());
// There should be no meta-var declarations in the invocation of a macro.
// NOTE(review): the panic message below is missing its closing backtick
// (`TokenTree::MetaVarDecl` vs `TokenTree::MetaVarDecl`) — message-text fix
// candidate; left untouched here since runtime strings must not change in a
// documentation pass.
mbe::TokenTree::MetaVarDecl(..) => panic!("unexpected `TokenTree::MetaVarDecl"),
/// Lookup the meta-var named `ident` and return the matched token tree from the invocation using
/// the set of matches `interpolations`.
/// See the definition of `repeats` in the `transcribe` function. `repeats` is used to descend
/// into the right place in nested matchers. If we attempt to descend too far, the macro writer has
/// made a mistake, and we return `None`.
// NOTE(review): the `match matched` header and the loop/closure closers are
// missing from this excerpt — confirm against the original source.
fn lookup_cur_matched<'a>(
ident: MacroRulesNormalizedIdent,
interpolations: &'a FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
repeats: &[(usize, usize)],
) -> Option<&'a NamedMatch> {
interpolations.get(&ident).map(|matched| {
let mut matched = matched;
// Walk down one level of matched sequence per entry in `repeats`, using the
// current repetition index at each level to pick the right sub-match.
for &(idx, _) in repeats {
// Reached a leaf match — can't descend further.
MatchedNonterminal(_) => break,
MatchedSeq(ref ads) => matched = ads.get(idx).unwrap(),
/// An accumulator over a TokenTree to be used with `fold`. During transcription, we need to make
/// sure that the size of each sequence and all of its nested sequences are the same as the sizes
/// of all the matched (nested) sequences in the macro invocation. If they don't match, somebody
/// has made a mistake (either the macro writer or caller).
// NOTE(review): the `Unconstrained` variant's own line (and any derives) are
// missing from this excerpt; the variant is referenced by the docs below and
// by `with`/`lockstep_iter_size` — confirm against the original source.
enum LockstepIterSize {
/// No constraints on length of matcher. This is true for any TokenTree variants except a
/// `MetaVar` with an actual `MatchedSeq` (as opposed to a `MatchedNonterminal`).
/// A `MetaVar` with an actual `MatchedSeq`. The length of the match and the name of the
/// meta-var are returned.
Constraint(usize, MacroRulesNormalizedIdent),
/// Two `Constraint`s on the same sequence had different lengths. This is an error.
Contradiction(String),
impl LockstepIterSize {
/// Find incompatibilities in matcher/invocation sizes.
/// - `Unconstrained` is compatible with everything.
/// - `Contradiction` is incompatible with everything.
/// - `Constraint(len)` is only compatible with other constraints of the same length.
// NOTE(review): the `match self` header and the `format!` call wrapping the
// message template are missing from this excerpt.
fn with(self, other: LockstepIterSize) -> LockstepIterSize {
LockstepIterSize::Unconstrained => other,
// Contradictions are sticky: once found, they propagate unchanged.
LockstepIterSize::Contradiction(_) => self,
LockstepIterSize::Constraint(l_len, ref l_id) => match other {
LockstepIterSize::Unconstrained => self,
LockstepIterSize::Contradiction(_) => other,
// Equal lengths are compatible; keep the existing constraint.
LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
// Unequal lengths: build a human-readable contradiction message.
LockstepIterSize::Constraint(r_len, r_id) => {
"meta-variable `{}` repeats {} time{}, but `{}` repeats {} time{}",
LockstepIterSize::Contradiction(msg)
/// Given a `tree`, make sure that all sequences have the same length as the matches for the
/// appropriate meta-vars in `interpolations`.
/// Note that if `repeats` does not match the exact correct depth of a meta-var,
/// `lookup_cur_matched` will return `None`, which is why this still works even in the presence of
/// multiple nested matcher sequences.
// NOTE(review): the `match *tree` header and the closing braces of the arms
// are missing from this excerpt — confirm against the original source.
fn lockstep_iter_size(
tree: &mbe::TokenTree,
interpolations: &FxHashMap<MacroRulesNormalizedIdent, NamedMatch>,
repeats: &[(usize, usize)],
) -> LockstepIterSize {
// Recurse through delimited groups, folding constraints with `with`.
TokenTree::Delimited(_, ref delimed) => {
delimed.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
size.with(lockstep_iter_size(tt, interpolations, repeats))
// Likewise for sequences.
TokenTree::Sequence(_, ref seq) => {
seq.tts.iter().fold(LockstepIterSize::Unconstrained, |size, tt| {
size.with(lockstep_iter_size(tt, interpolations, repeats))
// A meta-var only constrains the size if it matched a sequence at this depth.
TokenTree::MetaVar(_, name) | TokenTree::MetaVarDecl(_, name, _) => {
let name = MacroRulesNormalizedIdent::new(name);
match lookup_cur_matched(name, interpolations, repeats) {
Some(matched) => match matched {
MatchedNonterminal(_) => LockstepIterSize::Unconstrained,
MatchedSeq(ref ads) => LockstepIterSize::Constraint(ads.len(), name),
// No match found at this depth: no constraint.
_ => LockstepIterSize::Unconstrained,
// Plain tokens impose no size constraint.
TokenTree::Token(..) => LockstepIterSize::Unconstrained,