use rustc_ast as ast;
use rustc_ast::token::{self, DelimToken, Nonterminal, Token, TokenKind};
-use rustc_ast::tokenstream::{self, Spacing, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{self, TokenStream, TokenTree};
use rustc_ast_pretty::pprust;
use rustc_data_structures::sync::Lrc;
use rustc_errors::{Diagnostic, FatalError, Level, PResult};
};
// FIXME(#43081): Avoid this pretty-print + reparse hack
- let source = pprust::nonterminal_to_string(nt);
+ // Pretty-print the AST struct without inserting any parentheses
+ // beyond those explicitly written by the user (e.g. `ExpnKind::Paren`).
+ // The resulting stream may have incorrect precedence, but it's only
+ // ever used for a comparison against the captured tokenstream.
+ let source = pprust::nonterminal_to_string_no_extra_parens(nt);
let filename = FileName::macro_expansion_source_code(&source);
let reparsed_tokens = parse_stream_from_source_str(filename, source, sess, Some(span));
// modifications, including adding/removing typically non-semantic
// tokens such as extra braces and commas, don't happen.
if let Some(tokens) = tokens {
- if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess) {
+ // Compare with a non-relaxed delim match to start.
+ if tokenstream_probably_equal_for_proc_macro(&tokens, &reparsed_tokens, sess, false) {
return tokens;
}
+
+ // The check failed. This time, we pretty-print the AST struct with parentheses
+ // inserted to preserve precedence. This may cause `None`-delimiters in the captured
+ // token stream to match up with inserted parentheses in the reparsed stream.
+ let source_with_parens = pprust::nonterminal_to_string(nt);
+ let filename_with_parens = FileName::macro_expansion_source_code(&source_with_parens);
+ let reparsed_tokens_with_parens = parse_stream_from_source_str(
+ filename_with_parens,
+ source_with_parens,
+ sess,
+ Some(span),
+ );
+
+ // Compare with a relaxed delim match - we want inserted parentheses in the
+ // reparsed stream to match `None`-delimiters in the original stream.
+ if tokenstream_probably_equal_for_proc_macro(
+ &tokens,
+ &reparsed_tokens_with_parens,
+ sess,
+ true,
+ ) {
+ return tokens;
+ }
+
info!(
"cached tokens found, but they're not \"probably equal\", \
going with stringified version"
);
- info!("cached tokens: {:?}", tokens);
- info!("reparsed tokens: {:?}", reparsed_tokens);
+ info!("cached tokens: {}", pprust::tts_to_string(&tokens));
+ info!("reparsed tokens: {}", pprust::tts_to_string(&reparsed_tokens_with_parens));
+
+ info!("cached tokens debug: {:?}", tokens);
+ info!("reparsed tokens debug: {:?}", reparsed_tokens_with_parens);
}
reparsed_tokens
}
tokens: &TokenStream,
reparsed_tokens: &TokenStream,
sess: &ParseSess,
+ relaxed_delim_match: bool,
) -> bool {
// When checking for `probably_eq`, we ignore certain tokens that aren't
// preserved in the AST. Because they are not preserved, the pretty
// The pretty printer tends to add trailing commas to
// everything, and in particular, after struct fields.
| token::Comma
- // The pretty printer emits `NoDelim` as whitespace.
- | token::OpenDelim(DelimToken::NoDelim)
- | token::CloseDelim(DelimToken::NoDelim)
// The pretty printer collapses many semicolons into one.
| token::Semi
// We don't preserve leading `|` tokens in patterns, so
token_trees.into_iter()
}
- let expand_nt = |tree: TokenTree| {
- if let TokenTree::Token(Token { kind: TokenKind::Interpolated(nt), span }) = &tree {
- // When checking tokenstreams for 'probable equality', we are comparing
- // a captured (from parsing) `TokenStream` to a reparsed tokenstream.
- // The reparsed Tokenstream will never have `None`-delimited groups,
- // since they are only ever inserted as a result of macro expansion.
- // Therefore, inserting a `None`-delimtied group here (when we
- // convert a nested `Nonterminal` to a tokenstream) would cause
- // a mismatch with the reparsed tokenstream.
- //
- // Note that we currently do not handle the case where the
- // reparsed stream has a `Parenthesis`-delimited group
- // inserted. This will cause a spurious mismatch:
- // issue #75734 tracks resolving this.
- nt_to_tokenstream(nt, sess, *span).into_trees()
- } else {
- TokenStream::new(vec![(tree, Spacing::Alone)]).into_trees()
- }
- };
+ fn expand_token(tree: TokenTree, sess: &ParseSess) -> impl Iterator<Item = TokenTree> {
+ // When checking tokenstreams for 'probable equality', we are comparing
+ // a captured (from parsing) `TokenStream` to a reparsed tokenstream.
+ // The reparsed Tokenstream will never have `None`-delimited groups,
+ // since they are only ever inserted as a result of macro expansion.
+ // Therefore, inserting a `None`-delimited group here (when we
+ // convert a nested `Nonterminal` to a tokenstream) would cause
+ // a mismatch with the reparsed tokenstream.
+ //
+ // Note that we currently do not handle the case where the
+ // reparsed stream has a `Parenthesis`-delimited group
+ // inserted. This will cause a spurious mismatch:
+ // issue #75734 tracks resolving this.
+
+ let expanded: SmallVec<[_; 1]> =
+ if let TokenTree::Token(Token { kind: TokenKind::Interpolated(nt), span }) = &tree {
+ nt_to_tokenstream(nt, sess, *span)
+ .into_trees()
+ .flat_map(|t| expand_token(t, sess))
+ .collect()
+ } else {
+ // Filter before and after breaking tokens,
+ // since we may want to ignore both glued and unglued tokens.
+ std::iter::once(tree)
+ .filter(semantic_tree)
+ .flat_map(break_tokens)
+ .filter(semantic_tree)
+ .collect()
+ };
+ expanded.into_iter()
+ }
// Break tokens after we expand any nonterminals, so that we break tokens
// that are produced as a result of nonterminal expansion.
- let tokens = tokens.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
- let reparsed_tokens =
- reparsed_tokens.trees().filter(semantic_tree).flat_map(expand_nt).flat_map(break_tokens);
+ let tokens = tokens.trees().flat_map(|t| expand_token(t, sess));
+ let reparsed_tokens = reparsed_tokens.trees().flat_map(|t| expand_token(t, sess));
- tokens.eq_by(reparsed_tokens, |t, rt| tokentree_probably_equal_for_proc_macro(&t, &rt, sess))
+ tokens.eq_by(reparsed_tokens, |t, rt| {
+ tokentree_probably_equal_for_proc_macro(&t, &rt, sess, relaxed_delim_match)
+ })
}
// See comments in `Nonterminal::to_tokenstream` for why we care about
token: &TokenTree,
reparsed_token: &TokenTree,
sess: &ParseSess,
+ relaxed_delim_match: bool,
) -> bool {
match (token, reparsed_token) {
(TokenTree::Token(token), TokenTree::Token(reparsed_token)) => {
(
TokenTree::Delimited(_, delim, tokens),
TokenTree::Delimited(_, reparsed_delim, reparsed_tokens),
- ) => {
- delim == reparsed_delim
- && tokenstream_probably_equal_for_proc_macro(tokens, reparsed_tokens, sess)
+ ) if delim == reparsed_delim => tokenstream_probably_equal_for_proc_macro(
+ tokens,
+ reparsed_tokens,
+ sess,
+ relaxed_delim_match,
+ ),
+ (TokenTree::Delimited(_, DelimToken::NoDelim, tokens), reparsed_token) => {
+ if relaxed_delim_match {
+ if let TokenTree::Delimited(_, DelimToken::Paren, reparsed_tokens) = reparsed_token
+ {
+ if tokenstream_probably_equal_for_proc_macro(
+ tokens,
+ reparsed_tokens,
+ sess,
+ relaxed_delim_match,
+ ) {
+ return true;
+ }
+ }
+ }
+ tokens.len() == 1
+ && tokentree_probably_equal_for_proc_macro(
+ &tokens.trees().next().unwrap(),
+ reparsed_token,
+ sess,
+ relaxed_delim_match,
+ )
}
_ => false,
}