let tokens_for_real = nt.1.force(|| {
// FIXME(#43081): Avoid this pretty-print + reparse hack
let source = pprust::token_to_string(self);
- parse_stream_from_source_str(FileName::MacroExpansion, source, sess, Some(span))
+ let filename = FileName::macro_expansion_source_code(&source);
+ parse_stream_from_source_str(filename, source, sess, Some(span))
});
// During early phases of the compiler the AST could get modified
- // directly (e.g. attributes added or removed) and the internal cache
+ // directly (e.g., attributes added or removed) and the internal cache
// of tokens may not be invalidated or updated. Consequently if the
// "lossless" token stream disagrees with our actual stringification
// (which has historically been much more battle-tested) then we go
assert_eq!(attr.style, ast::AttrStyle::Outer,
"inner attributes should prevent cached tokens from existing");
+ let source = pprust::attr_to_string(attr);
+ let macro_filename = FileName::macro_expansion_source_code(&source);
if attr.is_sugared_doc {
let stream = parse_stream_from_source_str(
- FileName::MacroExpansion,
- pprust::attr_to_string(attr),
+ macro_filename,
+ source,
sess,
Some(span),
);
// should eventually be removed.
} else {
let stream = parse_stream_from_source_str(
- FileName::MacroExpansion,
- pprust::path_to_string(&attr.path),
+ macro_filename,
+ source,
sess,
Some(span),
);