From: Vadim Petrochenkov Date: Wed, 5 Jun 2019 19:04:52 +0000 (+0300) Subject: Address review comments X-Git-Url: https://git.lizzy.rs/?a=commitdiff_plain;h=3a31f0634bb1669eae64e83f595942986f867125;p=rust.git Address review comments --- diff --git a/src/libsyntax/ext/tt/macro_rules.rs b/src/libsyntax/ext/tt/macro_rules.rs index d25339a78f4..7ab51c1eb20 100644 --- a/src/libsyntax/ext/tt/macro_rules.rs +++ b/src/libsyntax/ext/tt/macro_rules.rs @@ -130,9 +130,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>, } // Which arm's failure should we report? (the one furthest along) - let mut best_fail_spot = DUMMY_SP; - let mut best_fail_tok = None; - let mut best_fail_text = None; + let mut best_failure: Option<(Token, &str)> = None; for (i, lhs) in lhses.iter().enumerate() { // try each arm's matchers let lhs_tt = match *lhs { @@ -190,21 +188,20 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt<'_>, arm_span, }) } - Failure(token, msg) => if token.span.lo() >= best_fail_spot.lo() { - best_fail_spot = token.span; - best_fail_tok = Some(token.kind); - best_fail_text = Some(msg); - }, + Failure(token, msg) => match best_failure { + Some((ref best_token, _)) if best_token.span.lo() >= token.span.lo() => {} + _ => best_failure = Some((token, msg)) + } Error(err_sp, ref msg) => { cx.span_fatal(err_sp.substitute_dummy(sp), &msg[..]) } } } - let best_fail_msg = parse_failure_msg(best_fail_tok.expect("ran no matchers")); - let span = best_fail_spot.substitute_dummy(sp); - let mut err = cx.struct_span_err(span, &best_fail_msg); - err.span_label(span, best_fail_text.unwrap_or(&best_fail_msg)); + let (token, label) = best_failure.expect("ran no matchers"); + let span = token.span.substitute_dummy(sp); + let mut err = cx.struct_span_err(span, &parse_failure_msg(token.kind)); + err.span_label(span, label); if let Some(sp) = def_span { if cx.source_map().span_to_filename(sp).is_real() && !sp.is_dummy() { err.span_label(cx.source_map().def_span(sp), "when calling this macro"); diff 
--git a/src/libsyntax/mut_visit.rs b/src/libsyntax/mut_visit.rs index 4d7f0a97b0f..d2a614c4a54 100644 --- a/src/libsyntax/mut_visit.rs +++ b/src/libsyntax/mut_visit.rs @@ -604,6 +604,8 @@ pub fn noop_visit_token<T: MutVisitor>(t: &mut Token, vis: &mut T) { let mut ident = Ident::new(*name, *span); vis.visit_ident(&mut ident); *name = ident.name; + *span = ident.span; + return; // avoid visiting the span for the second time } token::Interpolated(nt) => { let mut nt = Lrc::make_mut(nt); diff --git a/src/libsyntax/parse/parser.rs b/src/libsyntax/parse/parser.rs index 51bfe3527cf..43e7c9330e4 100644 --- a/src/libsyntax/parse/parser.rs +++ b/src/libsyntax/parse/parser.rs @@ -197,13 +197,17 @@ enum PrevTokenKind { #[derive(Clone)] pub struct Parser<'a> { pub sess: &'a ParseSess, - /// The current token. + /// The current normalized token. + /// "Normalized" means that some interpolated tokens + /// (`$i: ident` and `$l: lifetime` meta-variables) are replaced + /// with non-interpolated identifier and lifetime tokens they refer to. + /// Perhaps the normalized / non-normalized setup can be simplified somehow. pub token: Token, - /// The span of the previous token. + /// Span of the current non-normalized token. meta_var_span: Option<Span>, - /// The span of the previous token. + /// Span of the previous non-normalized token. pub prev_span: Span, - /// The previous token kind. + /// Kind of the previous normalized token (in simplified form). prev_token_kind: PrevTokenKind, restrictions: Restrictions, /// Used to determine the path to externally loaded source files.