Auto merge of #57944 - estebank:unclosed-delim-the-quickening, r=oli-obk
author    bors <bors@rust-lang.org>
Sat, 9 Feb 2019 20:15:57 +0000 (20:15 +0000)
committer bors <bors@rust-lang.org>
Sat, 9 Feb 2019 20:15:57 +0000 (20:15 +0000)
Deduplicate mismatched delimiter errors

Delay unmatched delimiter errors until after the parser has run, deduplicating them during parsing and attempting intelligent recovery.

Second attempt at #54029, follow-up to #53949. Fixes #31528.
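
For illustration only (not part of the original commit message): a deliberately malformed snippet of the kind this change targets, adapted from the examples embedded in the parser comments in the diff below.

    // Intentionally invalid Rust: the closing `)` delimiters are missing.
    fn main() {
        foo(bar(;
    }
    // Previously, each mismatched delimiter could be reported several times.
    // With this change, the lexer records every unclosed delimiter as an
    // `UnmatchedBrace`; the parser consumes matching entries in
    // `recover_closing_delimiter` to suggest where the missing `)` may belong,
    // and any entries left over are reported once by `emit_unclosed_delims`.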

src/librustc_errors/emitter.rs
src/librustc_metadata/cstore_impl.rs
src/libsyntax/parse/parser.rs
src/libsyntax/parse/token.rs
src/libsyntax_ext/proc_macro_server.rs

index 061d23697fa3a26bd6be5b3212dfd06cd318d02f,f04bd7b8f021b0858d965a7f87897e09d7f30f4e..2821201173ea095b22afe104ac65c0162c1912bd
@@@ -1,26 -1,28 +1,26 @@@
 -use self::Destination::*;
 +use Destination::*;
  
  use syntax_pos::{SourceFile, Span, MultiSpan};
  
 -use {Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, SourceMapperDyn, DiagnosticId};
 -use snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style};
 -use styled_buffer::StyledBuffer;
 +use crate::{Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, SourceMapperDyn, DiagnosticId};
 +use crate::snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style};
 +use crate::styled_buffer::StyledBuffer;
  
  use rustc_data_structures::fx::FxHashMap;
  use rustc_data_structures::sync::Lrc;
 -use atty;
  use std::borrow::Cow;
  use std::io::prelude::*;
  use std::io;
  use std::cmp::{min, Reverse};
  use termcolor::{StandardStream, ColorChoice, ColorSpec, BufferWriter};
  use termcolor::{WriteColor, Color, Buffer};
 -use unicode_width;
  
  const ANONYMIZED_LINE_NUM: &str = "LL";
  
  /// Emitter trait for emitting errors.
  pub trait Emitter {
      /// Emit a structured diagnostic.
 -    fn emit(&mut self, db: &DiagnosticBuilder);
 +    fn emit(&mut self, db: &DiagnosticBuilder<'_>);
  
      /// Check if should show explanations about "rustc --explain"
      fn should_show_explain(&self) -> bool {
@@@ -29,7 -31,7 +29,7 @@@
  }
  
  impl Emitter for EmitterWriter {
 -    fn emit(&mut self, db: &DiagnosticBuilder) {
 +    fn emit(&mut self, db: &DiagnosticBuilder<'_>) {
          let mut primary_span = db.span.clone();
          let mut children = db.children.clone();
          let mut suggestions: &[_] = &[];
@@@ -672,8 -674,8 +672,8 @@@ impl EmitterWriter 
          //   | |  something about `foo`
          //   | something about `fn foo()`
          annotations_position.sort_by(|a, b| {
-             // Decreasing order
-             a.1.len().cmp(&b.1.len()).reverse()
+             // Decreasing order. When `a` and `b` are the same length, prefer `Primary`.
+             (a.1.len(), !a.1.is_primary).cmp(&(b.1.len(), !b.1.is_primary)).reverse()
          });
  
          // Write the underlines.
@@@ -1429,7 -1431,7 +1429,7 @@@ fn emit_to_destination(rendered_buffer
                         dst: &mut Destination,
                         short_message: bool)
                         -> io::Result<()> {
 -    use lock;
 +    use crate::lock;
  
      let mut dst = dst.writable();
  
index 49a3e335e34179eec2798c85af5d8b78ec1e12f3,fd4089dfdb9db0f137e70c210360309ba982be87..b248c6bf6565a29d09d89e59a571efbbb20c03db
@@@ -1,9 -1,9 +1,9 @@@
 -use cstore::{self, LoadedMacro};
 -use encoder;
 -use link_args;
 -use native_libs;
 -use foreign_modules;
 -use schema;
 +use crate::cstore::{self, LoadedMacro};
 +use crate::encoder;
 +use crate::link_args;
 +use crate::native_libs;
 +use crate::foreign_modules;
 +use crate::schema;
  
  use rustc::ty::query::QueryConfig;
  use rustc::middle::cstore::{CrateStore, DepKind,
@@@ -29,6 -29,7 +29,7 @@@ use syntax::attr
  use syntax::source_map;
  use syntax::edition::Edition;
  use syntax::parse::source_file_to_stream;
+ use syntax::parse::parser::emit_unclosed_delims;
  use syntax::symbol::Symbol;
  use syntax_pos::{Span, NO_EXPANSION, FileName};
  use rustc_data_structures::bit_set::BitSet;
@@@ -51,7 -52,7 +52,7 @@@ macro_rules! provide 
                      index: CRATE_DEF_INDEX
                  });
                  let dep_node = def_path_hash
 -                    .to_dep_node(::rustc::dep_graph::DepKind::CrateMetadata);
 +                    .to_dep_node(rustc::dep_graph::DepKind::CrateMetadata);
                  // The DepNodeIndex of the DepNode::CrateMetadata should be
                  // cached somewhere, so that we can use read_index().
                  $tcx.dep_graph.read(dep_node);
@@@ -421,7 -422,7 +422,7 @@@ impl cstore::CStore 
              use syntax::ext::base::SyntaxExtension;
              use syntax_ext::proc_macro_impl::BangProcMacro;
  
 -            let client = ::proc_macro::bridge::client::Client::expand1(::proc_macro::quote);
 +            let client = proc_macro::bridge::client::Client::expand1(proc_macro::quote);
              let ext = SyntaxExtension::ProcMacro {
                  expander: Box::new(BangProcMacro { client }),
                  allow_internal_unstable: true,
  
          let source_file = sess.parse_sess.source_map().new_source_file(source_name, def.body);
          let local_span = Span::new(source_file.start_pos, source_file.end_pos, NO_EXPANSION);
-         let body = source_file_to_stream(&sess.parse_sess, source_file, None);
+         let (body, errors) = source_file_to_stream(&sess.parse_sess, source_file, None);
+         emit_unclosed_delims(&errors, &sess.diagnostic());
  
          // Mark the attrs as used
          let attrs = data.get_item_attrs(id.index, sess);
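
A minimal sketch (not itself part of the diff) of the caller contract introduced here: `source_file_to_stream` and `parse_stream_from_source_str` now return the token stream paired with the unclosed-delimiter errors the lexer delayed, and every caller is expected to flush them, as seen again in the token.rs and proc_macro_server.rs hunks further down.

    // Sketch using the names from the hunk above.
    let (body, errors) = source_file_to_stream(&sess.parse_sess, source_file, None);
    // Emit the delayed unclosed-delimiter errors; delaying them is what lets the
    // parser deduplicate and recover from them before anything is reported.
    emit_unclosed_delims(&errors, &sess.diagnostic());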
index d71145893c34acf63ebbc54e3ff8addf6155a6f6,0ac3b8f6bd01803c7c9160046449127205889f50..69d6407d506fb0bc17dc89851f731c4d83a613b5
@@@ -35,7 -35,7 +35,7 @@@ use crate::ext::base::DummyResult
  use crate::source_map::{self, SourceMap, Spanned, respan};
  use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
  use crate::parse::{self, SeqSep, classify, token};
- use crate::parse::lexer::TokenAndSpan;
+ use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
  use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
  use crate::parse::token::DelimToken;
  use crate::parse::{new_sub_parser_from_file, ParseSess, Directory, DirectoryOwnership};
@@@ -251,6 -251,11 +251,11 @@@ pub struct Parser<'a> 
      ///
      /// See the comments in the `parse_path_segment` function for more details.
      crate unmatched_angle_bracket_count: u32,
+     crate max_angle_bracket_count: u32,
+     /// List of all unclosed delimiters found by the lexer. If an entry is used for error recovery
+     /// it gets removed from here. Every entry left at the end gets emitted as an independent
+     /// error.
+     crate unclosed_delims: Vec<UnmatchedBrace>,
  }
  
  
@@@ -397,7 -402,6 +402,7 @@@ crate enum TokenType 
      Ident,
      Path,
      Type,
 +    Const,
  }
  
  impl TokenType {
              TokenType::Ident => "identifier".to_string(),
              TokenType::Path => "path".to_string(),
              TokenType::Type => "type".to_string(),
 +            TokenType::Const => "const".to_string(),
          }
      }
  }
@@@ -575,6 -578,8 +580,8 @@@ impl<'a> Parser<'a> 
              desugar_doc_comments,
              cfg_mods: true,
              unmatched_angle_bracket_count: 0,
+             max_angle_bracket_count: 0,
+             unclosed_delims: Vec::new(),
          };
  
          let tok = parser.next_tok();
  
      /// Expect and consume the token t. Signal an error if
      /// the next token is not t.
-     pub fn expect(&mut self, t: &token::Token) -> PResult<'a,  ()> {
+     pub fn expect(&mut self, t: &token::Token) -> PResult<'a,  bool /* recovered */> {
          if self.expected_tokens.is_empty() {
              if self.token == *t {
                  self.bump();
-                 Ok(())
+                 Ok(false)
              } else {
                  let token_str = pprust::token_to_string(t);
                  let this_token_str = self.this_token_descr();
                      self.sess.source_map().next_point(self.prev_span)
                  };
                  let label_exp = format!("expected `{}`", token_str);
+                 match self.recover_closing_delimiter(&[t.clone()], err) {
+                     Err(e) => err = e,
+                     Ok(recovered) => {
+                         return Ok(recovered);
+                     }
+                 }
                  let cm = self.sess.source_map();
                  match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
                      (Ok(ref a), Ok(ref b)) if a.line == b.line => {
          }
      }
  
+     fn recover_closing_delimiter(
+         &mut self,
+         tokens: &[token::Token],
+         mut err: DiagnosticBuilder<'a>,
+     ) -> PResult<'a, bool> {
+         let mut pos = None;
+         // we want to use the last closing delim that would apply
+         for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
+             if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
+                 && Some(self.span) > unmatched.unclosed_span
+             {
+                 pos = Some(i);
+             }
+         }
+         match pos {
+             Some(pos) => {
+                 // Recover and assume that the detected unclosed delimiter was meant for
+                 // this location. Emit the diagnostic and act as if the delimiter was
+                 // present for the parser's sake.
+                  // Don't attempt to recover from this unclosed delimiter more than once.
+                 let unmatched = self.unclosed_delims.remove(pos);
+                 let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
+                  // We want to suggest the inclusion of the closing delimiter where it makes
+                 // the most sense, which is immediately after the last token:
+                 //
+                 //  {foo(bar {}}
+                 //      -      ^
+                 //      |      |
+                 //      |      help: `)` may belong here (FIXME: #58270)
+                 //      |
+                 //      unclosed delimiter
+                 if let Some(sp) = unmatched.unclosed_span {
+                     err.span_label(sp, "unclosed delimiter");
+                 }
+                 err.span_suggestion_short(
+                     self.sess.source_map().next_point(self.prev_span),
+                     &format!("{} may belong here", delim.to_string()),
+                     delim.to_string(),
+                     Applicability::MaybeIncorrect,
+                 );
+                 err.emit();
+                 self.expected_tokens.clear();  // reduce errors
+                 Ok(true)
+             }
+             _ => Err(err),
+         }
+     }
      /// Expect next token to be edible or inedible token.  If edible,
      /// then consume it; if inedible, then return without consuming
      /// anything.  Signal a fatal error if next token is unexpected.
-     pub fn expect_one_of(&mut self,
-                          edible: &[token::Token],
-                          inedible: &[token::Token]) -> PResult<'a,  ()>{
+     pub fn expect_one_of(
+         &mut self,
+         edible: &[token::Token],
+         inedible: &[token::Token],
+     ) -> PResult<'a, bool /* recovered */> {
          fn tokens_to_string(tokens: &[TokenType]) -> String {
              let mut i = tokens.iter();
              // This might be a sign we need a connect method on Iterator.
          }
          if edible.contains(&self.token) {
              self.bump();
-             Ok(())
+             Ok(false)
          } else if inedible.contains(&self.token) {
              // leave it in the input
-             Ok(())
+             Ok(false)
          } else {
              let mut expected = edible.iter()
                  .map(|x| TokenType::Token(x.clone()))
              } else {
                  label_sp
              };
+             match self.recover_closing_delimiter(&expected.iter().filter_map(|tt| match tt {
+                 TokenType::Token(t) => Some(t.clone()),
+                 _ => None,
+             }).collect::<Vec<_>>(), err) {
+                 Err(e) => err = e,
+                 Ok(recovered) => {
+                     return Ok(recovered);
+                 }
+             }
  
              let cm = self.sess.source_map();
              match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
          }
      }
  
 +    fn check_const_arg(&mut self) -> bool {
 +        if self.token.can_begin_const_arg() {
 +            true
 +        } else {
 +            self.expected_tokens.push(TokenType::Const);
 +            false
 +        }
 +    }
 +
      /// Expect and consume a `+`. if `+=` is seen, replace it with a `=`
      /// and continue. If a `+` is not seen, return false.
      ///
      }
  
      /// Attempt to consume a `<`. If `<<` is seen, replace it with a single
 -    /// `<` and continue. If a `<` is not seen, return false.
 +    /// `<` and continue. If `<-` is seen, replace it with a single `<`
 +    /// and continue. If a `<` is not seen, return false.
      ///
      /// This is meant to be used when parsing generics on a path to get the
      /// starting token.
                  self.bump_with(token::Lt, span);
                  true
              }
 +            token::LArrow => {
 +                let span = self.span.with_lo(self.span.lo() + BytePos(1));
 +                self.bump_with(token::BinOp(token::Minus), span);
 +                true
 +            }
              _ => false,
          };
  
          if ate {
              // See doc comment for `unmatched_angle_bracket_count`.
              self.unmatched_angle_bracket_count += 1;
+             self.max_angle_bracket_count += 1;
              debug!("eat_lt: (increment) count={:?}", self.unmatched_angle_bracket_count);
          }
  
          };
  
          match ate {
-             Some(x) => {
+             Some(_) => {
                  // See doc comment for `unmatched_angle_bracket_count`.
                  self.unmatched_angle_bracket_count -= 1;
                  debug!("expect_gt: (decrement) count={:?}", self.unmatched_angle_bracket_count);
  
-                 Ok(x)
+                 Ok(())
              },
              None => self.unexpected(),
          }
                                    -> PResult<'a, Vec<T>> where
          F: FnMut(&mut Parser<'a>) -> PResult<'a,  T>,
      {
-         let val = self.parse_seq_to_before_end(ket, sep, f)?;
-         self.bump();
+         let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+         if !recovered {
+             self.bump();
+         }
          Ok(val)
      }
  
      /// Parse a sequence, not including the closing delimiter. The function
      /// f must consume tokens until reaching the next separator or
      /// closing bracket.
-     pub fn parse_seq_to_before_end<T, F>(&mut self,
-                                          ket: &token::Token,
-                                          sep: SeqSep,
-                                          f: F)
-                                          -> PResult<'a, Vec<T>>
+     pub fn parse_seq_to_before_end<T, F>(
+         &mut self,
+         ket: &token::Token,
+         sep: SeqSep,
+         f: F,
+     ) -> PResult<'a, (Vec<T>, bool)>
          where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
      {
          self.parse_seq_to_before_tokens(&[ket], sep, TokenExpectType::Expect, f)
          sep: SeqSep,
          expect: TokenExpectType,
          mut f: F,
-     ) -> PResult<'a, Vec<T>>
+     ) -> PResult<'a, (Vec<T>, bool /* recovered */)>
          where F: FnMut(&mut Parser<'a>) -> PResult<'a, T>
      {
-         let mut first: bool = true;
+         let mut first = true;
+         let mut recovered = false;
          let mut v = vec![];
          while !kets.iter().any(|k| {
                  match expect {
                  if first {
                      first = false;
                  } else {
-                     if let Err(mut e) = self.expect(t) {
-                         // Attempt to keep parsing if it was a similar separator
-                         if let Some(ref tokens) = t.similar_tokens() {
-                             if tokens.contains(&self.token) {
-                                 self.bump();
-                             }
+                     match self.expect(t) {
+                         Ok(false) => {}
+                         Ok(true) => {
+                             recovered = true;
+                             break;
                          }
-                         e.emit();
-                         // Attempt to keep parsing if it was an omitted separator
-                         match f(self) {
-                             Ok(t) => {
-                                 v.push(t);
-                                 continue;
-                             },
-                             Err(mut e) => {
-                                 e.cancel();
-                                 break;
+                         Err(mut e) => {
+                             // Attempt to keep parsing if it was a similar separator
+                             if let Some(ref tokens) = t.similar_tokens() {
+                                 if tokens.contains(&self.token) {
+                                     self.bump();
+                                 }
+                             }
+                             e.emit();
+                             // Attempt to keep parsing if it was an omitted separator
+                             match f(self) {
+                                 Ok(t) => {
+                                     v.push(t);
+                                     continue;
+                                 },
+                                 Err(mut e) => {
+                                     e.cancel();
+                                     break;
+                                 }
                              }
                          }
                      }
              v.push(t);
          }
  
-         Ok(v)
+         Ok((v, recovered))
      }
  
      /// Parse a sequence, including the closing delimiter. The function
      /// f must consume tokens until reaching the next separator or
      /// closing bracket.
-     fn parse_unspanned_seq<T, F>(&mut self,
-                                      bra: &token::Token,
-                                      ket: &token::Token,
-                                      sep: SeqSep,
-                                      f: F)
-                                      -> PResult<'a, Vec<T>> where
+     fn parse_unspanned_seq<T, F>(
+         &mut self,
+         bra: &token::Token,
+         ket: &token::Token,
+         sep: SeqSep,
+         f: F,
+     ) -> PResult<'a, Vec<T>> where
          F: FnMut(&mut Parser<'a>) -> PResult<'a, T>,
      {
          self.expect(bra)?;
-         let result = self.parse_seq_to_before_end(ket, sep, f)?;
-         self.eat(ket);
+         let (result, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
+         if !recovered {
+             self.eat(ket);
+         }
          Ok(result)
      }
  
              // We use `style == PathStyle::Expr` to check if this is in a recursion or not. If
              // it isn't, then we reset the unmatched angle bracket count as we're about to start
              // parsing a new path.
-             if style == PathStyle::Expr { self.unmatched_angle_bracket_count = 0; }
+             if style == PathStyle::Expr {
+                 self.unmatched_angle_bracket_count = 0;
+                 self.max_angle_bracket_count = 0;
+             }
  
              let args = if self.eat_lt() {
                  // `<'a, T, A = U>`
              } else {
                  // `(T, U) -> R`
                  self.bump(); // `(`
-                 let inputs = self.parse_seq_to_before_tokens(
+                 let (inputs, recovered) = self.parse_seq_to_before_tokens(
                      &[&token::CloseDelim(token::Paren)],
                      SeqSep::trailing_allowed(token::Comma),
                      TokenExpectType::Expect,
                      |p| p.parse_ty())?;
-                 self.bump(); // `)`
+                 if !recovered {
+                     self.bump(); // `)`
+                 }
                  let span = lo.to(self.prev_span);
                  let output = if self.eat(&token::RArrow) {
                      Some(self.parse_ty_common(false, false)?)
                  // (e,) is a tuple with only one field, e
                  let mut es = vec![];
                  let mut trailing_comma = false;
+                 let mut recovered = false;
                  while self.token != token::CloseDelim(token::Paren) {
                      es.push(self.parse_expr()?);
-                     self.expect_one_of(&[], &[token::Comma, token::CloseDelim(token::Paren)])?;
+                     recovered = self.expect_one_of(
+                         &[],
+                         &[token::Comma, token::CloseDelim(token::Paren)],
+                     )?;
                      if self.eat(&token::Comma) {
                          trailing_comma = true;
                      } else {
                          break;
                      }
                  }
-                 self.bump();
+                 if !recovered {
+                     self.bump();
+                 }
  
                  hi = self.prev_span;
                  ex = if es.len() == 1 && !trailing_comma {
                      hi = pth.span;
                      ex = ExprKind::Path(None, pth);
                  } else {
+                     if !self.unclosed_delims.is_empty() && self.check(&token::Semi) {
+                         // Don't complain about bare semicolons after unclosed braces
+                         // recovery in order to keep the error count down. Fixing the
+                         // delimiters will possibly also fix the bare semicolon found in
+                         // expression context. For example, silence the following error:
+                         // ```
+                         // error: expected expression, found `;`
+                         //  --> file.rs:2:13
+                         //   |
+                         // 2 |     foo(bar(;
+                         //   |             ^ expected expression
+                         // ```
+                         self.bump();
+                         return Ok(self.mk_expr(self.span, ExprKind::Err, ThinVec::new()));
+                     }
                      match self.parse_literal_maybe_minus() {
                          Ok(expr) => {
                              hi = expr.span;
  
              match self.expect_one_of(&[token::Comma],
                                       &[token::CloseDelim(token::Brace)]) {
-                 Ok(()) => if let Some(f) = parsed_field.or(recovery_field) {
+                 Ok(_) => if let Some(f) = parsed_field.or(recovery_field) {
                      // only include the field if there's no parse error for the field name
                      fields.push(f);
                  }
          Ok((ident, TraitItemKind::Type(bounds, default), generics))
      }
  
 +    fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
 +        self.expect_keyword(keywords::Const)?;
 +        let ident = self.parse_ident()?;
 +        self.expect(&token::Colon)?;
 +        let ty = self.parse_ty()?;
 +
 +        Ok(GenericParam {
 +            ident,
 +            id: ast::DUMMY_NODE_ID,
 +            attrs: preceding_attrs.into(),
 +            bounds: Vec::new(),
 +            kind: GenericParamKind::Const {
 +                ty,
 +            }
 +        })
 +    }
 +
      /// Parses (possibly empty) list of lifetime and type parameters, possibly including
      /// trailing comma and erroneous trailing attributes.
      crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
 -        let mut lifetimes = Vec::new();
          let mut params = Vec::new();
 -        let mut seen_ty_param: Option<Span> = None;
 -        let mut last_comma_span = None;
 -        let mut bad_lifetime_pos = vec![];
 -        let mut suggestions = vec![];
          loop {
              let attrs = self.parse_outer_attributes()?;
              if self.check_lifetime() {
                  } else {
                      Vec::new()
                  };
 -                lifetimes.push(ast::GenericParam {
 +                params.push(ast::GenericParam {
                      ident: lifetime.ident,
                      id: lifetime.id,
                      attrs: attrs.into(),
                      bounds,
                      kind: ast::GenericParamKind::Lifetime,
                  });
 -                if let Some(sp) = seen_ty_param {
 -                    let remove_sp = last_comma_span.unwrap_or(self.prev_span).to(self.prev_span);
 -                    bad_lifetime_pos.push(self.prev_span);
 -                    if let Ok(snippet) = self.sess.source_map().span_to_snippet(self.prev_span) {
 -                        suggestions.push((remove_sp, String::new()));
 -                        suggestions.push((
 -                            sp.shrink_to_lo(),
 -                            format!("{}, ", snippet)));
 -                    }
 -                }
 +            } else if self.check_keyword(keywords::Const) {
 +                // Parse const parameter.
 +                params.push(self.parse_const_param(attrs)?);
              } else if self.check_ident() {
                  // Parse type parameter.
                  params.push(self.parse_ty_param(attrs)?);
 -                if seen_ty_param.is_none() {
 -                    seen_ty_param = Some(self.prev_span);
 -                }
              } else {
                  // Check for trailing attributes and stop parsing.
                  if !attrs.is_empty() {
 -                    let param_kind = if seen_ty_param.is_some() { "type" } else { "lifetime" };
 -                    self.struct_span_err(
 -                        attrs[0].span,
 -                        &format!("trailing attribute after {} parameters", param_kind),
 -                    )
 -                    .span_label(attrs[0].span, "attributes must go before parameters")
 -                    .emit();
 +                    if !params.is_empty() {
 +                        self.struct_span_err(
 +                            attrs[0].span,
 +                            &format!("trailing attribute after generic parameter"),
 +                        )
 +                        .span_label(attrs[0].span, "attributes must go before parameters")
 +                        .emit();
 +                    } else {
 +                        self.struct_span_err(
 +                            attrs[0].span,
 +                            &format!("attribute without generic parameters"),
 +                        )
 +                        .span_label(
 +                            attrs[0].span,
 +                            "attributes are only permitted when preceding parameters",
 +                        )
 +                        .emit();
 +                    }
                  }
                  break
              }
              if !self.eat(&token::Comma) {
                  break
              }
 -            last_comma_span = Some(self.prev_span);
 -        }
 -        if !bad_lifetime_pos.is_empty() {
 -            let mut err = self.struct_span_err(
 -                bad_lifetime_pos,
 -                "lifetime parameters must be declared prior to type parameters",
 -            );
 -            if !suggestions.is_empty() {
 -                err.multipart_suggestion(
 -                    "move the lifetime parameter prior to the first type parameter",
 -                    suggestions,
 -                    Applicability::MachineApplicable,
 -                );
 -            }
 -            err.emit();
          }
 -        lifetimes.extend(params);  // ensure the correct order of lifetimes and type params
 -        Ok(lifetimes)
 +        Ok(params)
      }
  
      /// Parse a set of optional generic type parameter declarations. Where
      fn parse_generic_args(&mut self) -> PResult<'a, (Vec<GenericArg>, Vec<TypeBinding>)> {
          let mut args = Vec::new();
          let mut bindings = Vec::new();
 +        let mut misplaced_assoc_ty_bindings: Vec<Span> = Vec::new();
 +        let mut assoc_ty_bindings: Vec<Span> = Vec::new();
  
 -        let mut seen_type = false;
 -        let mut seen_binding = false;
 +        let args_lo = self.span;
  
 -        let mut last_comma_span = None;
 -        let mut first_type_or_binding_span: Option<Span> = None;
 -        let mut first_binding_span: Option<Span> = None;
 -
 -        let mut bad_lifetime_pos = vec![];
 -        let mut bad_type_pos = vec![];
 -
 -        let mut lifetime_suggestions = vec![];
 -        let mut type_suggestions = vec![];
          loop {
              if self.check_lifetime() && self.look_ahead(1, |t| !t.is_like_plus()) {
                  // Parse lifetime argument.
                  args.push(GenericArg::Lifetime(self.expect_lifetime()));
 -
 -                if seen_type || seen_binding {
 -                    let remove_sp = last_comma_span.unwrap_or(self.prev_span).to(self.prev_span);
 -                    bad_lifetime_pos.push(self.prev_span);
 -
 -                    if let Ok(snippet) = self.sess.source_map().span_to_snippet(self.prev_span) {
 -                        lifetime_suggestions.push((remove_sp, String::new()));
 -                        lifetime_suggestions.push((
 -                            first_type_or_binding_span.unwrap().shrink_to_lo(),
 -                            format!("{}, ", snippet)));
 -                    }
 -                }
 +                misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
              } else if self.check_ident() && self.look_ahead(1, |t| t == &token::Eq) {
                  // Parse associated type binding.
                  let lo = self.span;
                      ty,
                      span,
                  });
 -
 -                seen_binding = true;
 -                if first_type_or_binding_span.is_none() {
 -                    first_type_or_binding_span = Some(span);
 -                }
 -                if first_binding_span.is_none() {
 -                    first_binding_span = Some(span);
 -                }
 +                assoc_ty_bindings.push(span);
 +            } else if self.check_const_arg() {
 +                // FIXME(const_generics): to distinguish between idents for types and consts,
 +                // we should introduce a GenericArg::Ident in the AST and distinguish when
 +                // lowering to the HIR. For now, idents for const args are not permitted.
 +
 +                // Parse const argument.
 +                let expr = if let token::OpenDelim(token::Brace) = self.token {
 +                    self.parse_block_expr(None, self.span, BlockCheckMode::Default, ThinVec::new())?
 +                } else if self.token.is_ident() {
 +                    // FIXME(const_generics): to distinguish between idents for types and consts,
 +                    // we should introduce a GenericArg::Ident in the AST and distinguish when
 +                    // lowering to the HIR. For now, idents for const args are not permitted.
 +                    return Err(
 +                        self.fatal("identifiers may currently not be used for const generics")
 +                    );
 +                } else {
 +                    // FIXME(const_generics): this currently conflicts with emplacement syntax
 +                    // with negative integer literals.
 +                    self.parse_literal_maybe_minus()?
 +                };
 +                let value = AnonConst {
 +                    id: ast::DUMMY_NODE_ID,
 +                    value: expr,
 +                };
 +                args.push(GenericArg::Const(value));
 +                misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
              } else if self.check_type() {
                  // Parse type argument.
 -                let ty_param = self.parse_ty()?;
 -                if seen_binding {
 -                    let remove_sp = last_comma_span.unwrap_or(self.prev_span).to(self.prev_span);
 -                    bad_type_pos.push(self.prev_span);
 -
 -                    if let Ok(snippet) = self.sess.source_map().span_to_snippet(self.prev_span) {
 -                        type_suggestions.push((remove_sp, String::new()));
 -                        type_suggestions.push((
 -                            first_binding_span.unwrap().shrink_to_lo(),
 -                            format!("{}, ", snippet)));
 -                    }
 -                }
 -
 -                if first_type_or_binding_span.is_none() {
 -                    first_type_or_binding_span = Some(ty_param.span);
 -                }
 -                args.push(GenericArg::Type(ty_param));
 -                seen_type = true;
 +                args.push(GenericArg::Type(self.parse_ty()?));
 +                misplaced_assoc_ty_bindings.append(&mut assoc_ty_bindings);
              } else {
                  break
              }
  
              if !self.eat(&token::Comma) {
                  break
 -            } else {
 -                last_comma_span = Some(self.prev_span);
 -            }
 -        }
 -
 -        self.maybe_report_incorrect_generic_argument_order(
 -            bad_lifetime_pos, bad_type_pos, lifetime_suggestions, type_suggestions
 -        );
 -
 -        Ok((args, bindings))
 -    }
 -
 -    /// Maybe report an error about incorrect generic argument order - "lifetime parameters
 -    /// must be declared before type parameters", "type parameters must be declared before
 -    /// associated type bindings" or both.
 -    fn maybe_report_incorrect_generic_argument_order(
 -        &self,
 -        bad_lifetime_pos: Vec<Span>,
 -        bad_type_pos: Vec<Span>,
 -        lifetime_suggestions: Vec<(Span, String)>,
 -        type_suggestions: Vec<(Span, String)>,
 -    ) {
 -        let mut err = if !bad_lifetime_pos.is_empty() && !bad_type_pos.is_empty() {
 -            let mut positions = bad_lifetime_pos.clone();
 -            positions.extend_from_slice(&bad_type_pos);
 -
 -            self.struct_span_err(
 -                positions,
 -                "generic arguments must declare lifetimes, types and associated type bindings in \
 -                 that order",
 -            )
 -        } else if !bad_lifetime_pos.is_empty() {
 -            self.struct_span_err(
 -                bad_lifetime_pos.clone(),
 -                "lifetime parameters must be declared prior to type parameters"
 -            )
 -        } else if !bad_type_pos.is_empty() {
 -            self.struct_span_err(
 -                bad_type_pos.clone(),
 -                "type parameters must be declared prior to associated type bindings"
 -            )
 -        } else {
 -            return;
 -        };
 -
 -        if !bad_lifetime_pos.is_empty() {
 -            for sp in &bad_lifetime_pos {
 -                err.span_label(*sp, "must be declared prior to type parameters");
 -            }
 -        }
 -
 -        if !bad_type_pos.is_empty() {
 -            for sp in &bad_type_pos {
 -                err.span_label(*sp, "must be declared prior to associated type bindings");
              }
          }
  
 -        if !lifetime_suggestions.is_empty() && !type_suggestions.is_empty() {
 -            let mut suggestions = lifetime_suggestions;
 -            suggestions.extend_from_slice(&type_suggestions);
 -
 -            let plural = bad_lifetime_pos.len() + bad_type_pos.len() > 1;
 -            err.multipart_suggestion(
 -                &format!(
 -                    "move the parameter{}",
 -                    if plural { "s" } else { "" },
 -                ),
 -                suggestions,
 -                Applicability::MachineApplicable,
 -            );
 -        } else if !lifetime_suggestions.is_empty() {
 -            err.multipart_suggestion(
 -                &format!(
 -                    "move the lifetime parameter{} prior to the first type parameter",
 -                    if bad_lifetime_pos.len() > 1 { "s" } else { "" },
 -                ),
 -                lifetime_suggestions,
 -                Applicability::MachineApplicable,
 -            );
 -        } else if !type_suggestions.is_empty() {
 -            err.multipart_suggestion(
 -                &format!(
 -                    "move the type parameter{} prior to the first associated type binding",
 -                    if bad_type_pos.len() > 1 { "s" } else { "" },
 -                ),
 -                type_suggestions,
 -                Applicability::MachineApplicable,
 +        // FIXME: we would like to report this in ast_validation instead, but we currently do not
 +        // preserve ordering of generic parameters with respect to associated type binding, so we
 +        // lose that information after parsing.
 +        if misplaced_assoc_ty_bindings.len() > 0 {
 +            let mut err = self.struct_span_err(
 +                args_lo.to(self.prev_span),
 +                "associated type bindings must be declared after generic parameters",
              );
 +            for span in misplaced_assoc_ty_bindings {
 +                err.span_label(
 +                    span,
 +                    "this associated type binding should be moved after the generic parameters",
 +                );
 +            }
 +            err.emit();
          }
  
 -        err.emit();
 +        Ok((args, bindings))
      }
  
      /// Parses an optional `where` clause and places it in `generics`.
  
          let sp = self.span;
          let mut variadic = false;
-         let args: Vec<Option<Arg>> =
+         let (args, recovered): (Vec<Option<Arg>>, bool) =
              self.parse_seq_to_before_end(
                  &token::CloseDelim(token::Paren),
                  SeqSep::trailing_allowed(token::Comma),
                  }
              )?;
  
-         self.eat(&token::CloseDelim(token::Paren));
+         if !recovered {
+             self.eat(&token::CloseDelim(token::Paren));
+         }
  
          let args: Vec<_> = args.into_iter().filter_map(|x| x).collect();
  
  
          // Parse the rest of the function parameter list.
          let sep = SeqSep::trailing_allowed(token::Comma);
-         let fn_inputs = if let Some(self_arg) = self_arg {
+         let (fn_inputs, recovered) = if let Some(self_arg) = self_arg {
              if self.check(&token::CloseDelim(token::Paren)) {
-                 vec![self_arg]
+                 (vec![self_arg], false)
              } else if self.eat(&token::Comma) {
                  let mut fn_inputs = vec![self_arg];
-                 fn_inputs.append(&mut self.parse_seq_to_before_end(
-                     &token::CloseDelim(token::Paren), sep, parse_arg_fn)?
-                 );
-                 fn_inputs
+                 let (mut input, recovered) = self.parse_seq_to_before_end(
+                     &token::CloseDelim(token::Paren), sep, parse_arg_fn)?;
+                 fn_inputs.append(&mut input);
+                 (fn_inputs, recovered)
              } else {
                  return self.unexpected();
              }
              self.parse_seq_to_before_end(&token::CloseDelim(token::Paren), sep, parse_arg_fn)?
          };
  
-         // Parse closing paren and return type.
-         self.expect(&token::CloseDelim(token::Paren))?;
+         if !recovered {
+             // Parse closing paren and return type.
+             self.expect(&token::CloseDelim(token::Paren))?;
+         }
          Ok(P(FnDecl {
              inputs: fn_inputs,
              output: self.parse_ret_ty(true)?,
                      SeqSep::trailing_allowed(token::Comma),
                      TokenExpectType::NoExpect,
                      |p| p.parse_fn_block_arg()
-                 )?;
+                 )?.0;
                  self.expect_or()?;
                  args
              }
          //     `<` (LIFETIME|IDENT) `,` - first generic parameter in a list
          //     `<` (LIFETIME|IDENT) `:` - generic parameter with bounds
          //     `<` (LIFETIME|IDENT) `=` - generic parameter with a default
 +        //     `<` const                - generic const parameter
          // The only truly ambiguous case is
          //     `<` IDENT `>` `::` IDENT ...
          // we disambiguate it in favor of generics (`impl<T> ::absolute::Path<T> { ... }`)
              (self.look_ahead(1, |t| t == &token::Pound || t == &token::Gt) ||
               self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
                  self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
 -                                       t == &token::Colon || t == &token::Eq))
 +                                       t == &token::Colon || t == &token::Eq) ||
 +             self.look_ahead(1, |t| t.is_keyword(keywords::Const)))
      }
  
      fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
              // eat a matched-delimiter token tree:
              let (delim, tts) = self.expect_delimited_token_tree()?;
              if delim != MacDelimiter::Brace {
-                 self.expect(&token::Semi)?
+                 self.expect(&token::Semi)?;
              }
  
              Ok(Some(respan(lo.to(self.prev_span), Mac_ { path: pth, tts, delim })))
      /// entry point for the parser.
      pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
          let lo = self.span;
-         Ok(ast::Crate {
+         let krate = Ok(ast::Crate {
              attrs: self.parse_inner_attributes()?,
              module: self.parse_mod_items(&token::Eof, lo)?,
              span: lo.to(self.span),
-         })
+         });
+         emit_unclosed_delims(&self.unclosed_delims, self.diagnostic());
+         self.unclosed_delims.clear();
+         krate
      }
  
      pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
          }
      }
  }
+ pub fn emit_unclosed_delims(unclosed_delims: &[UnmatchedBrace], handler: &errors::Handler) {
+     for unmatched in unclosed_delims {
+         let mut err = handler.struct_span_err(unmatched.found_span, &format!(
+             "incorrect close delimiter: `{}`",
+             pprust::token_to_string(&token::Token::CloseDelim(unmatched.found_delim)),
+         ));
+         err.span_label(unmatched.found_span, "incorrect close delimiter");
+         if let Some(sp) = unmatched.candidate_span {
+             err.span_label(sp, "close delimiter possibly meant for this");
+         }
+         if let Some(sp) = unmatched.unclosed_span {
+             err.span_label(sp, "un-closed delimiter");
+         }
+         err.emit();
+     }
+ }
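
For readers of the parser hunks above, a minimal sketch (not itself part of the commit) of the `bool /* recovered */` convention that `expect`, `expect_one_of`, and the `parse_seq_to_before_*` family now share:

    // Recurring caller pattern, using the names from this diff.
    let (val, recovered) = self.parse_seq_to_before_end(ket, sep, f)?;
    if !recovered {
        // `recovered == true` means `recover_closing_delimiter` already emitted a
        // diagnostic and acted as if the closing delimiter were present, so
        // bumping here would consume a real token by mistake.
        self.bump();
    }
    Ok(val)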
index d5856c67156c06635901d518c5431af836169b38,d144223d1b8ebe446c7ffde04cb70a0c87a310a4..09924e304cfd9c7c5317dd948caefe62b219af6d
@@@ -10,6 -10,7 +10,7 @@@ use crate::print::pprust
  use crate::ptr::P;
  use crate::symbol::keywords;
  use crate::syntax::parse::parse_stream_from_source_str;
+ use crate::syntax::parse::parser::emit_unclosed_delims;
  use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
  
  use serialize::{Decodable, Decoder, Encodable, Encoder};
@@@ -279,20 -280,6 +280,20 @@@ impl Token 
          }
      }
  
 +    /// Returns `true` if the token can appear at the start of a const param.
 +    pub fn can_begin_const_arg(&self) -> bool {
 +        match self {
 +            OpenDelim(Brace) => true,
 +            Interpolated(ref nt) => match nt.0 {
 +                NtExpr(..) => true,
 +                NtBlock(..) => true,
 +                NtLiteral(..) => true,
 +                _ => false,
 +            }
 +            _ => self.can_begin_literal_or_bool(),
 +        }
 +    }
 +
      /// Returns `true` if the token can appear at the start of a generic bound.
      crate fn can_begin_bound(&self) -> bool {
          self.is_path_start() || self.is_lifetime() || self.is_keyword(keywords::For) ||
          }
      }
  
 -    /// Returns `true` if the token is any literal, a minus (which can follow a literal,
 +    /// Returns `true` if the token is any literal, a minus (which can prefix a literal,
      /// for example a '-42', or one of the boolean idents).
      crate fn can_begin_literal_or_bool(&self) -> bool {
          match *self {
      /// Enables better error recovery when the wrong token is found.
      crate fn similar_tokens(&self) -> Option<Vec<Token>> {
          match *self {
-             Comma => Some(vec![Dot, Lt]),
-             Semi => Some(vec![Colon]),
+             Comma => Some(vec![Dot, Lt, Semi]),
+             Semi => Some(vec![Colon, Comma]),
              _ => None
          }
      }
              // FIXME(#43081): Avoid this pretty-print + reparse hack
              let source = pprust::token_to_string(self);
              let filename = FileName::macro_expansion_source_code(&source);
-             parse_stream_from_source_str(filename, source, sess, Some(span))
+             let (tokens, errors) = parse_stream_from_source_str(
+                 filename, source, sess, Some(span));
+             emit_unclosed_delims(&errors, &sess.span_diagnostic);
+             tokens
          });
  
          // During early phases of the compiler the AST could get modified
@@@ -800,12 -790,13 +804,13 @@@ fn prepend_attrs(sess: &ParseSess
          let source = pprust::attr_to_string(attr);
          let macro_filename = FileName::macro_expansion_source_code(&source);
          if attr.is_sugared_doc {
-             let stream = parse_stream_from_source_str(
+             let (stream, errors) = parse_stream_from_source_str(
                  macro_filename,
                  source,
                  sess,
                  Some(span),
              );
+             emit_unclosed_delims(&errors, &sess.span_diagnostic);
              builder.push(stream);
              continue
          }
          // ... and for more complicated paths, fall back to a reparse hack that
          // should eventually be removed.
          } else {
-             let stream = parse_stream_from_source_str(
+             let (stream, errors) = parse_stream_from_source_str(
                  macro_filename,
                  source,
                  sess,
                  Some(span),
              );
+             emit_unclosed_delims(&errors, &sess.span_diagnostic);
              brackets.push(stream);
          }
  
index 730262683c0b7259b2496c68d8d2676f8718ccea,38d12db13ef7fe095da6eddfe627adc156fdb7a5..2158cfc089bdd1cbcd9785a7b0b678581b4888a5
@@@ -1,5 -1,4 +1,5 @@@
 -use errors::{self, Diagnostic, DiagnosticBuilder};
 +use crate::errors::{self, Diagnostic, DiagnosticBuilder};
 +
  use std::panic;
  
  use proc_macro::bridge::{server, TokenTree};
@@@ -12,6 -11,7 +12,7 @@@ use syntax::ast
  use syntax::ext::base::ExtCtxt;
  use syntax::parse::lexer::comments;
  use syntax::parse::{self, token, ParseSess};
+ use syntax::parse::parser::emit_unclosed_delims;
  use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
  use syntax_pos::hygiene::{SyntaxContext, Transparency};
  use syntax_pos::symbol::{keywords, Symbol};
@@@ -370,7 -370,7 +371,7 @@@ pub(crate) struct Rustc<'a> 
  }
  
  impl<'a> Rustc<'a> {
 -    pub fn new(cx: &'a ExtCtxt) -> Self {
 +    pub fn new(cx: &'a ExtCtxt<'_>) -> Self {
          // No way to determine def location for a proc macro right now, so use call location.
          let location = cx.current_expansion.mark.expn_info().unwrap().call_site;
          let to_span = |transparency| {
@@@ -409,12 -409,14 +410,14 @@@ impl server::TokenStream for Rustc<'_> 
          stream.is_empty()
      }
      fn from_str(&mut self, src: &str) -> Self::TokenStream {
-         parse::parse_stream_from_source_str(
+         let (tokens, errors) = parse::parse_stream_from_source_str(
              FileName::proc_macro_source_code(src.clone()),
              src.to_string(),
              self.sess,
              Some(self.call_site),
-         )
+         );
+         emit_unclosed_delims(&errors, &self.sess.span_diagnostic);
+         tokens
      }
      fn to_string(&mut self, stream: &Self::TokenStream) -> String {
          stream.to_string()
@@@ -651,7 -653,7 +654,7 @@@ impl server::Literal for Rustc<'_> 
      }
  }
  
 -impl<'a> server::SourceFile for Rustc<'a> {
 +impl server::SourceFile for Rustc<'_> {
      fn eq(&mut self, file1: &Self::SourceFile, file2: &Self::SourceFile) -> bool {
          Lrc::ptr_eq(file1, file2)
      }