use crate::parse::PResult;
use crate::ThinVec;
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
-use crate::symbol::{keywords, sym, Symbol};
+use crate::symbol::{kw, sym, Symbol};
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
use rustc_target::spec::abi::{self, Abi};
}
#[derive(Clone, Copy, PartialEq, Debug)]
-enum SemiColonMode {
+crate enum SemiColonMode {
Break,
Ignore,
Comma,
}
#[derive(Clone, Copy, PartialEq, Debug)]
-enum BlockMode {
+crate enum BlockMode {
Break,
Ignore,
}
let body = TokenTree::Delimited(
delim_span,
token::Bracket,
- [TokenTree::Token(sp, token::Ident(ast::Ident::from_str("doc"), false)),
- TokenTree::Token(sp, token::Eq),
- TokenTree::Token(sp, token::Literal(
- token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
+ [
+ TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+ TokenTree::Token(sp, token::Eq),
+ TokenTree::Token(sp, token::Token::lit(
+ token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
+ )),
]
.iter().cloned().collect::<TokenStream>().into(),
);
#[derive(Clone, PartialEq)]
crate enum TokenType {
Token(token::Token),
- Keyword(keywords::Keyword),
+ Keyword(Symbol),
Operator,
Lifetime,
Ident,
}
impl TokenType {
- fn to_string(&self) -> String {
+ crate fn to_string(&self) -> String {
match *self {
TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
- TokenType::Keyword(kw) => format!("`{}`", kw.name()),
+ TokenType::Keyword(kw) => format!("`{}`", kw),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
TokenType::Ident => "identifier".to_string(),
/// Creates a placeholder argument.
fn dummy_arg(span: Span) -> Arg {
- let ident = Ident::new(keywords::Invalid.name(), span);
+ let ident = Ident::new(kw::Invalid, span);
let pat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
}
}
- fn recover_closing_delimiter(
- &mut self,
- tokens: &[token::Token],
- mut err: DiagnosticBuilder<'a>,
- ) -> PResult<'a, bool> {
- let mut pos = None;
- // we want to use the last closing delim that would apply
- for (i, unmatched) in self.unclosed_delims.iter().enumerate().rev() {
- if tokens.contains(&token::CloseDelim(unmatched.expected_delim))
- && Some(self.span) > unmatched.unclosed_span
- {
- pos = Some(i);
- }
- }
- match pos {
- Some(pos) => {
- // Recover and assume that the detected unclosed delimiter was meant for
- // this location. Emit the diagnostic and act as if the delimiter was
- // present for the parser's sake.
-
- // Don't attempt to recover from this unclosed delimiter more than once.
- let unmatched = self.unclosed_delims.remove(pos);
- let delim = TokenType::Token(token::CloseDelim(unmatched.expected_delim));
-
- // We want to suggest the inclusion of the closing delimiter where it makes
- // the most sense, which is immediately after the last token:
- //
- // {foo(bar {}}
- // - ^
- // | |
- // | help: `)` may belong here (FIXME: #58270)
- // |
- // unclosed delimiter
- if let Some(sp) = unmatched.unclosed_span {
- err.span_label(sp, "unclosed delimiter");
- }
- err.span_suggestion_short(
- self.sess.source_map().next_point(self.prev_span),
- &format!("{} may belong here", delim.to_string()),
- delim.to_string(),
- Applicability::MaybeIncorrect,
- );
- err.emit();
- self.expected_tokens.clear(); // reduce errors
- Ok(true)
- }
- _ => Err(err),
- }
- }
-
/// Expect next token to be edible or inedible token. If edible,
/// then consume it; if inedible, then return without consuming
/// anything. Signal a fatal error if next token is unexpected.
TokenType::Token(token::Semi) => true, // we expect a `;` here
_ => false,
}) && ( // a `;` would be expected before the current keyword
- self.token.is_keyword(keywords::Break) ||
- self.token.is_keyword(keywords::Continue) ||
- self.token.is_keyword(keywords::For) ||
- self.token.is_keyword(keywords::If) ||
- self.token.is_keyword(keywords::Let) ||
- self.token.is_keyword(keywords::Loop) ||
- self.token.is_keyword(keywords::Match) ||
- self.token.is_keyword(keywords::Return) ||
- self.token.is_keyword(keywords::While)
+ self.token.is_keyword(kw::Break) ||
+ self.token.is_keyword(kw::Continue) ||
+ self.token.is_keyword(kw::For) ||
+ self.token.is_keyword(kw::If) ||
+ self.token.is_keyword(kw::Let) ||
+ self.token.is_keyword(kw::Loop) ||
+ self.token.is_keyword(kw::Match) ||
+ self.token.is_keyword(kw::Return) ||
+ self.token.is_keyword(kw::While)
);
let cm = self.sess.source_map();
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
is_present
}
- fn check_keyword(&mut self, kw: keywords::Keyword) -> bool {
+ fn check_keyword(&mut self, kw: Symbol) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw));
self.token.is_keyword(kw)
}
/// If the next token is the given keyword, eats it and returns
/// `true`. Otherwise, returns `false`.
- pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
+ pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
if self.check_keyword(kw) {
self.bump();
true
}
}
- fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool {
+ fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
if self.token.is_keyword(kw) {
self.bump();
true
/// If the given word is not a keyword, signals an error.
/// If the next token is not the given word, signals an error.
/// Otherwise, eats it.
- fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
+ fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
if !self.eat_keyword(kw) {
self.unexpected()
} else {
}
fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
- literal::expect_no_suffix(sp, &self.sess.span_diagnostic, kind, suffix)
+ literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix)
}
/// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
/// Is the current token one of the keywords that signals a bare function type?
fn token_is_bare_fn_keyword(&mut self) -> bool {
- self.check_keyword(keywords::Fn) ||
- self.check_keyword(keywords::Unsafe) ||
- self.check_keyword(keywords::Extern)
+ self.check_keyword(kw::Fn) ||
+ self.check_keyword(kw::Unsafe) ||
+ self.check_keyword(kw::Extern)
}
/// Parses a `TyKind::BareFn` type.
*/
let unsafety = self.parse_unsafety();
- let abi = if self.eat_keyword(keywords::Extern) {
+ let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let (inputs, c_variadic) = self.parse_fn_args(false, true)?;
let ret_ty = self.parse_ret_ty(false)?;
let decl = P(FnDecl {
/// Parses asyncness: `async` or nothing.
fn parse_asyncness(&mut self) -> IsAsync {
- if self.eat_keyword(keywords::Async) {
+ if self.eat_keyword(kw::Async) {
IsAsync::Async {
closure_id: ast::DUMMY_NODE_ID,
return_impl_trait_id: ast::DUMMY_NODE_ID,
/// Parses unsafety: `unsafe` or nothing.
fn parse_unsafety(&mut self) -> Unsafety {
- if self.eat_keyword(keywords::Unsafe) {
+ if self.eat_keyword(kw::Unsafe) {
Unsafety::Unsafe
} else {
Unsafety::Normal
mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
let lo = self.span;
self.eat_bad_pub();
- let (name, node, generics) = if self.eat_keyword(keywords::Type) {
+ let (name, node, generics) = if self.eat_keyword(kw::Type) {
self.parse_trait_item_assoc_ty()?
} else if self.is_const_item() {
- self.expect_keyword(keywords::Const)?;
+ self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
(ident, TraitItemKind::Const(ty, default), ast::Generics::default())
} else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
// trait item macro.
- (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
+ (Ident::invalid(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
} else {
let (constness, unsafety, mut asyncness, abi) = self.parse_fn_front_matter()?;
// Reference
self.expect_and()?;
self.parse_borrowed_pointee()?
- } else if self.eat_keyword_noexpect(keywords::Typeof) {
+ } else if self.eat_keyword_noexpect(kw::Typeof) {
// `typeof(EXPR)`
// In order to not be ambiguous, the type must be surrounded by parens.
self.expect(&token::OpenDelim(token::Paren))?;
};
self.expect(&token::CloseDelim(token::Paren))?;
TyKind::Typeof(e)
- } else if self.eat_keyword(keywords::Underscore) {
+ } else if self.eat_keyword(kw::Underscore) {
// A type to be inferred `_`
TyKind::Infer
} else if self.token_is_bare_fn_keyword() {
// Function pointer type
self.parse_ty_bare_fn(Vec::new())?
- } else if self.check_keyword(keywords::For) {
+ } else if self.check_keyword(kw::For) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
let parse_plus = allow_plus && self.check_plus();
self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
}
- } else if self.eat_keyword(keywords::Impl) {
+ } else if self.eat_keyword(kw::Impl) {
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds(None)?;
impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
- } else if self.check_keyword(keywords::Dyn) &&
+ } else if self.check_keyword(kw::Dyn) &&
(self.span.rust_2018() ||
self.look_ahead(1, |t| t.can_begin_bound() &&
!can_continue_type_after_non_fn_ident(t))) {
}
fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
- let mutbl = if self.eat_keyword(keywords::Mut) {
+ let mutbl = if self.eat_keyword(kw::Mut) {
Mutability::Mutable
- } else if self.eat_keyword(keywords::Const) {
+ } else if self.eat_keyword(kw::Const) {
Mutability::Immutable
} else {
let span = self.prev_span;
_ => 0,
}
token::BinOp(token::And) | token::AndAnd => 1,
- _ if self.token.is_keyword(keywords::Mut) => 1,
+ _ if self.token.is_keyword(kw::Mut) => 1,
_ => 0,
};
}
match ty {
Ok(ty) => {
- let ident = Ident::new(keywords::Invalid.name(), self.prev_span);
+ let ident = Ident::new(kw::Invalid, self.prev_span);
let pat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(
fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
match self.token {
- token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
+ token::Ident(ident, false) if ident.name == kw::Underscore => {
let span = self.span;
self.bump();
Ok(Ident::new(ident.name, span))
// above). `path_span` has the span of that path, or an empty
// span in the case of something like `<T>::Bar`.
let (mut path, path_span);
- if self.eat_keyword(keywords::As) {
+ if self.eat_keyword(kw::As) {
let path_lo = self.span;
path = self.parse_path(PathStyle::Type)?;
path_span = path_lo.to(self.prev_span);
/// Parses mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
- if self.eat_keyword(keywords::Mut) {
+ if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else {
Mutability::Immutable
}
fn parse_field_name(&mut self) -> PResult<'a, Ident> {
- if let token::Literal(token::Integer(name), suffix) = self.token {
+ if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token {
self.expect_no_suffix(self.span, "a tuple index", suffix);
self.bump();
- Ok(Ident::new(name, self.prev_span))
+ Ok(Ident::new(symbol, self.prev_span))
} else {
self.parse_ident_common(false)
}
})
}
- fn mk_expr(&self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
+ crate fn mk_expr(&self, span: Span, node: ExprKind, attrs: ThinVec<Attribute>) -> P<Expr> {
P(Expr { node, span, attrs, id: ast::DUMMY_NODE_ID })
}
hi = path.span;
return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
}
- if self.span.rust_2018() && self.check_keyword(keywords::Async) {
+ if self.span.rust_2018() && self.check_keyword(kw::Async) {
return if self.is_async_block() { // check for `async {` and `async move {`
self.parse_async_block(attrs)
} else {
self.parse_lambda_expr(attrs)
};
}
- if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) {
+ if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
return self.parse_lambda_expr(attrs);
}
- if self.eat_keyword(keywords::If) {
+ if self.eat_keyword(kw::If) {
return self.parse_if_expr(attrs);
}
- if self.eat_keyword(keywords::For) {
+ if self.eat_keyword(kw::For) {
let lo = self.prev_span;
return self.parse_for_expr(None, lo, attrs);
}
- if self.eat_keyword(keywords::While) {
+ if self.eat_keyword(kw::While) {
let lo = self.prev_span;
return self.parse_while_expr(None, lo, attrs);
}
if let Some(label) = self.eat_label() {
let lo = label.ident.span;
self.expect(&token::Colon)?;
- if self.eat_keyword(keywords::While) {
+ if self.eat_keyword(kw::While) {
return self.parse_while_expr(Some(label), lo, attrs)
}
- if self.eat_keyword(keywords::For) {
+ if self.eat_keyword(kw::For) {
return self.parse_for_expr(Some(label), lo, attrs)
}
- if self.eat_keyword(keywords::Loop) {
+ if self.eat_keyword(kw::Loop) {
return self.parse_loop_expr(Some(label), lo, attrs)
}
if self.token == token::OpenDelim(token::Brace) {
err.span_label(self.span, msg);
return Err(err);
}
- if self.eat_keyword(keywords::Loop) {
+ if self.eat_keyword(kw::Loop) {
let lo = self.prev_span;
return self.parse_loop_expr(None, lo, attrs);
}
- if self.eat_keyword(keywords::Continue) {
+ if self.eat_keyword(kw::Continue) {
let label = self.eat_label();
let ex = ExprKind::Continue(label);
let hi = self.prev_span;
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
- if self.eat_keyword(keywords::Match) {
+ if self.eat_keyword(kw::Match) {
let match_sp = self.prev_span;
return self.parse_match_expr(attrs).map_err(|mut err| {
err.span_label(match_sp, "while parsing this match expression");
err
});
}
- if self.eat_keyword(keywords::Unsafe) {
+ if self.eat_keyword(kw::Unsafe) {
return self.parse_block_expr(
None,
lo,
}
if self.is_try_block() {
let lo = self.span;
- assert!(self.eat_keyword(keywords::Try));
+ assert!(self.eat_keyword(kw::Try));
return self.parse_try_block(lo, attrs);
}
- if self.eat_keyword(keywords::Return) {
+ if self.eat_keyword(kw::Return) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
hi = e.span;
} else {
ex = ExprKind::Ret(None);
}
- } else if self.eat_keyword(keywords::Break) {
+ } else if self.eat_keyword(kw::Break) {
let label = self.eat_label();
let e = if self.token.can_begin_expr()
&& !(self.token == token::OpenDelim(token::Brace)
};
ex = ExprKind::Break(label, e);
hi = self.prev_span;
- } else if self.eat_keyword(keywords::Yield) {
+ } else if self.eat_keyword(kw::Yield) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
hi = e.span;
} else {
ex = ExprKind::Yield(None);
}
- } else if self.token.is_keyword(keywords::Let) {
+ } else if self.token.is_keyword(kw::Let) {
// Catch this syntax error here, instead of in `parse_ident`, so
// that we can explicitly mention that let is not to be used as an expression
let mut db = self.fatal("expected expression, found statement (`let`)");
db.span_label(self.span, "expected expression");
db.note("variable declaration using `let` is a statement");
return Err(db);
- } else if self.span.rust_2018() && self.eat_keyword(keywords::Await) {
- let await_sp = self.prev_span;
- let e = self.parse_async_macro_or_stmt(lo, await_sp)?;
- hi = e.0;
- ex = e.1;
+ } else if self.span.rust_2018() && self.eat_keyword(kw::Await) {
+ let (await_hi, e_kind) = self.parse_await_macro_or_alt(lo, self.prev_span)?;
+ hi = await_hi;
+ ex = e_kind;
} else if self.token.is_path_start() {
let path = self.parse_path(PathStyle::Expr)?;
self.maybe_recover_from_bad_qpath(expr, true)
}
- fn parse_async_macro_or_stmt(
+ /// Parse `await!(<expr>)` calls, or alternatively recover from incorrect but reasonable
+ /// alternative syntaxes `await <expr>`, `await? <expr>`, `await(<expr>)` and
+ /// `await { <expr> }`.
+ fn parse_await_macro_or_alt(
&mut self,
lo: Span,
await_sp: Span,
) -> PResult<'a, (Span, ExprKind)> {
- Ok(match self.token {
- token::Not => {
- // Handle correct `await!(<expr>)`
- // FIXME: make this an error when `await!` is no longer supported
- // https://github.com/rust-lang/rust/issues/60610
- self.expect(&token::Not)?;
- self.expect(&token::OpenDelim(token::Paren))?;
- let expr = self.parse_expr().map_err(|mut err| {
- err.span_label(
- await_sp,
- "while parsing this await macro call",
- );
- err
- })?;
- self.expect(&token::CloseDelim(token::Paren))?;
- (expr.span, ExprKind::Await(ast::AwaitOrigin::MacroLike, expr))
- }
- token::Question => {
- // Handle `await? <expr>`
- self.bump(); // `?`
- let expr = self.parse_expr().map_err(|mut err| {
- err.span_label(
- await_sp,
- "while parsing this incorrect await statement",
- );
- err
- })?;
- let sp = lo.to(expr.span);
- let expr_str = self.sess.source_map().span_to_snippet(expr.span)
- .unwrap_or_else(|_| pprust::expr_to_string(&expr));
- let expr = self.mk_expr(
- sp,
- ExprKind::Await(ast::AwaitOrigin::FieldLike, expr),
- ThinVec::new(),
- );
- let mut err = self.struct_span_err(
- await_sp,
- "incorrect use of `await`",
- );
- err.span_suggestion(
- sp,
- "`await` is not a statement",
- format!("{}.await?", expr_str),
- Applicability::MachineApplicable,
- );
- err.emit();
- (sp, ExprKind::Try(expr))
- }
- ref t => {
- // Handle `await <expr>`
- let expr = if t == &token::OpenDelim(token::Brace) {
- // Handle `await { <expr> }`
- // this needs to be handled separatedly from the next arm to avoid
- // interpreting `await { <expr> }?` as `<expr>?.await`
- self.parse_block_expr(
- None,
- self.span,
- BlockCheckMode::Default,
- ThinVec::new(),
- )
- } else {
- self.parse_expr()
- }.map_err(|mut err| {
- err.span_label(
- await_sp,
- "while parsing this incorrect await statement",
- );
- err
- })?;
- let expr_str = self.sess.source_map().span_to_snippet(expr.span)
- .unwrap_or_else(|_| pprust::expr_to_string(&expr));
- let sp = lo.to(expr.span);
- let mut err = self.struct_span_err(
- await_sp,
- "incorrect use of `await`",
- );
- err.span_suggestion(
- sp,
- "`await` is not a statement",
- format!("{}.await", expr_str),
- Applicability::MachineApplicable,
- );
- err.emit();
- (sp, ExprKind::Await(ast::AwaitOrigin::FieldLike, expr))
- }
- })
+ if self.token == token::Not {
+ // Handle correct `await!(<expr>)`.
+ // FIXME: make this an error when `await!` is no longer supported
+ // https://github.com/rust-lang/rust/issues/60610
+ self.expect(&token::Not)?;
+ self.expect(&token::OpenDelim(token::Paren))?;
+ let expr = self.parse_expr().map_err(|mut err| {
+ err.span_label(await_sp, "while parsing this await macro call");
+ err
+ })?;
+ self.expect(&token::CloseDelim(token::Paren))?;
+ Ok((self.prev_span, ExprKind::Await(ast::AwaitOrigin::MacroLike, expr)))
+ } else { // Handle `await <expr>`.
+ self.parse_incorrect_await_syntax(lo, await_sp)
+ }
}
fn maybe_parse_struct_expr(
}
/// Parses a block or unsafe block.
- fn parse_block_expr(&mut self, opt_label: Option<Label>,
- lo: Span, blk_mode: BlockCheckMode,
- outer_attrs: ThinVec<Attribute>)
- -> PResult<'a, P<Expr>> {
+ crate fn parse_block_expr(
+ &mut self,
+ opt_label: Option<Label>,
+ lo: Span,
+ blk_mode: BlockCheckMode,
+ outer_attrs: ThinVec<Attribute>,
+ ) -> PResult<'a, P<Expr>> {
self.expect(&token::OpenDelim(token::Brace))?;
let mut attrs = outer_attrs;
// Assuming we have just parsed `.`, continue parsing into an expression.
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
- if self.span.rust_2018() && self.eat_keyword(keywords::Await) {
+ if self.span.rust_2018() && self.eat_keyword(kw::Await) {
let span = lo.to(self.prev_span);
let await_expr = self.mk_expr(
span,
ExprKind::Await(ast::AwaitOrigin::FieldLike, self_arg),
ThinVec::new(),
);
- if self.token == token::OpenDelim(token::Paren) &&
- self.look_ahead(1, |t| t == &token::CloseDelim(token::Paren))
- {
- // future.await()
- let lo = self.span;
- self.bump(); // (
- let sp = lo.to(self.span);
- self.bump(); // )
- let mut err = self.struct_span_err(span, "incorrect use of `await`");
- err.span_suggestion(
- sp,
- "`await` is not a method call, remove the parentheses",
- String::new(),
- Applicability::MachineApplicable,
- );
- err.emit()
- }
+ self.recover_from_await_method_call();
return Ok(await_expr);
}
let segment = self.parse_path_segment(PathStyle::Expr)?;
token::Ident(..) => {
e = self.parse_dot_suffix(e, lo)?;
}
- token::Literal(token::Integer(name), suffix) => {
+ token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
let span = self.span;
self.bump();
- let field = ExprKind::Field(e, Ident::new(name, span));
+ let field = ExprKind::Field(e, Ident::new(symbol, span));
e = self.mk_expr(lo.to(span), field, ThinVec::new());
self.expect_no_suffix(span, "a tuple index", suffix);
}
- token::Literal(token::Float(n), _suf) => {
+ token::Literal(token::Lit { kind: token::Float, symbol, .. }) => {
self.bump();
- let fstr = n.as_str();
- let mut err = self.diagnostic()
- .struct_span_err(self.prev_span, &format!("unexpected token: `{}`", n));
+ let fstr = symbol.as_str();
+ let msg = format!("unexpected token: `{}`", symbol);
+ let mut err = self.diagnostic().struct_span_err(self.prev_span, &msg);
err.span_label(self.prev_span, "unexpected token");
if fstr.chars().all(|x| "0123456789.".contains(x)) {
let float = match fstr.parse::<f64>().ok() {
return Ok(e);
}
- fn recover_seq_parse_error(
- &mut self,
- delim: token::DelimToken,
- lo: Span,
- result: PResult<'a, P<Expr>>,
- ) -> P<Expr> {
- match result {
- Ok(x) => x,
- Err(mut err) => {
- err.emit();
- // recover from parse error
- self.consume_block(delim);
- self.mk_expr(lo.to(self.prev_span), ExprKind::Err, ThinVec::new())
- }
- }
- }
-
crate fn process_potential_macro_variable(&mut self) {
let (token, span) = match self.token {
token::Dollar if self.span.ctxt() != syntax_pos::hygiene::SyntaxContext::empty() &&
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), ExprKind::AddrOf(m, e))
}
- token::Ident(..) if self.token.is_keyword(keywords::In) => {
+ token::Ident(..) if self.token.is_keyword(kw::In) => {
self.bump();
let place = self.parse_expr_res(
Restrictions::NO_STRUCT_LITERAL,
let blk_expr = self.mk_expr(span, ExprKind::Block(blk, None), ThinVec::new());
(lo.to(span), ExprKind::ObsoleteInPlace(place, blk_expr))
}
- token::Ident(..) if self.token.is_keyword(keywords::Box) => {
+ token::Ident(..) if self.token.is_keyword(kw::Box) => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
let binary = self.mk_binary(source_map::respan(cur_op_span, ast_op), lhs, rhs);
self.mk_expr(span, binary, ThinVec::new())
}
- AssocOp::Assign =>
- self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()),
+ AssocOp::Assign => self.mk_expr(span, ExprKind::Assign(lhs, rhs), ThinVec::new()),
AssocOp::ObsoleteInPlace =>
self.mk_expr(span, ExprKind::ObsoleteInPlace(lhs, rhs), ThinVec::new()),
AssocOp::AssignOp(k) => {
Ok(lhs)
}
- fn could_ascription_be_path(&self, node: &ast::ExprKind) -> bool {
- self.token.is_ident() &&
- if let ast::ExprKind::Path(..) = node { true } else { false } &&
- !self.token.is_reserved_ident() && // v `foo:bar(baz)`
- self.look_ahead(1, |t| t == &token::OpenDelim(token::Paren)) ||
- self.look_ahead(1, |t| t == &token::Lt) && // `foo:bar<baz`
- self.look_ahead(2, |t| t.is_ident()) ||
- self.look_ahead(1, |t| t == &token::Colon) && // `foo:bar:baz`
- self.look_ahead(2, |t| t.is_ident()) ||
- self.look_ahead(1, |t| t == &token::ModSep) && // `foo:bar::baz`
- self.look_ahead(2, |t| t.is_ident())
- }
-
- fn bad_type_ascription(
- &self,
- err: &mut DiagnosticBuilder<'a>,
- lhs_span: Span,
- cur_op_span: Span,
- next_sp: Span,
- maybe_path: bool,
- ) {
- err.span_label(self.span, "expecting a type here because of type ascription");
- let cm = self.sess.source_map();
- let next_pos = cm.lookup_char_pos(next_sp.lo());
- let op_pos = cm.lookup_char_pos(cur_op_span.hi());
- if op_pos.line != next_pos.line {
- err.span_suggestion(
- cur_op_span,
- "try using a semicolon",
- ";".to_string(),
- Applicability::MaybeIncorrect,
- );
- } else {
- if maybe_path {
- err.span_suggestion(
- cur_op_span,
- "maybe you meant to write a path separator here",
- "::".to_string(),
- Applicability::MaybeIncorrect,
- );
- } else {
- err.note("type ascription is a nightly-only feature that lets \
- you annotate an expression with a type: `<expr>: <type>`");
- err.span_note(
- lhs_span,
- "this expression expects an ascribed type after the colon",
- );
- err.help("this might be indicative of a syntax error elsewhere");
- }
- }
- }
-
fn parse_assoc_op_cast(&mut self, lhs: P<Expr>, lhs_span: Span,
expr_kind: fn(P<Expr>, P<Ty>) -> ExprKind)
-> PResult<'a, P<Expr>> {
/// Parses an `if` or `if let` expression (`if` token already eaten).
fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
- if self.check_keyword(keywords::Let) {
+ if self.check_keyword(kw::Let) {
return self.parse_if_let_expr(attrs);
}
let lo = self.prev_span;
// verify that the last statement is either an implicit return (no `;`) or an explicit
// return. This won't catch blocks with an explicit `return`, but that would be caught by
// the dead code lint.
- if self.eat_keyword(keywords::Else) || !cond.returns() {
+ if self.eat_keyword(kw::Else) || !cond.returns() {
let sp = self.sess.source_map().next_point(lo);
let mut err = self.diagnostic()
.struct_span_err(sp, "missing condition for `if` statement");
})?;
let mut els: Option<P<Expr>> = None;
let mut hi = thn.span;
- if self.eat_keyword(keywords::Else) {
+ if self.eat_keyword(kw::Else) {
let elexpr = self.parse_else_expr()?;
hi = elexpr.span;
els = Some(elexpr);
fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
let lo = self.prev_span;
- self.expect_keyword(keywords::Let)?;
+ self.expect_keyword(kw::Let)?;
let pats = self.parse_pats()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let thn = self.parse_block()?;
- let (hi, els) = if self.eat_keyword(keywords::Else) {
+ let (hi, els) = if self.eat_keyword(kw::Else) {
let expr = self.parse_else_expr()?;
(expr.span, Some(expr))
} else {
-> PResult<'a, P<Expr>>
{
let lo = self.span;
- let movability = if self.eat_keyword(keywords::Static) {
+ let movability = if self.eat_keyword(kw::Static) {
Movability::Static
} else {
Movability::Movable
} else {
IsAsync::NotAsync
};
- let capture_clause = if self.eat_keyword(keywords::Move) {
+ let capture_clause = if self.eat_keyword(kw::Move) {
CaptureBy::Value
} else {
CaptureBy::Ref
// `else` token already eaten
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
- if self.eat_keyword(keywords::If) {
+ if self.eat_keyword(kw::If) {
return self.parse_if_expr(ThinVec::new());
} else {
let blk = self.parse_block()?;
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
let pat = self.parse_top_level_pat()?;
- if !self.eat_keyword(keywords::In) {
+ if !self.eat_keyword(kw::In) {
let in_span = self.prev_span.between(self.span);
let mut err = self.sess.span_diagnostic
.struct_span_err(in_span, "missing `in` in `for` loop");
err.emit();
}
let in_span = self.prev_span;
- if self.eat_keyword(keywords::In) {
+ if self.eat_keyword(kw::In) {
// a common typo: `for _ in in bar {}`
let mut err = self.sess.span_diagnostic.struct_span_err(
self.prev_span,
fn parse_while_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
- if self.token.is_keyword(keywords::Let) {
+ if self.token.is_keyword(kw::Let) {
return self.parse_while_let_expr(opt_label, span_lo, attrs);
}
let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
- self.expect_keyword(keywords::Let)?;
+ self.expect_keyword(kw::Let)?;
let pats = self.parse_pats()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
-> PResult<'a, P<Expr>>
{
let span_lo = self.span;
- self.expect_keyword(keywords::Async)?;
- let capture_clause = if self.eat_keyword(keywords::Move) {
+ self.expect_keyword(kw::Async)?;
+ let capture_clause = if self.eat_keyword(kw::Move) {
CaptureBy::Value
} else {
CaptureBy::Ref
{
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
- if self.eat_keyword(keywords::Catch) {
+ if self.eat_keyword(kw::Catch) {
let mut error = self.struct_span_err(self.prev_span,
"keyword `catch` cannot follow a `try` block");
error.help("try using `match` on the result of the `try` block instead");
crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
let pats = self.parse_pats()?;
- let guard = if self.eat_keyword(keywords::If) {
+ let guard = if self.eat_keyword(kw::If) {
Some(Guard::If(self.parse_expr()?))
} else {
None
let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
&& self.token != token::CloseDelim(token::Brace);
+ let hi = self.span;
+
if require_comma {
let cm = self.sess.source_map();
self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])
pats,
guard,
body: expr,
+ span: lo.to(hi),
})
}
(pat, fieldname, false)
} else {
// Parsing a pattern of the form "(box) (ref) (mut) fieldname"
- let is_box = self.eat_keyword(keywords::Box);
+ let is_box = self.eat_keyword(kw::Box);
let boxed_span = self.span;
- let is_ref = self.eat_keyword(keywords::Ref);
- let is_mut = self.eat_keyword(keywords::Mut);
+ let is_ref = self.eat_keyword(kw::Ref);
+ let is_mut = self.eat_keyword(kw::Mut);
let fieldname = self.parse_ident()?;
hi = self.prev_span;
pat = PatKind::Slice(before, slice, after);
}
// At this point, token != &, &&, (, [
- _ => if self.eat_keyword(keywords::Underscore) {
+ _ => if self.eat_keyword(kw::Underscore) {
// Parse _
pat = PatKind::Wild;
- } else if self.eat_keyword(keywords::Mut) {
+ } else if self.eat_keyword(kw::Mut) {
// Parse mut ident @ pat / mut ref ident @ pat
let mutref_span = self.prev_span.to(self.span);
- let binding_mode = if self.eat_keyword(keywords::Ref) {
+ let binding_mode = if self.eat_keyword(kw::Ref) {
self.diagnostic()
.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
.span_suggestion(
BindingMode::ByValue(Mutability::Mutable)
};
pat = self.parse_pat_ident(binding_mode)?;
- } else if self.eat_keyword(keywords::Ref) {
+ } else if self.eat_keyword(kw::Ref) {
// Parse ref ident @ pat / ref mut ident @ pat
let mutbl = self.parse_mutability();
pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
- } else if self.eat_keyword(keywords::Box) {
+ } else if self.eat_keyword(kw::Box) {
// Parse box pat
let subpat = self.parse_pat_with_range_pat(false, None)?;
pat = PatKind::Box(subpat);
Ok(self.parse_stmt_(true))
}
- // Eat tokens until we can be relatively sure we reached the end of the
- // statement. This is something of a best-effort heuristic.
- //
- // We terminate when we find an unmatched `}` (without consuming it).
- fn recover_stmt(&mut self) {
- self.recover_stmt_(SemiColonMode::Ignore, BlockMode::Ignore)
- }
-
- // If `break_on_semi` is `Break`, then we will stop consuming tokens after
- // finding (and consuming) a `;` outside of `{}` or `[]` (note that this is
- // approximate - it can mean we break too early due to macros, but that
- // should only lead to sub-optimal recovery, not inaccurate parsing).
- //
- // If `break_on_block` is `Break`, then we will stop consuming tokens
- // after finding (and consuming) a brace-delimited block.
- fn recover_stmt_(&mut self, break_on_semi: SemiColonMode, break_on_block: BlockMode) {
- let mut brace_depth = 0;
- let mut bracket_depth = 0;
- let mut in_block = false;
- debug!("recover_stmt_ enter loop (semi={:?}, block={:?})",
- break_on_semi, break_on_block);
- loop {
- debug!("recover_stmt_ loop {:?}", self.token);
- match self.token {
- token::OpenDelim(token::DelimToken::Brace) => {
- brace_depth += 1;
- self.bump();
- if break_on_block == BlockMode::Break &&
- brace_depth == 1 &&
- bracket_depth == 0 {
- in_block = true;
- }
- }
- token::OpenDelim(token::DelimToken::Bracket) => {
- bracket_depth += 1;
- self.bump();
- }
- token::CloseDelim(token::DelimToken::Brace) => {
- if brace_depth == 0 {
- debug!("recover_stmt_ return - close delim {:?}", self.token);
- break;
- }
- brace_depth -= 1;
- self.bump();
- if in_block && bracket_depth == 0 && brace_depth == 0 {
- debug!("recover_stmt_ return - block end {:?}", self.token);
- break;
- }
- }
- token::CloseDelim(token::DelimToken::Bracket) => {
- bracket_depth -= 1;
- if bracket_depth < 0 {
- bracket_depth = 0;
- }
- self.bump();
- }
- token::Eof => {
- debug!("recover_stmt_ return - Eof");
- break;
- }
- token::Semi => {
- self.bump();
- if break_on_semi == SemiColonMode::Break &&
- brace_depth == 0 &&
- bracket_depth == 0 {
- debug!("recover_stmt_ return - Semi");
- break;
- }
- }
- token::Comma => {
- if break_on_semi == SemiColonMode::Comma &&
- brace_depth == 0 &&
- bracket_depth == 0 {
- debug!("recover_stmt_ return - Semi");
- break;
- } else {
- self.bump();
- }
- }
- _ => {
- self.bump()
- }
- }
- }
- }
-
fn parse_stmt_(&mut self, macro_legacy_warnings: bool) -> Option<Stmt> {
self.parse_stmt_without_recovery(macro_legacy_warnings).unwrap_or_else(|mut e| {
e.emit();
}
fn is_async_block(&self) -> bool {
- self.token.is_keyword(keywords::Async) &&
+ self.token.is_keyword(kw::Async) &&
(
( // `async move {`
- self.look_ahead(1, |t| t.is_keyword(keywords::Move)) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Move)) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
) || ( // `async {`
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
}
fn is_async_fn(&self) -> bool {
- self.token.is_keyword(keywords::Async) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
+ self.token.is_keyword(kw::Async) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Fn))
}
fn is_do_catch_block(&self) -> bool {
- self.token.is_keyword(keywords::Do) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Catch)) &&
+ self.token.is_keyword(kw::Do) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Catch)) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
!self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
}
fn is_try_block(&self) -> bool {
- self.token.is_keyword(keywords::Try) &&
+ self.token.is_keyword(kw::Try) &&
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
self.span.rust_2018() &&
// prevent `while try {} {}`, `if try {} {} else {}`, etc.
}
fn is_union_item(&self) -> bool {
- self.token.is_keyword(keywords::Union) &&
+ self.token.is_keyword(kw::Union) &&
self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
}
fn is_crate_vis(&self) -> bool {
- self.token.is_keyword(keywords::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
+ self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
}
fn is_existential_type_decl(&self) -> bool {
- self.token.is_keyword(keywords::Existential) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Type))
+ self.token.is_keyword(kw::Existential) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Type))
}
fn is_auto_trait_item(&self) -> bool {
// auto trait
- (self.token.is_keyword(keywords::Auto)
- && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
+ (self.token.is_keyword(kw::Auto)
+ && self.look_ahead(1, |t| t.is_keyword(kw::Trait)))
|| // unsafe auto trait
- (self.token.is_keyword(keywords::Unsafe) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Auto)) &&
- self.look_ahead(2, |t| t.is_keyword(keywords::Trait)))
+ (self.token.is_keyword(kw::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Auto)) &&
+ self.look_ahead(2, |t| t.is_keyword(kw::Trait)))
}
fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
-> PResult<'a, Option<P<Item>>> {
let token_lo = self.span;
let (ident, def) = match self.token {
- token::Ident(ident, false) if ident.name == keywords::Macro.name() => {
+ token::Ident(ident, false) if ident.name == kw::Macro => {
self.bump();
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
- Ok(Some(if self.eat_keyword(keywords::Let) {
+ Ok(Some(if self.eat_keyword(kw::Let) {
Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Local(self.parse_local(attrs.into())?),
// it's a macro invocation
let id = match self.token {
- token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier
+ token::OpenDelim(_) => Ident::invalid(), // no special identifier
_ => self.parse_ident()?,
};
_ => {
// we only expect an ident if we didn't parse one
// above.
- let ident_str = if id.name == keywords::Invalid.name() {
+ let ident_str = if id.name == kw::Invalid {
"identifier, "
} else {
""
MacStmtStyle::NoBraces
};
- if id.name == keywords::Invalid.name() {
+ if id.name == kw::Invalid {
let mac = respan(lo.to(hi), Mac_ { path: pth, tts, delim });
let node = if delim == MacDelimiter::Brace ||
self.token == token::Semi || self.token == token::Eof {
let tok = self.this_token_descr();
let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
let do_not_suggest_help =
- self.token.is_keyword(keywords::In) || self.token == token::Colon;
+ self.token.is_keyword(kw::In) || self.token == token::Colon;
if self.token.is_ident_named("and") {
e.span_suggestion_short(
let is_bound_start = self.check_path() || self.check_lifetime() ||
self.check(&token::Not) || // used for error reporting only
self.check(&token::Question) ||
- self.check_keyword(keywords::For) ||
+ self.check_keyword(kw::For) ||
self.check(&token::OpenDelim(token::Paren));
if is_bound_start {
let lo = self.span;
}
fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
- self.expect_keyword(keywords::Const)?;
+ self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
bounds,
kind: ast::GenericParamKind::Lifetime,
});
- } else if self.check_keyword(keywords::Const) {
+ } else if self.check_keyword(kw::Const) {
// Parse const parameter.
params.push(self.parse_const_param(attrs)?);
} else if self.check_ident() {
span: syntax_pos::DUMMY_SP,
};
- if !self.eat_keyword(keywords::Where) {
+ if !self.eat_keyword(kw::Where) {
return Ok(where_clause);
}
let lo = self.prev_span;
_ => unreachable!()
};
let isolated_self = |this: &mut Self, n| {
- this.look_ahead(n, |t| t.is_keyword(keywords::SelfLower)) &&
+ this.look_ahead(n, |t| t.is_keyword(kw::SelfLower)) &&
this.look_ahead(n + 1, |t| t != &token::ModSep)
};
(if isolated_self(self, 1) {
self.bump();
SelfKind::Region(None, Mutability::Immutable)
- } else if self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) &&
+ } else if self.look_ahead(1, |t| t.is_keyword(kw::Mut)) &&
isolated_self(self, 2) {
self.bump();
self.bump();
let lt = self.expect_lifetime();
SelfKind::Region(Some(lt), Mutability::Immutable)
} else if self.look_ahead(1, |t| t.is_lifetime()) &&
- self.look_ahead(2, |t| t.is_keyword(keywords::Mut)) &&
+ self.look_ahead(2, |t| t.is_keyword(kw::Mut)) &&
isolated_self(self, 3) {
self.bump();
let lt = self.expect_lifetime();
} else {
SelfKind::Value(Mutability::Immutable)
}, eself_ident, eself_hi)
- } else if self.token.is_keyword(keywords::Mut) &&
+ } else if self.token.is_keyword(kw::Mut) &&
isolated_self(self, 1) {
// mut self
// mut self: TYPE
/// Returns `true` if we are looking at `const ID`
/// (returns `false` for things like `const fn`, etc.).
fn is_const_item(&self) -> bool {
- self.token.is_keyword(keywords::Const) &&
- !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
- !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
+ self.token.is_keyword(kw::Const) &&
+ !self.look_ahead(1, |t| t.is_keyword(kw::Fn)) &&
+ !self.look_ahead(1, |t| t.is_keyword(kw::Unsafe))
}
/// Parses all the "front matter" for a `fn` declaration, up to
Abi
)>
{
- let is_const_fn = self.eat_keyword(keywords::Const);
+ let is_const_fn = self.eat_keyword(kw::Const);
let const_span = self.prev_span;
let unsafety = self.parse_unsafety();
let asyncness = self.parse_asyncness();
let (constness, unsafety, abi) = if is_const_fn {
(respan(const_span, Constness::Const), unsafety, Abi::Rust)
} else {
- let abi = if self.eat_keyword(keywords::Extern) {
+ let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
(respan(self.prev_span, Constness::NotConst), unsafety, abi)
};
- if !self.eat_keyword(keywords::Fn) {
+ if !self.eat_keyword(kw::Fn) {
// It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
// account for this.
} else if self.is_const_item() {
// This parses the grammar:
// ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
- self.expect_keyword(keywords::Const)?;
+ self.expect_keyword(kw::Const)?;
let name = self.parse_ident()?;
self.expect(&token::Colon)?;
let typ = self.parse_ty()?;
// code copied from parse_macro_use_or_failure... abstraction!
if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
// method macro
- Ok((keywords::Invalid.ident(), vec![], ast::Generics::default(),
+ Ok((Ident::invalid(), vec![], ast::Generics::default(),
ast::ImplItemKind::Macro(mac)))
} else {
let (constness, unsafety, mut asyncness, abi) = self.parse_fn_front_matter()?;
self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
t == &token::Colon || t == &token::Eq) ||
- self.look_ahead(1, |t| t.is_keyword(keywords::Const)))
+ self.look_ahead(1, |t| t.is_keyword(kw::Const)))
}
fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
};
// Parse both types and traits as a type, then reinterpret if necessary.
- let err_path = |span| ast::Path::from_ident(Ident::new(keywords::Invalid.name(), span));
- let ty_first = if self.token.is_keyword(keywords::For) &&
+ let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span));
+ let ty_first = if self.token.is_keyword(kw::For) &&
self.look_ahead(1, |t| t != &token::Lt) {
let span = self.prev_span.between(self.span);
self.struct_span_err(span, "missing trait in a trait impl").emit();
};
// If `for` is missing we try to recover.
- let has_for = self.eat_keyword(keywords::For);
+ let has_for = self.eat_keyword(kw::For);
let missing_for_span = self.prev_span.between(self.span);
let ty_second = if self.token == token::DotDot {
}
};
- Ok((keywords::Invalid.ident(), item_kind, Some(attrs)))
+ Ok((Ident::invalid(), item_kind, Some(attrs)))
}
fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
- if self.eat_keyword(keywords::For) {
+ if self.eat_keyword(kw::For) {
self.expect_lt()?;
let params = self.parse_generic_params()?;
self.expect_gt()?;
// Otherwise if we look ahead and see a paren we parse a tuple-style
// struct.
- let vdata = if self.token.is_keyword(keywords::Where) {
+ let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
if self.eat(&token::Semi) {
// If we see a: `struct Foo<T> where T: Copy;` style decl.
let mut generics = self.parse_generics()?;
- let vdata = if self.token.is_keyword(keywords::Where) {
+ let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
Ok((class_name, ItemKind::Union(vdata, generics), None))
}
- fn consume_block(&mut self, delim: token::DelimToken) {
- let mut brace_depth = 0;
- loop {
- if self.eat(&token::OpenDelim(delim)) {
- brace_depth += 1;
- } else if self.eat(&token::CloseDelim(delim)) {
- if brace_depth == 0 {
- return;
- } else {
- brace_depth -= 1;
- continue;
- }
- } else if self.token == token::Eof || self.eat(&token::CloseDelim(token::NoDelim)) {
- return;
- } else {
- self.bump();
- }
- }
- }
-
fn parse_record_struct_body(
&mut self,
) -> PResult<'a, (Vec<StructField>, /* recovered */ bool)> {
pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
maybe_whole!(self, NtVis, |x| x);
- self.expected_tokens.push(TokenType::Keyword(keywords::Crate));
+ self.expected_tokens.push(TokenType::Keyword(kw::Crate));
if self.is_crate_vis() {
self.bump(); // `crate`
return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate)));
}
- if !self.eat_keyword(keywords::Pub) {
+ if !self.eat_keyword(kw::Pub) {
// We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
// keyword to grab a span from for inherited visibility; an empty span at the
// beginning of the current token would seem to be the "Schelling span".
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
// by the following tokens.
- if self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) &&
+ if self.look_ahead(1, |t| t.is_keyword(kw::Crate)) &&
self.look_ahead(2, |t| t != &token::ModSep) // account for `pub(crate::foo)`
{
// `pub(crate)`
VisibilityKind::Crate(CrateSugar::PubCrate),
);
return Ok(vis)
- } else if self.look_ahead(1, |t| t.is_keyword(keywords::In)) {
+ } else if self.look_ahead(1, |t| t.is_keyword(kw::In)) {
// `pub(in path)`
self.bump(); // `(`
self.bump(); // `in`
});
return Ok(vis)
} else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Super) ||
- t.is_keyword(keywords::SelfLower))
+ self.look_ahead(1, |t| t.is_keyword(kw::Super) ||
+ t.is_keyword(kw::SelfLower))
{
// `pub(self)` or `pub(super)`
self.bump(); // `(`
/// Parses defaultness (i.e., `default` or nothing).
fn parse_defaultness(&mut self) -> Defaultness {
// `pub` is included for better error messages
- if self.check_keyword(keywords::Default) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Impl) ||
- t.is_keyword(keywords::Const) ||
- t.is_keyword(keywords::Fn) ||
- t.is_keyword(keywords::Unsafe) ||
- t.is_keyword(keywords::Extern) ||
- t.is_keyword(keywords::Type) ||
- t.is_keyword(keywords::Pub)) {
+ if self.check_keyword(kw::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Impl) ||
+ t.is_keyword(kw::Const) ||
+ t.is_keyword(kw::Fn) ||
+ t.is_keyword(kw::Unsafe) ||
+ t.is_keyword(kw::Extern) ||
+ t.is_keyword(kw::Type) ||
+ t.is_keyword(kw::Pub)) {
self.bump(); // `default`
Defaultness::Default
} else {
let attr = Attribute {
id: attr::mk_attr_id(),
style: ast::AttrStyle::Outer,
- path: ast::Path::from_ident(Ident::from_str("warn_directory_ownership")),
+ path: ast::Path::from_ident(
+ Ident::with_empty_ctxt(sym::warn_directory_ownership)),
tokens: TokenStream::empty(),
is_sugared_doc: false,
span: syntax_pos::DUMMY_SP,
/// Parses a function declaration from a foreign module.
fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let (ident, mut generics) = self.parse_fn_header()?;
let decl = self.parse_fn_decl(true)?;
/// Parses a type from a foreign module.
fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
- self.expect_keyword(keywords::Type)?;
+ self.expect_keyword(kw::Type)?;
let ident = self.parse_ident()?;
let hi = self.span;
let error_msg = "crate name using dashes are not valid in `extern crate` statements";
let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
in the code";
- let mut ident = if self.token.is_keyword(keywords::SelfLower) {
+ let mut ident = if self.token.is_keyword(kw::SelfLower) {
self.parse_path_segment_ident()
} else {
self.parse_ident()
abi,
items: foreign_items
};
- let invalid = keywords::Invalid.ident();
+ let invalid = Ident::invalid();
Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
}
fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
// This parses the grammar:
// Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
- if self.check_keyword(keywords::Type) ||
- self.check_keyword(keywords::Existential) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Type)) {
- let existential = self.eat_keyword(keywords::Existential);
- assert!(self.eat_keyword(keywords::Type));
+ if self.check_keyword(kw::Type) ||
+ self.check_keyword(kw::Existential) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Type)) {
+ let existential = self.eat_keyword(kw::Existential);
+ assert!(self.eat_keyword(kw::Type));
Some(self.parse_existential_or_alias(existential))
} else {
None
/// Parses the part of an enum declaration following the `{`.
fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> {
let mut variants = Vec::new();
- let mut all_nullary = true;
let mut any_disr = vec![];
while self.token != token::CloseDelim(token::Brace) {
let variant_attrs = self.parse_outer_attributes()?;
let ident = self.parse_ident()?;
if self.check(&token::OpenDelim(token::Brace)) {
// Parse a struct variant.
- all_nullary = false;
let (fields, recovered) = self.parse_record_struct_body()?;
struct_def = VariantData::Struct(fields, recovered);
} else if self.check(&token::OpenDelim(token::Paren)) {
- all_nullary = false;
struct_def = VariantData::Tuple(
self.parse_tuple_struct_body()?,
ast::DUMMY_NODE_ID,
}
}
self.expect(&token::CloseDelim(token::Brace))?;
- if !any_disr.is_empty() && !all_nullary {
- let mut err = self.struct_span_err(
- any_disr.clone(),
- "discriminator values can only be used with a field-less enum",
- );
- for sp in any_disr {
- err.span_label(sp, "only valid in field-less enums");
- }
- err.emit();
- }
+ self.maybe_report_invalid_custom_discriminants(any_disr, &variants);
Ok(ast::EnumDef { variants })
}
/// the `extern` keyword, if one is found.
fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
match self.token {
- token::Literal(token::Str_(s), suf) | token::Literal(token::StrRaw(s, _), suf) => {
+ token::Literal(token::Lit { kind: token::Str, symbol, suffix }) |
+ token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => {
let sp = self.span;
- self.expect_no_suffix(sp, "an ABI spec", suf);
+ self.expect_no_suffix(sp, "an ABI spec", suffix);
self.bump();
- match abi::lookup(&s.as_str()) {
+ match abi::lookup(&symbol.as_str()) {
Some(abi) => Ok(Some(abi)),
None => {
let prev_span = self.prev_span;
prev_span,
E0703,
"invalid ABI: found `{}`",
- s);
+ symbol);
err.span_label(prev_span, "invalid ABI");
err.help(&format!("valid ABIs: {}", abi::all_names().join(", ")));
err.emit();
}
fn is_static_global(&mut self) -> bool {
- if self.check_keyword(keywords::Static) {
+ if self.check_keyword(kw::Static) {
// Check if this could be a closure
!self.look_ahead(1, |token| {
- if token.is_keyword(keywords::Move) {
+ if token.is_keyword(kw::Move) {
return true;
}
match *token {
let visibility = self.parse_visibility(false)?;
- if self.eat_keyword(keywords::Use) {
+ if self.eat_keyword(kw::Use) {
// USE ITEM
let item_ = ItemKind::Use(P(self.parse_use_tree()?));
self.expect(&token::Semi)?;
let span = lo.to(self.prev_span);
- let item = self.mk_item(span, keywords::Invalid.ident(), item_, visibility, attrs);
+ let item =
+ self.mk_item(span, Ident::invalid(), item_, visibility, attrs);
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Extern) {
- if self.eat_keyword(keywords::Crate) {
+ if self.eat_keyword(kw::Extern) {
+ if self.eat_keyword(kw::Crate) {
return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?));
}
let opt_abi = self.parse_opt_abi()?;
- if self.eat_keyword(keywords::Fn) {
+ if self.eat_keyword(kw::Fn) {
// EXTERN FUNCTION ITEM
let fn_span = self.prev_span;
let abi = opt_abi.unwrap_or(Abi::C);
if self.is_static_global() {
self.bump();
// STATIC ITEM
- let m = if self.eat_keyword(keywords::Mut) {
+ let m = if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else {
Mutability::Immutable
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Const) {
+ if self.eat_keyword(kw::Const) {
let const_span = self.prev_span;
- if self.check_keyword(keywords::Fn)
- || (self.check_keyword(keywords::Unsafe)
- && self.look_ahead(1, |t| t.is_keyword(keywords::Fn))) {
+ if self.check_keyword(kw::Fn)
+ || (self.check_keyword(kw::Unsafe)
+ && self.look_ahead(1, |t| t.is_keyword(kw::Fn))) {
// CONST FUNCTION ITEM
let unsafety = self.parse_unsafety();
self.bump();
}
// CONST ITEM
- if self.eat_keyword(keywords::Mut) {
+ if self.eat_keyword(kw::Mut) {
let prev_span = self.prev_span;
let mut err = self.diagnostic()
.struct_span_err(prev_span, "const globals cannot be mutable");
// `unsafe async fn` or `async fn`
if (
- self.check_keyword(keywords::Unsafe) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Async))
+ self.check_keyword(kw::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Async))
) || (
- self.check_keyword(keywords::Async) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
+ self.check_keyword(kw::Async) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Fn))
)
{
// ASYNC FUNCTION ITEM
let unsafety = self.parse_unsafety();
- self.expect_keyword(keywords::Async)?;
+ self.expect_keyword(kw::Async)?;
let async_span = self.prev_span;
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(unsafety,
}
return Ok(Some(item));
}
- if self.check_keyword(keywords::Unsafe) &&
- (self.look_ahead(1, |t| t.is_keyword(keywords::Trait)) ||
- self.look_ahead(1, |t| t.is_keyword(keywords::Auto)))
+ if self.check_keyword(kw::Unsafe) &&
+ (self.look_ahead(1, |t| t.is_keyword(kw::Trait)) ||
+ self.look_ahead(1, |t| t.is_keyword(kw::Auto)))
{
// UNSAFE TRAIT ITEM
self.bump(); // `unsafe`
- let is_auto = if self.eat_keyword(keywords::Trait) {
+ let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
} else {
- self.expect_keyword(keywords::Auto)?;
- self.expect_keyword(keywords::Trait)?;
+ self.expect_keyword(kw::Auto)?;
+ self.expect_keyword(kw::Trait)?;
IsAuto::Yes
};
let (ident, item_, extra_attrs) =
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.check_keyword(keywords::Impl) ||
- self.check_keyword(keywords::Unsafe) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) ||
- self.check_keyword(keywords::Default) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) ||
- self.check_keyword(keywords::Default) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe)) {
+ if self.check_keyword(kw::Impl) ||
+ self.check_keyword(kw::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Impl)) ||
+ self.check_keyword(kw::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Impl)) ||
+ self.check_keyword(kw::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Unsafe)) {
// IMPL ITEM
let defaultness = self.parse_defaultness();
let unsafety = self.parse_unsafety();
- self.expect_keyword(keywords::Impl)?;
+ self.expect_keyword(kw::Impl)?;
let (ident, item, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?;
let span = lo.to(self.prev_span);
return Ok(Some(self.mk_item(span, ident, item, visibility,
maybe_append(attrs, extra_attrs))));
}
- if self.check_keyword(keywords::Fn) {
+ if self.check_keyword(kw::Fn) {
// FUNCTION ITEM
self.bump();
let fn_span = self.prev_span;
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.check_keyword(keywords::Unsafe)
+ if self.check_keyword(kw::Unsafe)
&& self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
// UNSAFE FUNCTION ITEM
self.bump(); // `unsafe`
// `{` is also expected after `unsafe`, in case of error, include it in the diagnostic
self.check(&token::OpenDelim(token::Brace));
- let abi = if self.eat_keyword(keywords::Extern) {
+ let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(Unsafety::Unsafe,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Mod) {
+ if self.eat_keyword(kw::Mod) {
// MODULE ITEM
let (ident, item_, extra_attrs) =
self.parse_item_mod(&attrs[..])?;
attrs);
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Enum) {
+ if self.eat_keyword(kw::Enum) {
// ENUM ITEM
let (ident, item_, extra_attrs) = self.parse_item_enum()?;
let prev_span = self.prev_span;
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.check_keyword(keywords::Trait)
- || (self.check_keyword(keywords::Auto)
- && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
+ if self.check_keyword(kw::Trait)
+ || (self.check_keyword(kw::Auto)
+ && self.look_ahead(1, |t| t.is_keyword(kw::Trait)))
{
- let is_auto = if self.eat_keyword(keywords::Trait) {
+ let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
} else {
- self.expect_keyword(keywords::Auto)?;
- self.expect_keyword(keywords::Trait)?;
+ self.expect_keyword(kw::Auto)?;
+ self.expect_keyword(kw::Trait)?;
IsAuto::Yes
};
// TRAIT ITEM
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Struct) {
+ if self.eat_keyword(kw::Struct) {
// STRUCT ITEM
let (ident, item_, extra_attrs) = self.parse_item_struct()?;
let prev_span = self.prev_span;
// FOREIGN STATIC ITEM
// Treat `const` as `static` for error recovery, but don't add it to expected tokens.
- if self.check_keyword(keywords::Static) || self.token.is_keyword(keywords::Const) {
- if self.token.is_keyword(keywords::Const) {
+ if self.check_keyword(kw::Static) || self.token.is_keyword(kw::Const) {
+ if self.token.is_keyword(kw::Const) {
self.diagnostic()
.struct_span_err(self.span, "extern items cannot be `const`")
.span_suggestion(
return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?);
}
// FOREIGN FUNCTION ITEM
- if self.check_keyword(keywords::Fn) {
+ if self.check_keyword(kw::Fn) {
return Ok(self.parse_item_foreign_fn(visibility, lo, attrs)?);
}
// FOREIGN TYPE ITEM
- if self.check_keyword(keywords::Type) {
+ if self.check_keyword(kw::Type) {
return Ok(self.parse_item_foreign_type(visibility, lo, attrs)?);
}
Some(mac) => {
Ok(
ForeignItem {
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
span: lo.to(self.prev_span),
id: ast::DUMMY_NODE_ID,
attrs,
let id = if self.token.is_ident() {
self.parse_ident()?
} else {
- keywords::Invalid.ident() // no special identifier
+ Ident::invalid() // no special identifier
};
// eat a matched-delimiter token tree:
let (delim, tts) = self.expect_delimited_token_tree()?;
}
fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
- if self.eat_keyword(keywords::As) {
+ if self.eat_keyword(kw::As) {
self.parse_ident_or_underscore().map(Some)
} else {
Ok(None)
pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
let ret = match self.token {
- token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf),
- token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf),
+ token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
+ (symbol, ast::StrStyle::Cooked, suffix),
+ token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) =>
+ (symbol, ast::StrStyle::Raw(n), suffix),
_ => return None
};
self.bump();
).emit();
}
- /// Recover from `pub` keyword in places where it seems _reasonable_ but isn't valid.
- fn eat_bad_pub(&mut self) {
- if self.token.is_keyword(keywords::Pub) {
- match self.parse_visibility(false) {
- Ok(vis) => {
- let mut err = self.diagnostic()
- .struct_span_err(vis.span, "unnecessary visibility qualifier");
- err.span_label(vis.span, "`pub` not permitted here");
- err.emit();
- }
- Err(mut err) => err.emit(),
- }
- }
- }
-
/// When lowering a `async fn` to the HIR, we need to move all of the arguments of the function
/// into the generated closure so that they are dropped when the future is polled and not when
/// it is created.