(ident, ItemKind::Static(ty, m, expr))
} else if let Const::Yes(const_span) = self.parse_constness() {
// CONST ITEM
- self.recover_const_mut(const_span);
- let (ident, ty, expr) = self.parse_item_global(None)?;
- (ident, ItemKind::Const(def(), ty, expr))
+ if self.token.is_keyword(kw::Impl) {
+ // recover from `const impl`, suggest `impl const`
+ self.recover_const_impl(const_span, attrs, def())?
+ } else {
+ self.recover_const_mut(const_span);
+ let (ident, ty, expr) = self.parse_item_global(None)?;
+ (ident, ItemKind::Const(def(), ty, expr))
+ }
} else if self.check_keyword(kw::Trait) || self.check_auto_or_unsafe_trait_item() {
// TRAIT ITEM
self.parse_item_trait(attrs, lo)?
}
}
+    /// Recover on `const impl` with `const` already eaten.
+    ///
+    /// For a trait impl (`const impl Trait for Ty`), emits the
+    /// "expected identifier" error with a suggestion to write
+    /// `impl const Trait for Ty` instead, and returns the recovered item
+    /// with its constness set. For an inherent impl there is no valid
+    /// placement of `const`, so the plain error is returned instead.
+    fn recover_const_impl(
+        &mut self,
+        const_span: Span,
+        attrs: &mut Vec<Attribute>,
+        defaultness: Defaultness,
+    ) -> PResult<'a, ItemInfo> {
+        let impl_span = self.token.span;
+        // Build the "expected identifier, found keyword `impl`" error now,
+        // while the cursor still sits on `impl`; whether it gains a
+        // suggestion depends on which kind of impl we parse below.
+        let mut err = self.expected_ident_found();
+        let mut impl_info = self.parse_item_impl(attrs, defaultness)?;
+        match impl_info.1 {
+            // only try to recover if this is implementing a trait for a type
+            ItemKind::Impl { of_trait: Some(ref trai), ref mut constness, .. } => {
+                *constness = Const::Yes(const_span);
+
+                // Suggestion: delete everything from `const` up to (but not
+                // including) `impl`, and insert `const ` right before the
+                // trait path instead.
+                let before_trait = trai.path.span.shrink_to_lo();
+                let const_up_to_impl = const_span.with_hi(impl_span.lo());
+                err.multipart_suggestion(
+                    "you might have meant to write a const trait impl",
+                    vec![(const_up_to_impl, "".to_owned()), (before_trait, "const ".to_owned())],
+                    Applicability::MaybeIncorrect,
+                )
+                .emit();
+            }
+            // Inherent impl: `const` cannot apply, so report the error as-is.
+            ItemKind::Impl { .. } => return Err(err),
+            // `parse_item_impl` only ever produces `ItemKind::Impl`.
+            _ => unreachable!(),
+        }
+        Ok(impl_info)
+    }
+
/// Parse `["const" | ("static" "mut"?)] $ident ":" $ty (= $expr)?` with
/// `["const" | ("static" "mut"?)]` already parsed and stored in `m`.
///
use rustc_ast::ptr::P;
use rustc_ast::token::{self, DelimToken, Token, TokenKind};
use rustc_ast::tokenstream::{self, DelimSpan, LazyTokenStream, Spacing};
-use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree};
+use rustc_ast::tokenstream::{CreateTokenStream, TokenStream, TokenTree, TreeAndSpacing};
use rustc_ast::DUMMY_NODE_ID;
use rustc_ast::{self as ast, AnonConst, AttrStyle, AttrVec, Const, CrateSugar, Extern, Unsafe};
use rustc_ast::{Async, Expr, ExprKind, MacArgs, MacDelimiter, Mutability, StrLit};
// Counts the number of calls to `next` or `next_desugared`,
// depending on whether `desugar_doc_comments` is set.
num_next_calls: usize,
+ // During parsing, we may sometimes need to 'unglue' a
+ // glued token into two component tokens
+    // (e.g. '>>' into '>' and '>'), so that the parser
+ // can consume them one at a time. This process
+ // bypasses the normal capturing mechanism
+ // (e.g. `num_next_calls` will not be incremented),
+    // since the 'unglued' tokens do not exist in
+ // the original `TokenStream`.
+ //
+ // If we end up consuming both unglued tokens,
+ // then this is not an issue - we'll end up
+ // capturing the single 'glued' token.
+ //
+ // However, in certain circumstances, we may
+ // want to capture just the first 'unglued' token.
+ // For example, capturing the `Vec<u8>`
+ // in `Option<Vec<u8>>` requires us to unglue
+ // the trailing `>>` token. The `append_unglued_token`
+ // field is used to track this token - it gets
+ // appended to the captured stream when
+ // we evaluate a `LazyTokenStream`
+ append_unglued_token: Option<TreeAndSpacing>,
}
#[derive(Clone)]
stack: Vec::new(),
num_next_calls: 0,
desugar_doc_comments,
+ append_unglued_token: None,
},
desugar_doc_comments,
unmatched_angle_bracket_count: 0,
self.token_cursor.next()
};
self.token_cursor.num_next_calls += 1;
+        // We've retrieved a token from the underlying
+ // cursor, so we no longer need to worry about
+ // an unglued token. See `break_and_eat` for more details
+ self.token_cursor.append_unglued_token = None;
if next.span.is_dummy() {
// Tweak the location for better diagnostics, but keep syntactic context intact.
next.span = fallback_span.with_ctxt(next.span.ctxt());
let first_span = self.sess.source_map().start_point(self.token.span);
let second_span = self.token.span.with_lo(first_span.hi());
self.token = Token::new(first, first_span);
+ // Keep track of this token - if we end token capturing now,
+ // we'll want to append this token to the captured stream.
+ //
+ // If we consume any additional tokens, then this token
+ // is not needed (we'll capture the entire 'glued' token),
+ // and `next_tok` will set this field to `None`
+ self.token_cursor.append_unglued_token =
+ Some((TokenTree::Token(self.token.clone()), Spacing::Alone));
// Use the spacing of the glued token as the spacing
// of the unglued second token.
self.bump_with((Token::new(second, second_span), self.token_spacing));
num_calls: usize,
desugar_doc_comments: bool,
trailing_semi: bool,
+ append_unglued_token: Option<TreeAndSpacing>,
}
impl CreateTokenStream for LazyTokenStreamImpl {
fn create_token_stream(&self) -> TokenStream {
}))
.take(num_calls);
- make_token_stream(tokens)
+ make_token_stream(tokens, self.append_unglued_token.clone())
}
fn add_trailing_semi(&self) -> Box<dyn CreateTokenStream> {
if self.trailing_semi {
panic!("Called `add_trailing_semi` twice!");
}
+ if self.append_unglued_token.is_some() {
+ panic!(
+ "Cannot call `add_trailing_semi` when we have an unglued token {:?}",
+ self.append_unglued_token
+ );
+ }
let mut new = self.clone();
new.trailing_semi = true;
Box::new(new)
cursor_snapshot,
desugar_doc_comments: self.desugar_doc_comments,
trailing_semi: false,
+ append_unglued_token: self.token_cursor.append_unglued_token.clone(),
};
Ok((ret, Some(LazyTokenStream::new(lazy_impl))))
}
/// Converts a flattened iterator of tokens (including open and close delimiter tokens)
/// into a `TokenStream`, creating a `TokenTree::Delimited` for each matching pair
/// of open and close delims.
-fn make_token_stream(tokens: impl Iterator<Item = (Token, Spacing)>) -> TokenStream {
+fn make_token_stream(
+ tokens: impl Iterator<Item = (Token, Spacing)>,
+ append_unglued_token: Option<TreeAndSpacing>,
+) -> TokenStream {
#[derive(Debug)]
struct FrameData {
open: Span,
.inner
.push((delimited, Spacing::Alone));
}
- token => stack
- .last_mut()
- .expect("Bottom token frame is missing!")
- .inner
- .push((TokenTree::Token(token), spacing)),
+ token => {
+ stack
+ .last_mut()
+ .expect("Bottom token frame is missing!")
+ .inner
+ .push((TokenTree::Token(token), spacing));
+ }
}
}
- let final_buf = stack.pop().expect("Missing final buf!");
+ let mut final_buf = stack.pop().expect("Missing final buf!");
+ final_buf.inner.extend(append_unglued_token);
assert!(stack.is_empty(), "Stack should be empty: final_buf={:?} stack={:?}", final_buf, stack);
TokenStream::new(final_buf.inner)
}
Ok(ty::Visibility::Restricted(DefId::local(CRATE_DEF_INDEX)))
}
ast::VisibilityKind::Inherited => {
- Ok(ty::Visibility::Restricted(parent_scope.module.normal_ancestor_id))
+ if matches!(self.parent_scope.module.kind, ModuleKind::Def(DefKind::Enum, _, _)) {
+ // Any inherited visibility resolved directly inside an enum
+ // (e.g. variants or fields) inherits from the visibility of the enum.
+ let parent_enum = self.parent_scope.module.def_id().unwrap().expect_local();
+ Ok(self.r.visibilities[&parent_enum])
+ } else {
+ // If it's not in an enum, its visibility is restricted to the `mod` item
+ // that it's defined in.
+ Ok(ty::Visibility::Restricted(self.parent_scope.module.normal_ancestor_id))
+ }
}
ast::VisibilityKind::Restricted { ref path, id, .. } => {
// For visibilities we are not ready to provide correct implementation of "uniform
},
}
+#[derive(Debug)]
enum ModuleKind {
/// An anonymous module; e.g., just a block.
///
if no_accessible_remaining_fields {
self.report_no_accessible_fields(adt_ty, span);
} else {
- self.report_missing_field(adt_ty, span, remaining_fields);
+ self.report_missing_fields(adt_ty, span, remaining_fields);
}
}
///
/// error: aborting due to previous error
/// ```
- fn report_missing_field(
+ fn report_missing_fields(
&self,
adt_ty: Ty<'tcx>,
span: Span,
/// Chooses a balancing context involving the node as a child, thus between
/// the KV immediately to the left or to the right in the parent node.
/// Returns an `Err` if there is no parent.
+ /// Panics if the parent is empty.
///
- /// This method optimizes for a node that has fewer elements than its left
- /// and right siblings, if they exist, by preferring the left parent KV.
- /// Merging with the left sibling is faster, since we only need to move
+ /// Prefers the left side, to be optimal if the given node is somehow
+ /// underfull, meaning here only that it has fewer elements than its left
+ /// sibling and than its right sibling, if they exist. In that case,
+ /// merging with the left sibling is faster, since we only need to move
/// the node's N elements, instead of shifting them to the right and moving
/// more than N elements in front. Stealing from the left sibling is also
/// typically faster, since we only need to shift the node's N elements to
/// the left.
pub fn choose_parent_kv(self) -> Result<LeftOrRight<BalancingContext<'a, K, V>>, Self> {
match unsafe { ptr::read(&self) }.ascend() {
- Ok(parent) => match parent.left_kv() {
+ Ok(parent_edge) => match parent_edge.left_kv() {
Ok(left_parent_kv) => Ok(LeftOrRight::Left(BalancingContext {
parent: unsafe { ptr::read(&left_parent_kv) },
left_child: left_parent_kv.left_edge().descend(),
right_child: self,
})),
- Err(parent) => match parent.right_kv() {
+ Err(parent_edge) => match parent_edge.right_kv() {
Ok(right_parent_kv) => Ok(LeftOrRight::Right(BalancingContext {
parent: unsafe { ptr::read(&right_parent_kv) },
left_child: self,
right_child: right_parent_kv.right_edge().descend(),
})),
- Err(_) => unreachable!("empty non-root node"),
+ Err(_) => unreachable!("empty internal node"),
},
},
Err(root) => Err(root),
/// This does stealing similar to `steal_left` but steals multiple elements at once.
pub fn bulk_steal_left(&mut self, count: usize) {
+ assert!(count > 0);
unsafe {
let left_node = &mut self.left_child;
let old_left_len = left_node.len();
/// The symmetric clone of `bulk_steal_left`.
pub fn bulk_steal_right(&mut self, count: usize) {
+ assert!(count > 0);
unsafe {
let left_node = &mut self.left_child;
let old_left_len = left_node.len();
pos = unsafe { new_pos.cast_to_leaf_unchecked() };
// Only if we merged, the parent (if any) has shrunk, but skipping
- // the following step does not pay off in benchmarks.
+ // the following step otherwise does not pay off in benchmarks.
//
// SAFETY: We won't destroy or rearrange the leaf where `pos` is at
// by handling its parent recursively; at worst we will destroy or
// rearrange the parent through the grandparent, thus change the
- // leaf's parent pointer.
+ // link to the parent inside the leaf.
if let Ok(parent) = unsafe { pos.reborrow_mut() }.into_node().ascend() {
parent.into_node().handle_shrunk_node_recursively(handle_emptied_internal_root);
}
}
impl<'a, K: 'a, V: 'a> NodeRef<marker::Mut<'a>, K, V, marker::Internal> {
- /// Stocks up a possibly underfull internal node, recursively.
- /// Climbs up until it reaches an ancestor that has elements to spare or the root.
+ /// Stocks up a possibly underfull internal node and its ancestors,
+ /// until it reaches an ancestor that has elements to spare or is the root.
fn handle_shrunk_node_recursively<F: FnOnce()>(mut self, handle_emptied_internal_root: F) {
loop {
self = match self.len() {
) -> Option<NodeRef<marker::Mut<'a>, K, V, marker::Internal>> {
match self.forget_type().choose_parent_kv() {
Ok(Left(left_parent_kv)) => {
- debug_assert!(left_parent_kv.right_child_len() == MIN_LEN - 1);
+ debug_assert_eq!(left_parent_kv.right_child_len(), MIN_LEN - 1);
if left_parent_kv.can_merge() {
let pos = left_parent_kv.merge(None);
let parent_edge = unsafe { unwrap_unchecked(pos.into_node().ascend().ok()) };
}
}
Ok(Right(right_parent_kv)) => {
- debug_assert!(right_parent_kv.left_child_len() == MIN_LEN - 1);
+ debug_assert_eq!(right_parent_kv.left_child_len(), MIN_LEN - 1);
if right_parent_kv.can_merge() {
let pos = right_parent_kv.merge(None);
let parent_edge = unsafe { unwrap_unchecked(pos.into_node().ascend().ok()) };
--- /dev/null
+mod foo {
+    pub struct Pub { private: () }
+
+    pub enum Enum {
+        Variant { x: (), y: () },
+        Other
+    }
+
+    fn correct() {
+        Pub {};
+        //~^ ERROR missing field `private` in initializer of `Pub`
+        Enum::Variant { x: () };
+        //~^ ERROR missing field `y` in initializer of `Enum`
+    }
+}
+
+fn correct() {
+    foo::Pub {};
+    //~^ ERROR cannot construct `Pub` with struct literal syntax due to inaccessible fields
+}
+
+fn wrong() {
+    foo::Enum::Variant { x: () };
+    //~^ ERROR missing field `y` in initializer of `Enum`
+    foo::Enum::Variant { };
+    //~^ ERROR missing fields `x`, `y` in initializer of `Enum`
+}
+
+// NOTE(review): inserting lines above this point would shift the spans
+// expected by the companion .stderr file; keep additions at the end.
+fn main() {}
--- /dev/null
+error[E0063]: missing field `private` in initializer of `Pub`
+ --> $DIR/issue-79593.rs:10:9
+ |
+LL | Pub {};
+ | ^^^ missing `private`
+
+error[E0063]: missing field `y` in initializer of `Enum`
+ --> $DIR/issue-79593.rs:12:9
+ |
+LL | Enum::Variant { x: () };
+ | ^^^^^^^^^^^^^ missing `y`
+
+error: cannot construct `Pub` with struct literal syntax due to inaccessible fields
+ --> $DIR/issue-79593.rs:18:5
+ |
+LL | foo::Pub {};
+ | ^^^^^^^^
+
+error[E0063]: missing field `y` in initializer of `Enum`
+ --> $DIR/issue-79593.rs:23:5
+ |
+LL | foo::Enum::Variant { x: () };
+ | ^^^^^^^^^^^^^^^^^^ missing `y`
+
+error[E0063]: missing fields `x`, `y` in initializer of `Enum`
+ --> $DIR/issue-79593.rs:25:5
+ |
+LL | foo::Enum::Variant { };
+ | ^^^^^^^^^^^^^^^^^^ missing `x`, `y`
+
+error: aborting due to 5 previous errors
+
+For more information about this error, try `rustc --explain E0063`.
--- /dev/null
+// aux-build:test-macros.rs
+// compile-flags: -Z span-debug
+// check-pass
+
+// Tests that we properly handle parsing a nonterminal
+// where we have two consecutive angle brackets (one inside
+// the nonterminal, and one outside)
+
+#![no_std] // Don't load unnecessary hygiene information from std
+extern crate std;
+extern crate test_macros;
+
+macro_rules! trailing_angle {
+    (Option<$field:ty>) => {
+        test_macros::print_bang_consume!($field);
+    }
+}
+
+trailing_angle!(Option<Vec<u8>>);
+// NOTE(review): the expected stdout records spans from the lines above;
+// keep any new code below this point.
+fn main() {}
--- /dev/null
+PRINT-BANG INPUT (DISPLAY): Vec<u8>
+PRINT-BANG RE-COLLECTED (DISPLAY): Vec < u8 >
+PRINT-BANG INPUT (DEBUG): TokenStream [
+ Group {
+ delimiter: None,
+ stream: TokenStream [
+ Ident {
+ ident: "Vec",
+ span: $DIR/capture-unglued-token.rs:19:24: 19:27 (#0),
+ },
+ Punct {
+ ch: '<',
+ spacing: Alone,
+ span: $DIR/capture-unglued-token.rs:19:27: 19:28 (#0),
+ },
+ Ident {
+ ident: "u8",
+ span: $DIR/capture-unglued-token.rs:19:28: 19:30 (#0),
+ },
+ Punct {
+ ch: '>',
+ spacing: Alone,
+ span: $DIR/capture-unglued-token.rs:19:30: 19:31 (#0),
+ },
+ ],
+ span: $DIR/capture-unglued-token.rs:15:42: 15:48 (#4),
+ },
+]
--- /dev/null
+#![feature(const_trait_impl)]
+#![allow(incomplete_features)]
+
+struct Foo;
+
+const impl Foo { //~ ERROR: expected identifier, found keyword
+    fn bar() {}
+}
+
+// `const impl` on an inherent impl is not recovered (the parser bails with
+// the error above), so per the .stderr only that single error is reported.
+fn main() {
+    // shouldn't error here because we shouldn't have been able to recover above
+    Foo::bar();
+}
--- /dev/null
+error: expected identifier, found keyword `impl`
+ --> $DIR/const-impl-norecover.rs:6:7
+ |
+LL | const impl Foo {
+ | ^^^^ expected identifier, found keyword
+
+error: aborting due to previous error
+
--- /dev/null
+#![feature(const_trait_impl)]
+#![allow(incomplete_features)]
+
+trait Foo {}
+
+const impl Foo for i32 {} //~ ERROR: expected identifier, found keyword
+
+trait Bar {}
+
+const impl<T: Foo> Bar for T {} //~ ERROR: expected identifier, found keyword
+
+// The two trait impls above are recovered as `impl const ... {}`, so the
+// bounds below are still satisfied despite the parse errors.
+const fn still_implements<T: Bar>() {}
+
+const _: () = still_implements::<i32>();
+
+fn main() {}
--- /dev/null
+error: expected identifier, found keyword `impl`
+ --> $DIR/const-impl-recovery.rs:6:7
+ |
+LL | const impl Foo for i32 {}
+ | ^^^^ expected identifier, found keyword
+ |
+help: you might have meant to write a const trait impl
+ |
+LL | impl const Foo for i32 {}
+ |-- ^^^^^
+
+error: expected identifier, found keyword `impl`
+ --> $DIR/const-impl-recovery.rs:10:7
+ |
+LL | const impl<T: Foo> Bar for T {}
+ | ^^^^ expected identifier, found keyword
+ |
+help: you might have meant to write a const trait impl
+ |
+LL | impl<T: Foo> const Bar for T {}
+ |-- ^^^^^
+
+error: aborting due to 2 previous errors
+