name = "alloc"
version = "0.0.0"
dependencies = [
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
"rand 0.6.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rand_xorshift 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-std-workspace-core 1.0.0",
]
[[package]]
name = "compiler_builtins"
-version = "0.1.14"
+version = "0.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.1.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-std-workspace-core 1.0.0",
]
version = "0.3.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-std-workspace-core 1.0.0",
]
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-std-workspace-alloc 1.0.0",
"rustc-std-workspace-core 1.0.0",
]
name = "panic_abort"
version = "0.0.0"
dependencies = [
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
]
version = "0.0.0"
dependencies = [
"alloc 0.0.0",
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
"unwind 0.0.0",
version = "0.0.0"
dependencies = [
"cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
]
version = "0.1.10"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-std-workspace-core 1.0.0",
]
"alloc 0.0.0",
"build_helper 0.1.0",
"cmake 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
]
"alloc 0.0.0",
"build_helper 0.1.0",
"cmake 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
]
"rustc_errors 0.0.0",
"rustc_target 0.0.0",
"serialize 0.0.0",
+ "smallvec 0.6.7 (registry+https://github.com/rust-lang/crates.io-index)",
"stable_deref_trait 1.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"syntax 0.0.0",
"syntax_ext 0.0.0",
"alloc 0.0.0",
"build_helper 0.1.0",
"cmake 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
]
"alloc 0.0.0",
"build_helper 0.1.0",
"cmake 0.1.38 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
]
"alloc 0.0.0",
"backtrace-sys 0.1.27 (registry+https://github.com/rust-lang/crates.io-index)",
"cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
"dlmalloc 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)",
"fortanix-sgx-abi 0.3.2 (registry+https://github.com/rust-lang/crates.io-index)",
version = "0.0.0"
dependencies = [
"cc 1.0.35 (registry+https://github.com/rust-lang/crates.io-index)",
- "compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)",
+ "compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)",
"core 0.0.0",
"libc 0.2.54 (registry+https://github.com/rust-lang/crates.io-index)",
]
"checksum colored 1.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "b0aa3473e85a3161b59845d6096b289bb577874cafeaf75ea1b1beaa6572c7fc"
"checksum commoncrypto 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "d056a8586ba25a1e4d61cb090900e495952c7886786fc55f909ab2f819b69007"
"checksum commoncrypto-sys 0.2.0 (registry+https://github.com/rust-lang/crates.io-index)" = "1fed34f46747aa73dfaa578069fd8279d2818ade2b55f38f22a9401c7f4083e2"
-"checksum compiler_builtins 0.1.14 (registry+https://github.com/rust-lang/crates.io-index)" = "e3f235c329e5cb9fa3d2ca2cc36256ba9a7f23fa76e0f4db6f68c23b73b2ac69"
+"checksum compiler_builtins 0.1.15 (registry+https://github.com/rust-lang/crates.io-index)" = "e899b947d7e71c3d35c0b6194d64025b84946640510e215090c815b20828964e"
"checksum compiletest_rs 0.3.22 (registry+https://github.com/rust-lang/crates.io-index)" = "f40ecc9332b68270998995c00f8051ee856121764a0d3230e64c9efd059d27b6"
"checksum constant_time_eq 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "8ff012e225ce166d4422e0e78419d901719760f62ae2b7969ca6b564d1b54a9e"
"checksum core-foundation 0.6.3 (registry+https://github.com/rust-lang/crates.io-index)" = "4e2640d6d0bf22e82bed1b73c6aef8d5dd31e5abe6666c57e6d45e2649f4f887"
cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
}
+ // Determine if we're going to compile in optimized C intrinsics to
+ // the `compiler-builtins` crate. These intrinsics live in LLVM's
+ // `compiler-rt` repository, but our `src/llvm-project` submodule isn't
+ // always checked out, so we need to conditionally look for this. (e.g. if
+ // an external LLVM is used we skip the LLVM submodule checkout).
+ //
+ // Note that this shouldn't affect the correctness of `compiler-builtins`,
+ // but only its speed. Some intrinsics in C haven't been translated to Rust
+ // yet but that's pretty rare. Other intrinsics have optimized
+ // implementations in C which have only had slower versions ported to Rust,
+ // so we favor the C version where we can, but it's not critical.
+ //
+ // If `compiler-rt` is available ensure that the `c` feature of the
+ // `compiler-builtins` crate is enabled and it's configured to learn where
+ // `compiler-rt` is located.
+ let compiler_builtins_root = builder.src.join("src/llvm-project/compiler-rt");
+ let compiler_builtins_c_feature = if compiler_builtins_root.exists() {
+ cargo.env("RUST_COMPILER_RT_ROOT", &compiler_builtins_root);
+ " compiler-builtins-c".to_string()
+ } else {
+ String::new()
+ };
+
if builder.no_std(target) == Some(true) {
+ let mut features = "compiler-builtins-mem".to_string();
+ features.push_str(&compiler_builtins_c_feature);
+
// for no-std targets we only compile a few no_std crates
cargo
.args(&["-p", "alloc"])
.arg("--features")
.arg("compiler-builtins-mem compiler-builtins-c");
} else {
- let features = builder.std_features();
+ let mut features = builder.std_features();
+ features.push_str(&compiler_builtins_c_feature);
if compiler.stage != 0 && builder.config.sanitizers {
// This variable is used by the sanitizer runtime crates, e.g.
} else {
format!("static={}", link_name)
};
- // The source for `compiler-rt` comes from the `compiler-builtins` crate, so
- // load our env var set by cargo to find the source code.
- let dir = env::var_os("DEP_COMPILER_RT_COMPILER_RT").unwrap();
+ // This env var is provided by rustbuild to tell us where `compiler-rt`
+ // lives.
+ let dir = env::var_os("RUST_COMPILER_RT_ROOT").unwrap();
let lib = native_lib_boilerplate(
dir.as_ref(),
sanitizer_name,
gpg-agent
RUN apt-key adv --batch --yes --keyserver keyserver.ubuntu.com --recv-keys 74DA7924C5513486
-RUN add-apt-repository -y 'deb http://apt.dilos.org/dilos dilos2-testing main'
+RUN add-apt-repository -y 'deb http://apt.dilos.org/dilos dilos2 main'
WORKDIR /tmp
COPY dist-various-2/shared.sh /tmp/
--build=x86_64-unknown-linux-gnu \
--enable-full-bootstrap
ENV SCRIPT python2.7 ../x.py build
+
+# In general this just slows down the build; this builder is only a smoke test
+# that a full bootstrap works, so there's no need to take this penalty in
+# build times.
+ENV NO_LLVM_ASSERTIONS 1
+ENV NO_DEBUG_ASSERTIONS 1
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<&'a T> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<T> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<T> {
- self.next_back()
- }
}
#[stable(feature = "drain", since = "1.6.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.length, Some(self.length))
}
-
- #[inline]
- fn last(mut self) -> Option<(&'a K, &'a V)> {
- self.next_back()
- }
}
#[stable(feature = "fused", since = "1.26.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.length, Some(self.length))
}
-
- #[inline]
- fn last(mut self) -> Option<(&'a K, &'a mut V)> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
(self.length, Some(self.length))
}
-
- #[inline]
- fn last(mut self) -> Option<(K, V)> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<&'a K> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<&'a V> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
unsafe { Some(self.next_unchecked()) }
}
}
-
- #[inline]
- fn last(mut self) -> Option<(&'a K, &'a V)> {
- self.next_back()
- }
}
#[stable(feature = "map_values_mut", since = "1.10.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<&'a mut V> {
- self.next_back()
- }
}
#[stable(feature = "map_values_mut", since = "1.10.0")]
unsafe { Some(self.next_unchecked()) }
}
}
-
- #[inline]
- fn last(mut self) -> Option<(&'a K, &'a mut V)> {
- self.next_back()
- }
}
impl<'a, K, V> RangeMut<'a, K, V> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<&'a T> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<'a, T> DoubleEndedIterator for Iter<'a, T> {
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<T> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl<T> DoubleEndedIterator for IntoIter<T> {
fn next(&mut self) -> Option<&'a T> {
self.iter.next().map(|(k, _)| k)
}
-
- #[inline]
- fn last(mut self) -> Option<&'a T> {
- self.next_back()
- }
}
#[stable(feature = "btree_range", since = "1.17.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
- #[inline]
- fn last(mut self) -> Option<char> {
- self.next_back()
- }
}
#[stable(feature = "drain", since = "1.6.0")]
fn count(self) -> usize {
self.len()
}
-
- #[inline]
- fn last(mut self) -> Option<T> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<T> {
- self.next_back()
- }
}
#[stable(feature = "drain", since = "1.6.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.drain.size_hint()
}
-
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "vec_splice", since = "1.21.0")]
}
}
+ #[inline]
+ unsafe fn write_from_iter<T, I: Iterator<Item = T>>(
+ &self,
+ mut iter: I,
+ len: usize,
+ mem: *mut T,
+ ) -> &mut [T] {
+ let mut i = 0;
+ // Use a manual loop since LLVM manages to optimize it better for
+ // slice iterators
+ loop {
+ let value = iter.next();
+ if i >= len || value.is_none() {
+ // We only return as many items as the iterator gave us, even
+ // though it was supposed to give us `len`
+ return slice::from_raw_parts_mut(mem, i);
+ }
+ ptr::write(mem.offset(i as isize), value.unwrap());
+ i += 1;
+ }
+ }
+
#[inline]
pub fn alloc_from_iter<T, I: IntoIterator<Item = T>>(&self, iter: I) -> &mut [T] {
- let mut iter = iter.into_iter();
+ let iter = iter.into_iter();
assert!(mem::size_of::<T>() != 0);
assert!(!mem::needs_drop::<T>());
let size = len.checked_mul(mem::size_of::<T>()).unwrap();
let mem = self.alloc_raw(size, mem::align_of::<T>()) as *mut _ as *mut T;
unsafe {
- for i in 0..len {
- ptr::write(mem.offset(i as isize), iter.next().unwrap())
- }
- slice::from_raw_parts_mut(mem, len)
+ self.write_from_iter(iter, len, mem)
}
}
(_, _) => {
type Item = u8;
fn next(&mut self) -> Option<u8> { self.range.next().map(|i| self.data[i]) }
fn size_hint(&self) -> (usize, Option<usize>) { self.range.size_hint() }
- #[inline]
- fn last(mut self) -> Option<u8> { self.next_back() }
}
#[stable(feature = "rust1", since = "1.0.0")]
impl DoubleEndedIterator for EscapeDefault {
//! mutability:
//!
//! ```
+//! use std::cell::{RefCell, RefMut};
//! use std::collections::HashMap;
-//! use std::cell::RefCell;
//! use std::rc::Rc;
//!
//! fn main() {
//! let shared_map: Rc<RefCell<_>> = Rc::new(RefCell::new(HashMap::new()));
-//! shared_map.borrow_mut().insert("africa", 92388);
-//! shared_map.borrow_mut().insert("kyoto", 11837);
-//! shared_map.borrow_mut().insert("piccadilly", 11826);
-//! shared_map.borrow_mut().insert("marbles", 38);
+//! // Create a new block to limit the scope of the dynamic borrow
+//! {
+//! let mut map: RefMut<_> = shared_map.borrow_mut();
+//! map.insert("africa", 92388);
+//! map.insert("kyoto", 11837);
+//! map.insert("piccadilly", 11826);
+//! map.insert("marbles", 38);
+//! }
+//!
+//! // Note that if we had not let the previous borrow of the cache fall out
+//! // of scope then the subsequent borrow would cause a dynamic thread panic.
+//! // This is the major hazard of using `RefCell`.
+//! let total: i32 = shared_map.borrow().values().sum();
+//! println!("{}", total);
//! }
//! ```
//!
//!
//! impl Graph {
//! fn minimum_spanning_tree(&self) -> Vec<(i32, i32)> {
-//! // Create a new scope to contain the lifetime of the
-//! // dynamic borrow
-//! {
-//! // Take a reference to the inside of cache cell
-//! let mut cache = self.span_tree_cache.borrow_mut();
-//! if cache.is_some() {
-//! return cache.as_ref().unwrap().clone();
-//! }
-//!
-//! let span_tree = self.calc_span_tree();
-//! *cache = Some(span_tree);
-//! }
+//! self.span_tree_cache.borrow_mut()
+//! .get_or_insert_with(|| self.calc_span_tree())
+//! .clone()
+//! }
//!
-//! // Recursive call to return the just-cached value.
-//! // Note that if we had not let the previous borrow
-//! // of the cache fall out of scope then the subsequent
-//! // recursive borrow would cause a dynamic thread panic.
-//! // This is the major hazard of using `RefCell`.
-//! self.minimum_spanning_tree()
+//! fn calc_span_tree(&self) -> Vec<(i32, i32)> {
+//! // Expensive computation goes here
+//! vec![]
//! }
-//! # fn calc_span_tree(&self) -> Vec<(i32, i32)> { vec![] }
//! }
//! ```
//!
{
self.iter.position(predicate)
}
-
- #[inline]
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Copy, Clone, Eq, PartialEq, Ord, PartialOrd, Hash)]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
+ #[cfg_attr(not(stage0), rustc_nonnull_optimization_guaranteed)]
pub struct $Ty($Int);
}
#[stable(feature = "nonnull", since = "1.25.0")]
#[repr(transparent)]
#[rustc_layout_scalar_valid_range_start(1)]
+#[cfg_attr(not(stage0), rustc_nonnull_optimization_guaranteed)]
pub struct NonNull<T: ?Sized> {
pointer: *const T,
}
(1, Some(self.v.len() + 1))
}
}
-
- #[inline]
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
(1, Some(self.v.len() + 1))
}
}
-
- #[inline]
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "slice_rsplit", since = "1.27.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.0.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "split_whitespace", since = "1.1.0")]
fn size_hint(&self) -> (usize, Option<usize>) {
self.inner.size_hint()
}
-
- #[inline]
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "split_ascii_whitespace", since = "1.34.0")]
cfg.define("COMPILER_RT_HAS_ATOMICS", Some("1"));
}
- // The source for `compiler-rt` comes from the `compiler-builtins` crate, so
- // load our env var set by cargo to find the source code.
- let root = env::var_os("DEP_COMPILER_RT_COMPILER_RT").unwrap();
+ let root = env::var_os("RUST_COMPILER_RT_ROOT").unwrap();
let root = Path::new(&root);
for src in profile_sources {
use std::marker::PhantomData;
use smallvec::SmallVec;
+/// This declares a list of types which can be allocated by `Arena`.
+///
+/// The `few` modifier will cause allocation to use the shared arena and record the destructor.
+/// This is faster and more memory efficient if there are only a few allocations of the type.
+/// Leaving `few` out will cause the type to get its own dedicated `TypedArena` which is
+/// faster and more memory efficient if there are many allocations.
+///
+/// Specifying the `decode` modifier will add decode impls for &T and &[T] where T is the type
+/// listed. These impls will appear in the implement_ty_decoder! macro.
#[macro_export]
macro_rules! arena_types {
($macro:path, $args:tt, $tcx:lifetime) => (
rustc::hir::def_id::DefId,
rustc::ty::subst::SubstsRef<$tcx>
)>,
- [few] mir_keys: rustc::util::nodemap::DefIdSet,
+ [few, decode] mir_keys: rustc::util::nodemap::DefIdSet,
[decode] specialization_graph: rustc::traits::specialization_graph::Graph,
[] region_scope_tree: rustc::middle::region::ScopeTree,
[] item_local_set: rustc::util::nodemap::ItemLocalSet,
rustc::infer::canonical::Canonical<'tcx,
rustc::infer::canonical::QueryResponse<'tcx, rustc::ty::Ty<'tcx>>
>,
+ [few] crate_inherent_impls: rustc::ty::CrateInherentImpls,
+ [decode] borrowck: rustc::middle::borrowck::BorrowCheckResult,
+ [few] upstream_monomorphizations:
+ rustc::util::nodemap::DefIdMap<
+ rustc_data_structures::fx::FxHashMap<
+ rustc::ty::subst::SubstsRef<'tcx>,
+ rustc::hir::def_id::CrateNum
+ >
+ >,
+ [few] resolve_lifetimes: rustc::middle::resolve_lifetime::ResolveLifetimes,
+ [decode] generic_predicates: rustc::ty::GenericPredicates<'tcx>,
+ [few] lint_levels: rustc::lint::LintLevelMap,
+ [few] stability_index: rustc::middle::stability::Index<'tcx>,
+ [few] features: syntax::feature_gate::Features,
+ [few] all_traits: Vec<rustc::hir::def_id::DefId>,
+ [few] privacy_access_levels: rustc::middle::privacy::AccessLevels,
+ [few] target_features_whitelist: rustc_data_structures::fx::FxHashMap<
+ String,
+ Option<syntax::symbol::Symbol>
+ >,
+ [few] wasm_import_module_map: rustc_data_structures::fx::FxHashMap<
+ rustc::hir::def_id::DefId,
+ String
+ >,
+ [few] get_lib_features: rustc::middle::lib_features::LibFeatures,
+ [few] defined_lib_features: rustc::middle::lang_items::LanguageItems,
+ [few] visible_parent_map: rustc::util::nodemap::DefIdMap<rustc::hir::def_id::DefId>,
+ [few] foreign_module: rustc::middle::cstore::ForeignModule,
+ [few] foreign_modules: Vec<rustc::middle::cstore::ForeignModule>,
+ [few] reachable_non_generics: rustc::util::nodemap::DefIdMap<
+ rustc::middle::exported_symbols::SymbolExportLevel
+ >,
+ [few] crate_variances: rustc::ty::CrateVariancesMap<'tcx>,
+ [few] inferred_outlives_crate: rustc::ty::CratePredicatesMap<'tcx>,
], $tcx);
)
}
impl<T: Copy> ArenaAllocatable for T {}
-pub unsafe trait ArenaField<'tcx>: Sized {
+unsafe trait ArenaField<'tcx>: Sized {
/// Returns a specific arena to allocate from.
/// If None is returned, the DropArena will be used.
fn arena<'a>(arena: &'a Arena<'tcx>) -> Option<&'a TypedArena<Self>>;
for arm in arms {
// Add an exit node for when we've visited all the
// patterns and the guard (if there is one) in the arm.
- let arm_exit = self.add_dummy_node(&[]);
+ let bindings_exit = self.add_dummy_node(&[]);
for pat in &arm.pats {
// Visit the pattern, coming from the discriminant exit
// Add an edge from the exit of this pattern to the
// exit of the arm
- self.add_contained_edge(pat_exit, arm_exit);
+ self.add_contained_edge(pat_exit, bindings_exit);
}
// Visit the body of this arm
- let body_exit = self.expr(&arm.body, arm_exit);
+ let body_exit = self.expr(&arm.body, bindings_exit);
+
+ let arm_exit = self.add_ast_node(arm.hir_id.local_id, &[body_exit]);
// Link the body to the exit of the expression
- self.add_contained_edge(body_exit, expr_exit);
+ self.add_contained_edge(arm_exit, expr_exit);
}
expr_exit
}
pub fn walk_arm<'v, V: Visitor<'v>>(visitor: &mut V, arm: &'v Arm) {
+ visitor.visit_id(arm.hir_id);
walk_list!(visitor, visit_pat, &arm.pats);
if let Some(ref g) = arm.guard {
match g {
use syntax::source_map::{respan, CompilerDesugaringKind, Spanned};
use syntax::source_map::CompilerDesugaringKind::IfTemporary;
use syntax::std_inject;
-use syntax::symbol::{keywords, Symbol, sym};
+use syntax::symbol::{kw, sym, Symbol};
use syntax::tokenstream::{TokenStream, TokenTree};
use syntax::parse::token::Token;
use syntax::visit::{self, Visitor};
hir::LifetimeParamKind::InBand,
),
ParamName::Fresh(_) => (
- keywords::UnderscoreLifetime.name().as_interned_str(),
+ kw::UnderscoreLifetime.as_interned_str(),
hir::LifetimeParamKind::Elided,
),
ParamName::Error => (
- keywords::UnderscoreLifetime.name().as_interned_str(),
+ kw::UnderscoreLifetime.as_interned_str(),
hir::LifetimeParamKind::Error,
),
};
fn lower_arm(&mut self, arm: &Arm) -> hir::Arm {
hir::Arm {
+ hir_id: self.next_id(),
attrs: self.lower_attrs(&arm.attrs),
pats: arm.pats.iter().map(|x| self.lower_pat(x)).collect(),
guard: match arm.guard {
_ => None,
},
body: P(self.lower_expr(&arm.body)),
+ span: arm.span,
}
}
P(hir::Path {
res,
segments: hir_vec![hir::PathSegment::from_ident(
- keywords::SelfUpper.ident()
+ Ident::with_empty_ctxt(kw::SelfUpper)
)],
span: t.span,
}),
trace!("registering existential type with id {:#?}", exist_ty_id);
let exist_ty_item = hir::Item {
hir_id: exist_ty_id,
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
attrs: Default::default(),
node: exist_ty_item_kind,
vis: respan(span.shrink_to_lo(), hir::VisibilityKind::Inherited),
let (name, kind) = match name {
hir::LifetimeName::Underscore => (
- hir::ParamName::Plain(keywords::UnderscoreLifetime.ident()),
+ hir::ParamName::Plain(Ident::with_empty_ctxt(kw::UnderscoreLifetime)),
hir::LifetimeParamKind::Elided,
),
hir::LifetimeName::Param(param_name) => (
.iter()
.map(|arg| match arg.pat.node {
PatKind::Ident(_, ident, _) => ident,
- _ => Ident::new(keywords::Invalid.name(), arg.pat.span),
+ _ => Ident::new(kw::Invalid, arg.pat.span),
})
.collect()
}
fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime {
let span = l.ident.span;
match l.ident {
- ident if ident.name == keywords::StaticLifetime.name() =>
+ ident if ident.name == kw::StaticLifetime =>
self.new_named_lifetime(l.id, span, hir::LifetimeName::Static),
- ident if ident.name == keywords::UnderscoreLifetime.name() =>
+ ident if ident.name == kw::UnderscoreLifetime =>
match self.anonymous_lifetime_mode {
AnonymousLifetimeMode::CreateParameter => {
let fresh_name = self.collect_fresh_in_band_lifetime(span);
// Don't expose `Self` (recovered "keyword used as ident" parse error).
// `rustc::ty` expects `Self` to be only used for a trait's `Self`.
// Instead, use `gensym("Self")` to create a distinct name that looks the same.
- let ident = if param.ident.name == keywords::SelfUpper.name() {
+ let ident = if param.ident.name == kw::SelfUpper {
param.ident.gensym()
} else {
param.ident
// Correctly resolve `self` imports.
if path.segments.len() > 1
- && path.segments.last().unwrap().ident.name == keywords::SelfLower.name()
+ && path.segments.last().unwrap().ident.name == kw::SelfLower
{
let _ = path.segments.pop();
if rename.is_none() {
fn arm(&mut self, pats: hir::HirVec<P<hir::Pat>>, expr: P<hir::Expr>) -> hir::Arm {
hir::Arm {
+ hir_id: self.next_id(),
attrs: hir_vec![],
pats,
guard: None,
+ span: expr.span,
body: expr,
}
}
});
}
+ fn visit_arm(&mut self, arm: &'hir Arm) {
+ let node = Node::Arm(arm);
+
+ self.insert(arm.span, arm.hir_id, node);
+
+ self.with_parent(arm.hir_id, |this| {
+ intravisit::walk_arm(this, arm);
+ });
+ }
+
fn visit_anon_const(&mut self, constant: &'hir AnonConst) {
self.insert(DUMMY_SP, constant.hir_id, Node::AnonConst(constant));
use syntax::ast::*;
use syntax::ext::hygiene::Mark;
use syntax::visit;
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use syntax::symbol::Symbol;
use syntax::parse::token::{self, Token};
use syntax_pos::Span;
// information we encapsulate into, the better
let def_data = match i.node {
ItemKind::Impl(..) => DefPathData::Impl,
- ItemKind::Mod(..) if i.ident == keywords::Invalid.ident() => {
+ ItemKind::Mod(..) if i.ident.name == kw::Invalid => {
return visit::walk_item(self, i);
}
ItemKind::Mod(..) | ItemKind::Trait(..) | ItemKind::TraitAlias(..) |
Node::Pat(_) |
Node::Binding(_) |
Node::Local(_) |
+ Node::Arm(_) |
Node::Lifetime(_) |
Node::Visibility(_) |
Node::Block(_) |
pub fn ty_param_name(&self, id: HirId) -> Name {
match self.get_by_hir_id(id) {
Node::Item(&Item { node: ItemKind::Trait(..), .. }) |
- Node::Item(&Item { node: ItemKind::TraitAlias(..), .. }) => keywords::SelfUpper.name(),
+ Node::Item(&Item { node: ItemKind::TraitAlias(..), .. }) => kw::SelfUpper,
Node::GenericParam(param) => param.name.ident().name,
_ => bug!("ty_param_name: {} not a type parameter", self.hir_to_string(id)),
}
Some(Node::Field(ref f)) => Some(&f.attrs[..]),
Some(Node::Expr(ref e)) => Some(&*e.attrs),
Some(Node::Stmt(ref s)) => Some(s.node.attrs()),
+ Some(Node::Arm(ref a)) => Some(&*a.attrs),
Some(Node::GenericParam(param)) => Some(¶m.attrs[..]),
// Unit/tuple structs/variants take the attributes straight from
// the struct/variant definition.
Some(Node::TraitRef(tr)) => tr.path.span,
Some(Node::Binding(pat)) => pat.span,
Some(Node::Pat(pat)) => pat.span,
+ Some(Node::Arm(arm)) => arm.span,
Some(Node::Block(block)) => block.span,
Some(Node::Ctor(..)) => match self.find_by_hir_id(
self.get_parent_node_by_hir_id(hir_id))
Node::TraitRef(a) => self.print_trait_ref(&a),
Node::Binding(a) |
Node::Pat(a) => self.print_pat(&a),
+ Node::Arm(a) => self.print_arm(&a),
Node::Block(a) => {
use syntax::print::pprust::PrintState;
Some(Node::Pat(_)) => {
format!("pat {}{}", map.hir_to_pretty_string(id), id_str)
}
+ Some(Node::Arm(_)) => {
+ format!("arm {}{}", map.hir_to_pretty_string(id), id_str)
+ }
Some(Node::Block(_)) => {
format!("block {}{}", map.hir_to_pretty_string(id), id_str)
}
use syntax::attr::{InlineAttr, OptimizeAttr};
use syntax::ext::hygiene::SyntaxContext;
use syntax::ptr::P;
-use syntax::symbol::{Symbol, keywords};
+use syntax::symbol::{Symbol, kw};
use syntax::tokenstream::TokenStream;
use syntax::util::parser::ExprPrecedence;
use crate::ty::AdtKind;
pub span: Span,
/// Either "`'a`", referring to a named lifetime definition,
- /// or "``" (i.e., `keywords::Invalid`), for elision placeholders.
+ /// or "``" (i.e., `kw::Invalid`), for elision placeholders.
///
/// HIR lowering inserts these placeholders in type paths that
/// refer to type definitions needing lifetime parameters,
pub fn ident(&self) -> Ident {
match *self {
ParamName::Plain(ident) => ident,
- ParamName::Error | ParamName::Fresh(_) => keywords::UnderscoreLifetime.ident(),
+ ParamName::Fresh(_) |
+ ParamName::Error => Ident::with_empty_ctxt(kw::UnderscoreLifetime),
}
}
impl LifetimeName {
pub fn ident(&self) -> Ident {
match *self {
- LifetimeName::Implicit => keywords::Invalid.ident(),
- LifetimeName::Error => keywords::Invalid.ident(),
- LifetimeName::Underscore => keywords::UnderscoreLifetime.ident(),
- LifetimeName::Static => keywords::StaticLifetime.ident(),
+ LifetimeName::Implicit | LifetimeName::Error => Ident::invalid(),
+ LifetimeName::Underscore => Ident::with_empty_ctxt(kw::UnderscoreLifetime),
+ LifetimeName::Static => Ident::with_empty_ctxt(kw::StaticLifetime),
LifetimeName::Param(param_name) => param_name.ident(),
}
}
impl Path {
pub fn is_global(&self) -> bool {
- !self.segments.is_empty() && self.segments[0].ident.name == keywords::PathRoot.name()
+ !self.segments.is_empty() && self.segments[0].ident.name == kw::PathRoot
}
}
/// `<pats> (if <guard>) => <body>`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub struct Arm {
+ #[stable_hasher(ignore)]
+ pub hir_id: HirId,
+ pub span: Span,
pub attrs: HirVec<Attribute>,
/// Multiple patterns can be combined with `|`
pub pats: HirVec<P<Pat>>,
TraitRef(&'hir TraitRef),
Binding(&'hir Pat),
Pat(&'hir Pat),
+ Arm(&'hir Arm),
Block(&'hir Block),
Local(&'hir Local),
MacroDef(&'hir MacroDef),
use syntax::print::pp::Breaks::{Consistent, Inconsistent};
use syntax::print::pprust::{self, PrintState};
use syntax::ptr::P;
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use syntax::util::parser::{self, AssocOp, Fixity};
use syntax_pos::{self, BytePos, FileName};
hir::VisibilityKind::Restricted { ref path, .. } => {
self.s.word("pub(")?;
if path.segments.len() == 1 &&
- path.segments[0].ident.name == keywords::Super.name() {
+ path.segments[0].ident.name == kw::Super {
// Special case: `super` can print like `pub(super)`.
self.s.word("super")?;
} else {
fn print_literal(&mut self, lit: &hir::Lit) -> io::Result<()> {
self.maybe_print_comment(lit.span.lo())?;
- let (token, suffix) = lit.node.to_lit_token();
- self.writer().word(pprust::literal_to_string(token, suffix))
+ self.writer().word(pprust::literal_to_string(lit.node.to_lit_token()))
}
pub fn print_expr(&mut self, expr: &hir::Expr) -> io::Result<()> {
if i > 0 {
self.s.word("::")?
}
- if segment.ident.name != keywords::PathRoot.name() {
+ if segment.ident.name != kw::PathRoot {
self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args, segment.infer_types,
}
pub fn print_path_segment(&mut self, segment: &hir::PathSegment) -> io::Result<()> {
- if segment.ident.name != keywords::PathRoot.name() {
+ if segment.ident.name != kw::PathRoot {
self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args, segment.infer_types, false)
if i > 0 {
self.s.word("::")?
}
- if segment.ident.name != keywords::PathRoot.name() {
+ if segment.ident.name != kw::PathRoot {
self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args,
self.ann.post(self, AnnNode::Pat(pat))
}
- fn print_arm(&mut self, arm: &hir::Arm) -> io::Result<()> {
+ pub fn print_arm(&mut self, arm: &hir::Arm) -> io::Result<()> {
// I have no idea why this check is necessary, but here it
// is :(
if arm.attrs.is_empty() {
impl_stable_hash_for!(struct ::syntax::ast::Lit {
node,
token,
- suffix,
span
});
}
}
-impl_stable_hash_for!(enum token::Lit {
- Bool(val),
- Byte(val),
- Char(val),
- Err(val),
- Integer(val),
- Float(val),
- Str_(val),
- ByteStr(val),
- StrRaw(val, n),
- ByteStrRaw(val, n)
+impl_stable_hash_for!(enum token::LitKind {
+ Bool,
+ Byte,
+ Char,
+ Integer,
+ Float,
+ Str,
+ ByteStr,
+ StrRaw(n),
+ ByteStrRaw(n),
+ Err
+});
+
+impl_stable_hash_for!(struct token::Lit {
+ kind,
+ symbol,
+ suffix
});
fn hash_token<'a, 'gcx, W: StableHasherResult>(
token::Token::CloseDelim(delim_token) => {
std_hash::Hash::hash(&delim_token, hasher);
}
- token::Token::Literal(lit, opt_name) => {
- lit.hash_stable(hcx, hasher);
- opt_name.hash_stable(hcx, hasher);
- }
+ token::Token::Literal(lit) => lit.hash_stable(hcx, hasher),
token::Token::Ident(ident, is_raw) => {
ident.name.hash_stable(hcx, hasher);
pub use self::Level::*;
pub use self::LintSource::*;
-use rustc_data_structures::sync::{self, Lrc};
+use rustc_data_structures::sync;
use crate::hir::def_id::{CrateNum, LOCAL_CRATE};
use crate::hir::intravisit;
}
fn lint_levels<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, cnum: CrateNum)
- -> Lrc<LintLevelMap>
+ -> &'tcx LintLevelMap
{
assert_eq!(cnum, LOCAL_CRATE);
let mut builder = LintLevelMapBuilder {
intravisit::walk_crate(&mut builder, krate);
builder.levels.pop(push);
- Lrc::new(builder.levels.build_map())
+ tcx.arena.alloc(builder.levels.build_map())
}
struct LintLevelMapBuilder<'a, 'tcx: 'a> {
})
}
+ fn visit_arm(&mut self, a: &'tcx hir::Arm) {
+ self.with_lint_attrs(a.hir_id, &a.attrs, |builder| {
+ intravisit::walk_arm(builder, a);
+ })
+ }
+
fn visit_trait_item(&mut self, trait_item: &'tcx hir::TraitItem) {
self.with_lint_attrs(trait_item.hir_id, &trait_item.attrs, |builder| {
intravisit::walk_trait_item(builder, trait_item);
Some((cnum, path))
})
.collect::<Vec<_>>();
- let mut ordering = tcx.postorder_cnums(LOCAL_CRATE);
- Lrc::make_mut(&mut ordering).reverse();
+ let mut ordering = tcx.postorder_cnums(LOCAL_CRATE).to_owned();
+ ordering.reverse();
libs.sort_by_cached_key(|&(a, _)| {
ordering.iter().position(|x| *x == a)
});
use std::rc::Rc;
use syntax::ast::{self, NodeId};
use syntax::ptr::P;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax_pos::Span;
use crate::hir;
let sp = ident.span;
let var = self.variable(hir_id, sp);
// Ignore unused self.
- if ident.name != keywords::SelfLower.name() {
+ if ident.name != kw::SelfLower {
if !self.warn_about_unused(vec![sp], hir_id, entry_ln, var) {
if self.live_on_entry(entry_ln, var).is_none() {
self.report_dead_assign(hir_id, sp, var, true);
pub enum ScopeData {
Node,
- // Scope of the call-site for a function or closure
- // (outlives the arguments as well as the body).
+ /// Scope of the call-site for a function or closure
+ /// (outlives the arguments as well as the body).
CallSite,
- // Scope of arguments passed to a function or closure
- // (they outlive its body).
+ /// Scope of arguments passed to a function or closure
+ /// (they outlive its body).
Arguments,
- // Scope of destructors for temporaries of node-id.
+ /// Scope of destructors for temporaries of node-id.
Destruction,
- // Scope following a `let id = expr;` binding in a block.
+ /// Scope following a `let id = expr;` binding in a block.
Remainder(FirstStatementIndex)
}
///
/// * The subscope with `first_statement_index == 1` is scope of `c`,
/// and thus does not include EXPR_2, but covers the `...`.
- pub struct FirstStatementIndex { .. }
+ pub struct FirstStatementIndex {
+ derive [HashStable]
+ }
}
-impl_stable_hash_for!(struct crate::middle::region::FirstStatementIndex { private });
-
// compilation error if size of `ScopeData` is not the same as a `u32`
static_assert_size!(ScopeData, 4);
}
fn resolve_arm<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, arm: &'tcx hir::Arm) {
+ let prev_cx = visitor.cx;
+
+ visitor.enter_scope(
+ Scope {
+ id: arm.hir_id.local_id,
+ data: ScopeData::Node,
+ }
+ );
+ visitor.cx.var_parent = visitor.cx.parent;
+
visitor.terminating_scopes.insert(arm.body.hir_id.local_id);
if let Some(hir::Guard::If(ref expr)) = arm.guard {
}
intravisit::walk_arm(visitor, arm);
+
+ visitor.cx = prev_cx;
}
fn resolve_pat<'a, 'tcx>(visitor: &mut RegionResolutionVisitor<'a, 'tcx>, pat: &'tcx hir::Pat) {
terminating(body.hir_id.local_id);
}
- hir::ExprKind::Match(..) => {
- visitor.cx.var_parent = visitor.cx.parent;
- }
-
hir::ExprKind::DropTemps(ref expr) => {
// `DropTemps(expr)` does not denote a conditional scope.
// Rather, we want to achieve the same behavior as `{ let _t = expr; _t }`.
use crate::session::Session;
use crate::util::nodemap::{DefIdMap, FxHashMap, FxHashSet, HirIdMap, HirIdSet};
use errors::{Applicability, DiagnosticBuilder};
-use rustc_data_structures::sync::Lrc;
use rustc_macros::HashStable;
use std::borrow::Cow;
use std::cell::Cell;
use syntax::ast;
use syntax::attr;
use syntax::ptr::P;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax_pos::Span;
use crate::hir::intravisit::{self, NestedVisitorMap, Visitor};
/// See [`NamedRegionMap`].
#[derive(Default)]
pub struct ResolveLifetimes {
- defs: FxHashMap<LocalDefId, Lrc<FxHashMap<ItemLocalId, Region>>>,
- late_bound: FxHashMap<LocalDefId, Lrc<FxHashSet<ItemLocalId>>>,
+ defs: FxHashMap<LocalDefId, FxHashMap<ItemLocalId, Region>>,
+ late_bound: FxHashMap<LocalDefId, FxHashSet<ItemLocalId>>,
object_lifetime_defaults:
- FxHashMap<LocalDefId, Lrc<FxHashMap<ItemLocalId, Lrc<Vec<ObjectLifetimeDefault>>>>>,
+ FxHashMap<LocalDefId, FxHashMap<ItemLocalId, Vec<ObjectLifetimeDefault>>>,
}
impl_stable_hash_for!(struct crate::middle::resolve_lifetime::ResolveLifetimes {
named_region_map: |tcx, id| {
let id = LocalDefId::from_def_id(DefId::local(id)); // (*)
- tcx.resolve_lifetimes(LOCAL_CRATE).defs.get(&id).cloned()
+ tcx.resolve_lifetimes(LOCAL_CRATE).defs.get(&id)
},
is_late_bound_map: |tcx, id| {
tcx.resolve_lifetimes(LOCAL_CRATE)
.late_bound
.get(&id)
- .cloned()
},
object_lifetime_defaults_map: |tcx, id| {
tcx.resolve_lifetimes(LOCAL_CRATE)
.object_lifetime_defaults
.get(&id)
- .cloned()
},
..*providers
fn resolve_lifetimes<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
for_krate: CrateNum,
-) -> Lrc<ResolveLifetimes> {
+) -> &'tcx ResolveLifetimes {
assert_eq!(for_krate, LOCAL_CRATE);
let named_region_map = krate(tcx);
for (hir_id, v) in named_region_map.defs {
let map = rl.defs.entry(hir_id.owner_local_def_id()).or_default();
- Lrc::get_mut(map).unwrap().insert(hir_id.local_id, v);
+ map.insert(hir_id.local_id, v);
}
for hir_id in named_region_map.late_bound {
let map = rl.late_bound
.entry(hir_id.owner_local_def_id())
.or_default();
- Lrc::get_mut(map).unwrap().insert(hir_id.local_id);
+ map.insert(hir_id.local_id);
}
for (hir_id, v) in named_region_map.object_lifetime_defaults {
let map = rl.object_lifetime_defaults
.entry(hir_id.owner_local_def_id())
.or_default();
- Lrc::get_mut(map)
- .unwrap()
- .insert(hir_id.local_id, Lrc::new(v));
+ map.insert(hir_id.local_id, v);
}
- Lrc::new(rl)
+ tcx.arena.alloc(rl)
}
fn krate<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> NamedRegionMap {
GenericParamKind::Lifetime { .. } => {
let (name, reg) = Region::early(&self.tcx.hir(), &mut index, ¶m);
if let hir::ParamName::Plain(param_name) = name {
- if param_name.name == keywords::UnderscoreLifetime.name() {
+ if param_name.name == kw::UnderscoreLifetime {
// Pick the elided lifetime "definition" if one exists
// and use it to make an elision scope.
elision = Some(reg);
} {
debug!("id = {:?} span = {:?} name = {:?}", id, span, name);
- if name == keywords::UnderscoreLifetime.ident() {
+ if name.name == kw::UnderscoreLifetime {
continue;
}
for (i, (lifetime_i, lifetime_i_name)) in lifetimes.iter().enumerate() {
if let hir::ParamName::Plain(_) = lifetime_i_name {
let name = lifetime_i_name.ident().name;
- if name == keywords::UnderscoreLifetime.name()
- || name == keywords::StaticLifetime.name()
+ if name == kw::UnderscoreLifetime
+ || name == kw::StaticLifetime
{
let mut err = struct_span_err!(
self.tcx.sess,
remaining_lib_features.remove(&Symbol::intern("test"));
let check_features =
- |remaining_lib_features: &mut FxHashMap<_, _>, defined_features: &Vec<_>| {
+ |remaining_lib_features: &mut FxHashMap<_, _>, defined_features: &[_]| {
for &(feature, since) in defined_features {
if let Some(since) = since {
if let Some(span) = remaining_lib_features.get(&feature) {
if remaining_lib_features.is_empty() {
break;
}
- check_features(&mut remaining_lib_features, &tcx.defined_lib_features(cnum));
+ check_features(&mut remaining_lib_features, tcx.defined_lib_features(cnum));
}
}
/// predicate gets in the way of some checks, which are intended
/// to operate over only the actual where-clauses written by the
/// user.)
- query predicates_of(_: DefId) -> Lrc<ty::GenericPredicates<'tcx>> {}
+ query predicates_of(_: DefId) -> &'tcx ty::GenericPredicates<'tcx> {}
query native_libraries(_: CrateNum) -> Lrc<Vec<NativeLibrary>> {
desc { "looking up the native libraries of a linked crate" }
}
- query lint_levels(_: CrateNum) -> Lrc<lint::LintLevelMap> {
+ query lint_levels(_: CrateNum) -> &'tcx lint::LintLevelMap {
eval_always
desc { "computing the lint levels for items in this crate" }
}
}
Linking {
- query wasm_import_module_map(_: CrateNum) -> Lrc<FxHashMap<DefId, String>> {
+ query wasm_import_module_map(_: CrateNum) -> &'tcx FxHashMap<DefId, String> {
desc { "wasm import module map" }
}
}
/// equal to the `explicit_predicates_of` predicates plus the
/// `inferred_outlives_of` predicates.
query predicates_defined_on(_: DefId)
- -> Lrc<ty::GenericPredicates<'tcx>> {}
+ -> &'tcx ty::GenericPredicates<'tcx> {}
/// Returns the predicates written explicitly by the user.
query explicit_predicates_of(_: DefId)
- -> Lrc<ty::GenericPredicates<'tcx>> {}
+ -> &'tcx ty::GenericPredicates<'tcx> {}
/// Returns the inferred outlives predicates (e.g., for `struct
/// Foo<'a, T> { x: &'a T }`, this would return `T: 'a`).
/// evaluate them even during type conversion, often before the
/// full predicates are available (note that supertraits have
/// additional acyclicity requirements).
- query super_predicates_of(key: DefId) -> Lrc<ty::GenericPredicates<'tcx>> {
+ query super_predicates_of(key: DefId) -> &'tcx ty::GenericPredicates<'tcx> {
desc { |tcx| "computing the supertraits of `{}`", tcx.def_path_str(key) }
}
/// To avoid cycles within the predicates of a single item we compute
/// per-type-parameter predicates for resolving `T::AssocTy`.
query type_param_predicates(key: (DefId, DefId))
- -> Lrc<ty::GenericPredicates<'tcx>> {
+ -> &'tcx ty::GenericPredicates<'tcx> {
no_force
desc { |tcx| "computing the bounds for type parameter `{}`", {
let id = tcx.hir().as_local_hir_id(key.1).unwrap();
query static_mutability(_: DefId) -> Option<hir::Mutability> {}
/// Gets a map with the variance of every item; use `item_variance` instead.
- query crate_variances(_: CrateNum) -> Lrc<ty::CrateVariancesMap<'tcx>> {
+ query crate_variances(_: CrateNum) -> &'tcx ty::CrateVariancesMap<'tcx> {
desc { "computing the variances for items in this crate" }
}
TypeChecking {
/// Maps from thee `DefId` of a type to its (inferred) outlives.
query inferred_outlives_crate(_: CrateNum)
- -> Lrc<ty::CratePredicatesMap<'tcx>> {
+ -> &'tcx ty::CratePredicatesMap<'tcx> {
desc { "computing the inferred outlives predicates for items in this crate" }
}
}
Other {
/// Maps from an impl/trait `DefId to a list of the `DefId`s of its items.
- query associated_item_def_ids(_: DefId) -> Lrc<Vec<DefId>> {}
+ query associated_item_def_ids(_: DefId) -> &'tcx [DefId] {}
/// Maps from a trait item to the trait item "descriptor".
query associated_item(_: DefId) -> ty::AssociatedItem {}
/// Maps a `DefId` of a type to a list of its inherent impls.
/// Contains implementations of methods that are inherent to a type.
/// Methods in these implementations don't need to be exported.
- query inherent_impls(_: DefId) -> Lrc<Vec<DefId>> {
+ query inherent_impls(_: DefId) -> &'tcx [DefId] {
eval_always
}
}
}
Other {
- query used_trait_imports(_: DefId) -> Lrc<DefIdSet> {}
+ query used_trait_imports(_: DefId) -> &'tcx DefIdSet {}
}
TypeChecking {
}
BorrowChecking {
- query borrowck(_: DefId) -> Lrc<BorrowCheckResult> {}
+ query borrowck(_: DefId) -> &'tcx BorrowCheckResult {}
/// Borrow-checks the function body. If this is a closure, returns
/// additional requirements that the closure's creator must verify.
/// Not meant to be used directly outside of coherence.
/// (Defined only for `LOCAL_CRATE`.)
query crate_inherent_impls(k: CrateNum)
- -> Lrc<CrateInherentImpls> {
+ -> &'tcx CrateInherentImpls {
eval_always
desc { "all inherent impls defined in crate `{:?}`", k }
}
query check_match(_: DefId) -> () {}
/// Performs part of the privacy check and computes "access levels".
- query privacy_access_levels(_: CrateNum) -> Lrc<AccessLevels> {
+ query privacy_access_levels(_: CrateNum) -> &'tcx AccessLevels {
eval_always
desc { "privacy access levels" }
}
Other {
query dylib_dependency_formats(_: CrateNum)
- -> Lrc<Vec<(CrateNum, LinkagePreference)>> {
+ -> &'tcx [(CrateNum, LinkagePreference)] {
desc { "dylib dependency formats of crate" }
}
}
desc { "test whether a crate has #![no_builtins]" }
}
- query extern_crate(_: DefId) -> Lrc<Option<ExternCrate>> {
+ query extern_crate(_: DefId) -> Option<&'tcx ExternCrate> {
eval_always
desc { "getting crate's ExternCrateData" }
}
desc { "computing whether impls specialize one another" }
}
query in_scope_traits_map(_: DefIndex)
- -> Option<Lrc<FxHashMap<ItemLocalId, Lrc<StableVec<TraitCandidate>>>>> {
+ -> Option<&'tcx FxHashMap<ItemLocalId, StableVec<TraitCandidate>>> {
eval_always
desc { "traits in scope at a block" }
}
}
Other {
- query module_exports(_: DefId) -> Option<Lrc<Vec<Export<hir::HirId>>>> {
+ query module_exports(_: DefId) -> Option<&'tcx [Export<hir::HirId>]> {
eval_always
}
}
// Does not include external symbols that don't have a corresponding DefId,
// like the compiler-generated `main` function and so on.
query reachable_non_generics(_: CrateNum)
- -> Lrc<DefIdMap<SymbolExportLevel>> {
+ -> &'tcx DefIdMap<SymbolExportLevel> {
desc { "looking up the exported symbols of a crate" }
}
query is_reachable_non_generic(_: DefId) -> bool {}
Codegen {
query upstream_monomorphizations(
k: CrateNum
- ) -> Lrc<DefIdMap<Lrc<FxHashMap<SubstsRef<'tcx>, CrateNum>>>> {
+ ) -> &'tcx DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>> {
desc { "collecting available upstream monomorphizations `{:?}`", k }
}
query upstream_monomorphizations_for(_: DefId)
- -> Option<Lrc<FxHashMap<SubstsRef<'tcx>, CrateNum>>> {}
+ -> Option<&'tcx FxHashMap<SubstsRef<'tcx>, CrateNum>> {}
}
Other {
- query foreign_modules(_: CrateNum) -> Lrc<Vec<ForeignModule>> {
+ query foreign_modules(_: CrateNum) -> &'tcx [ForeignModule] {
desc { "looking up the foreign modules of a linked crate" }
}
TypeChecking {
query implementations_of_trait(_: (CrateNum, DefId))
- -> Lrc<Vec<DefId>> {
+ -> &'tcx [DefId] {
no_force
desc { "looking up implementations of a trait in a crate" }
}
query all_trait_implementations(_: CrateNum)
- -> Lrc<Vec<DefId>> {
+ -> &'tcx [DefId] {
desc { "looking up all (?) trait implementations" }
}
}
Other {
query dllimport_foreign_items(_: CrateNum)
- -> Lrc<FxHashSet<DefId>> {
+ -> &'tcx FxHashSet<DefId> {
desc { "dllimport_foreign_items" }
}
query is_dllimport_foreign_item(_: DefId) -> bool {}
BorrowChecking {
// Lifetime resolution. See `middle::resolve_lifetimes`.
- query resolve_lifetimes(_: CrateNum) -> Lrc<ResolveLifetimes> {
+ query resolve_lifetimes(_: CrateNum) -> &'tcx ResolveLifetimes {
desc { "resolving lifetimes" }
}
query named_region_map(_: DefIndex) ->
- Option<Lrc<FxHashMap<ItemLocalId, Region>>> {
+ Option<&'tcx FxHashMap<ItemLocalId, Region>> {
desc { "looking up a named region" }
}
query is_late_bound_map(_: DefIndex) ->
- Option<Lrc<FxHashSet<ItemLocalId>>> {
+ Option<&'tcx FxHashSet<ItemLocalId>> {
desc { "testing if a region is late bound" }
}
query object_lifetime_defaults_map(_: DefIndex)
- -> Option<Lrc<FxHashMap<ItemLocalId, Lrc<Vec<ObjectLifetimeDefault>>>>> {
+ -> Option<&'tcx FxHashMap<ItemLocalId, Vec<ObjectLifetimeDefault>>> {
desc { "looking up lifetime defaults for a region" }
}
}
eval_always
desc { "fetching what a crate is named" }
}
- query item_children(_: DefId) -> Lrc<Vec<Export<hir::HirId>>> {}
+ query item_children(_: DefId) -> &'tcx [Export<hir::HirId>] {}
query extern_mod_stmt_cnum(_: DefId) -> Option<CrateNum> {}
- query get_lib_features(_: CrateNum) -> Lrc<LibFeatures> {
+ query get_lib_features(_: CrateNum) -> &'tcx LibFeatures {
eval_always
desc { "calculating the lib features map" }
}
query defined_lib_features(_: CrateNum)
- -> Lrc<Vec<(Symbol, Option<Symbol>)>> {
+ -> &'tcx [(Symbol, Option<Symbol>)] {
desc { "calculating the lib features defined in a crate" }
}
- query get_lang_items(_: CrateNum) -> Lrc<LanguageItems> {
+ query get_lang_items(_: CrateNum) -> &'tcx LanguageItems {
eval_always
desc { "calculating the lang items map" }
}
- query defined_lang_items(_: CrateNum) -> Lrc<Vec<(DefId, usize)>> {
+ query defined_lang_items(_: CrateNum) -> &'tcx [(DefId, usize)] {
desc { "calculating the lang items defined in a crate" }
}
- query missing_lang_items(_: CrateNum) -> Lrc<Vec<LangItem>> {
+ query missing_lang_items(_: CrateNum) -> &'tcx [LangItem] {
desc { "calculating the missing lang items in a crate" }
}
query visible_parent_map(_: CrateNum)
- -> Lrc<DefIdMap<DefId>> {
+ -> &'tcx DefIdMap<DefId> {
desc { "calculating the visible parent map" }
}
query missing_extern_crate_item(_: CrateNum) -> bool {
eval_always
desc { "looking at the source for a crate" }
}
- query postorder_cnums(_: CrateNum) -> Lrc<Vec<CrateNum>> {
+ query postorder_cnums(_: CrateNum) -> &'tcx [CrateNum] {
eval_always
desc { "generating a postorder list of CrateNums" }
}
- query upvars(_: DefId) -> Option<Lrc<Vec<hir::Upvar>>> {
+ query upvars(_: DefId) -> Option<&'tcx [hir::Upvar]> {
eval_always
}
query maybe_unused_trait_import(_: DefId) -> bool {
eval_always
}
query maybe_unused_extern_crates(_: CrateNum)
- -> Lrc<Vec<(DefId, Span)>> {
+ -> &'tcx [(DefId, Span)] {
eval_always
desc { "looking up all possibly unused extern crates" }
}
eval_always
}
- query stability_index(_: CrateNum) -> Lrc<stability::Index<'tcx>> {
+ query stability_index(_: CrateNum) -> &'tcx stability::Index<'tcx> {
eval_always
desc { "calculating the stability index for the local crate" }
}
- query all_crate_nums(_: CrateNum) -> Lrc<Vec<CrateNum>> {
+ query all_crate_nums(_: CrateNum) -> &'tcx [CrateNum] {
eval_always
desc { "fetching all foreign CrateNum instances" }
}
/// A vector of every trait accessible in the whole crate
/// (i.e., including those from subcrates). This is used only for
/// error reporting.
- query all_traits(_: CrateNum) -> Lrc<Vec<DefId>> {
+ query all_traits(_: CrateNum) -> &'tcx [DefId] {
desc { "fetching all foreign and local traits" }
}
}
}
Other {
- query target_features_whitelist(_: CrateNum) -> Lrc<FxHashMap<String, Option<Symbol>>> {
+ query target_features_whitelist(_: CrateNum) -> &'tcx FxHashMap<String, Option<Symbol>> {
eval_always
desc { "looking up the whitelist of target features" }
}
desc { |tcx| "estimating size for `{}`", tcx.def_path_str(def.def_id()) }
}
- query features_query(_: CrateNum) -> Lrc<feature_gate::Features> {
+ query features_query(_: CrateNum) -> &'tcx feature_gate::Features {
eval_always
desc { "looking up enabled feature gates" }
}
}
}
- // We can only call poly_project_and_unify_type when our predicate's
- // Ty contains an inference variable - otherwise, there won't be anything to
- // unify
- if p.ty().skip_binder().has_infer_types() {
- debug!("Projecting and unifying projection predicate {:?}",
- predicate);
- match poly_project_and_unify_type(select, &obligation.with(p)) {
- Err(e) => {
- debug!(
- "evaluate_nested_obligations: Unable to unify predicate \
- '{:?}' '{:?}', bailing out",
- ty, e
- );
- return false;
- }
- Ok(Some(v)) => {
+ // There are three possible cases when we project a predicate:
+ //
+ // 1. We encounter an error. This means that it's impossible for
+ // our current type to implement the auto trait - there's bound
+ // that we could add to our ParamEnv that would 'fix' this kind
+ // of error, as it's not caused by an unimplemented type.
+ //
+ // 2. We successfully project the predicate (Ok(Some(_))), generating
+ // some subobligations. We then process these subobligations
+ // like any other generated sub-obligations.
+ //
+ // 3. We receive an 'ambiguous' result (Ok(None))
+ // If we were actually trying to compile a crate,
+ // we would need to re-process this obligation later.
+ // However, all we care about is finding out what bounds
+ // are needed for our type to implement a particular auto trait.
+ // We've already added this obligation to our computed ParamEnv
+ // above (if it was necessary). Therefore, we don't need
+ // to do any further processing of the obligation.
+ //
+ // Note that we *must* try to project *all* projection predicates
+ // we encounter, even ones without inference variable.
+ // This ensures that we detect any projection errors,
+ // which indicate that our type can *never* implement the given
+ // auto trait. In that case, we will generate an explicit negative
+ // impl (e.g. 'impl !Send for MyType'). However, we don't
+ // try to process any of the generated subobligations -
+ // they contain no new information, since we already know
+ // that our type implements the projected-through trait,
+ // and can lead to weird region issues.
+ //
+ // Normally, we'll generate a negative impl as a result of encountering
+ // a type with an explicit negative impl of an auto trait
+ // (for example, raw pointers have !Send and !Sync impls)
+ // However, through some **interesting** manipulations of the type
+ // system, it's actually possible to write a type that never
+ // implements an auto trait due to a projection error, not a normal
+ // negative impl error. To properly handle this case, we need
+ // to ensure that we catch any potential projection errors,
+ // and turn them into an explicit negative impl for our type.
+ debug!("Projecting and unifying projection predicate {:?}",
+ predicate);
+
+ match poly_project_and_unify_type(select, &obligation.with(p)) {
+ Err(e) => {
+ debug!(
+ "evaluate_nested_obligations: Unable to unify predicate \
+ '{:?}' '{:?}', bailing out",
+ ty, e
+ );
+ return false;
+ }
+ Ok(Some(v)) => {
+ // We only care about sub-obligations
+ // when we started out trying to unify
+ // some inference variables. See the comment above
+ // for more information
+ if p.ty().skip_binder().has_infer_types() {
if !self.evaluate_nested_obligations(
ty,
v.clone().iter().cloned(),
return false;
}
}
- Ok(None) => {
+ }
+ Ok(None) => {
+ // It's ok not to make progress when we have no inference variables -
+ // in that case, we were only performing unification to check if an
+ // error occurred (which would indicate that it's impossible for our
+ // type to implement the auto trait).
+ // However, we should always make progress (either by generating
+ // subobligations or getting an error) when we started off with
+ // inference variables
+ if p.ty().skip_binder().has_infer_types() {
panic!("Unexpected result when selecting {:?} {:?}", ty, obligation)
}
}
use syntax::attr;
use syntax::source_map::MultiSpan;
use syntax::feature_gate;
-use syntax::symbol::{Symbol, keywords, InternedString, sym};
+use syntax::symbol::{Symbol, InternedString, kw, sym};
use syntax_pos::Span;
use crate::hir;
}
}
+pub struct Common<'tcx> {
+ pub empty_predicates: ty::GenericPredicates<'tcx>,
+}
+
pub struct CommonTypes<'tcx> {
pub unit: Ty<'tcx>,
pub bool: Ty<'tcx>,
pub dep_graph: DepGraph,
+ /// Common objects.
+ pub common: Common<'tcx>,
+
/// Common types, pre-interned for your convenience.
pub types: CommonTypes<'tcx>,
/// Map indicating what traits are in scope for places where this
/// is relevant; generated by resolve.
trait_map: FxHashMap<DefIndex,
- Lrc<FxHashMap<ItemLocalId,
- Lrc<StableVec<TraitCandidate>>>>>,
+ FxHashMap<ItemLocalId,
+ StableVec<TraitCandidate>>>,
/// Export map produced by name resolution.
- export_map: FxHashMap<DefId, Lrc<Vec<Export<hir::HirId>>>>,
+ export_map: FxHashMap<DefId, Vec<Export<hir::HirId>>>,
hir_map: hir_map::Map<'tcx>,
// Records the captured variables referenced by every closure
// expression. Do not track deps for this, just recompute it from
// scratch every time.
- upvars: FxHashMap<DefId, Lrc<Vec<hir::Upvar>>>,
+ upvars: FxHashMap<DefId, Vec<hir::Upvar>>,
maybe_unused_trait_imports: FxHashSet<DefId>,
maybe_unused_extern_crates: Vec<(DefId, Span)>,
s.fatal(&err);
});
let interners = CtxtInterners::new(&arenas.interner);
+ let common = Common {
+ empty_predicates: ty::GenericPredicates {
+ parent: None,
+ predicates: vec![],
+ },
+ };
let common_types = CommonTypes::new(&interners);
let common_lifetimes = CommonLifetimes::new(&interners);
let common_consts = CommonConsts::new(&interners, &common_types);
None
};
- let mut trait_map: FxHashMap<_, Lrc<FxHashMap<_, _>>> = FxHashMap::default();
+ let mut trait_map: FxHashMap<_, FxHashMap<_, _>> = FxHashMap::default();
for (k, v) in resolutions.trait_map {
let hir_id = hir.node_to_hir_id(k);
let map = trait_map.entry(hir_id.owner).or_default();
- Lrc::get_mut(map).unwrap()
- .insert(hir_id.local_id,
- Lrc::new(StableVec::new(v)));
+ map.insert(hir_id.local_id, StableVec::new(v));
}
GlobalCtxt {
global_arenas: &arenas.global,
global_interners: interners,
dep_graph,
+ common,
types: common_types,
lifetimes: common_lifetimes,
consts: common_consts,
let exports: Vec<_> = v.into_iter().map(|e| {
e.map_id(|id| hir.node_to_hir_id(id))
}).collect();
- (k, Lrc::new(exports))
+ (k, exports)
}).collect(),
upvars: resolutions.upvars.into_iter().map(|(k, v)| {
let vars: Vec<_> = v.into_iter().map(|e| {
e.map_id(|id| hir.node_to_hir_id(id))
}).collect();
- (hir.local_def_id(k), Lrc::new(vars))
+ (hir.local_def_id(k), vars)
}).collect(),
maybe_unused_trait_imports:
resolutions.maybe_unused_trait_imports
self.sess.consider_optimizing(&cname, msg)
}
- pub fn lib_features(self) -> Lrc<middle::lib_features::LibFeatures> {
+ pub fn lib_features(self) -> &'gcx middle::lib_features::LibFeatures {
self.get_lib_features(LOCAL_CRATE)
}
- pub fn lang_items(self) -> Lrc<middle::lang_items::LanguageItems> {
+ pub fn lang_items(self) -> &'gcx middle::lang_items::LanguageItems {
self.get_lang_items(LOCAL_CRATE)
}
else { None }
}
- pub fn stability(self) -> Lrc<stability::Index<'tcx>> {
+ pub fn stability(self) -> &'gcx stability::Index<'gcx> {
self.stability_index(LOCAL_CRATE)
}
- pub fn crates(self) -> Lrc<Vec<CrateNum>> {
+ pub fn crates(self) -> &'gcx [CrateNum] {
self.all_crate_nums(LOCAL_CRATE)
}
- pub fn features(self) -> Lrc<feature_gate::Features> {
+ pub fn features(self) -> &'gcx feature_gate::Features {
self.features_query(LOCAL_CRATE)
}
#[inline]
pub fn mk_self_type(self) -> Ty<'tcx> {
- self.mk_ty_param(0, keywords::SelfUpper.name().as_interned_str())
+ self.mk_ty_param(0, kw::SelfUpper.as_interned_str())
}
pub fn mk_param_from_def(self, param: &ty::GenericParamDef) -> Kind<'tcx> {
lint::struct_lint_level(self.sess, lint, level, src, None, msg)
}
- pub fn in_scope_traits(self, id: HirId) -> Option<Lrc<StableVec<TraitCandidate>>> {
+ pub fn in_scope_traits(self, id: HirId) -> Option<&'gcx StableVec<TraitCandidate>> {
self.in_scope_traits_map(id.owner)
- .and_then(|map| map.get(&id.local_id).cloned())
+ .and_then(|map| map.get(&id.local_id))
}
pub fn named_region(self, id: HirId) -> Option<resolve_lifetime::Region> {
}
pub fn object_lifetime_defaults(self, id: HirId)
- -> Option<Lrc<Vec<ObjectLifetimeDefault>>>
+ -> Option<&'gcx [ObjectLifetimeDefault]>
{
self.object_lifetime_defaults_map(id.owner)
- .and_then(|map| map.get(&id.local_id).cloned())
+ .and_then(|map| map.get(&id.local_id).map(|v| &**v))
}
}
}
pub fn provide(providers: &mut ty::query::Providers<'_>) {
- providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id).cloned();
- providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).cloned();
+ providers.in_scope_traits_map = |tcx, id| tcx.gcx.trait_map.get(&id);
+ providers.module_exports = |tcx, id| tcx.gcx.export_map.get(&id).map(|v| &v[..]);
providers.crate_name = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
tcx.crate_name
};
providers.get_lib_features = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
- Lrc::new(middle::lib_features::collect(tcx))
+ tcx.arena.alloc(middle::lib_features::collect(tcx))
};
providers.get_lang_items = |tcx, id| {
assert_eq!(id, LOCAL_CRATE);
- Lrc::new(middle::lang_items::collect(tcx))
+ tcx.arena.alloc(middle::lang_items::collect(tcx))
};
- providers.upvars = |tcx, id| tcx.gcx.upvars.get(&id).cloned();
+ providers.upvars = |tcx, id| tcx.gcx.upvars.get(&id).map(|v| &v[..]);
providers.maybe_unused_trait_import = |tcx, id| {
tcx.maybe_unused_trait_imports.contains(&id)
};
providers.maybe_unused_extern_crates = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(tcx.maybe_unused_extern_crates.clone())
+ &tcx.maybe_unused_extern_crates[..]
};
providers.names_imported_by_glob_use = |tcx, id| {
assert_eq!(id.krate, LOCAL_CRATE);
providers.stability_index = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(stability::Index::new(tcx))
+ tcx.arena.alloc(stability::Index::new(tcx))
};
providers.lookup_stability = |tcx, id| {
assert_eq!(id.krate, LOCAL_CRATE);
};
providers.all_crate_nums = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(tcx.cstore.crates_untracked())
+ tcx.arena.alloc_slice(&tcx.cstore.crates_untracked())
};
providers.postorder_cnums = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(tcx.cstore.postorder_cnums_untracked())
+ tcx.arena.alloc_slice(&tcx.cstore.postorder_cnums_untracked())
};
providers.output_filenames = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
};
providers.features_query = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(tcx.sess.features_untracked().clone())
+ tcx.arena.alloc(tcx.sess.features_untracked().clone())
};
providers.is_panic_runtime = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
use syntax::ast::{self, Name, Ident, NodeId};
use syntax::attr;
use syntax::ext::hygiene::Mark;
-use syntax::symbol::{keywords, sym, Symbol, LocalInternedString, InternedString};
+use syntax::symbol::{kw, sym, Symbol, LocalInternedString, InternedString};
use syntax_pos::Span;
use smallvec;
/// Does this early bound region have a name? Early bound regions normally
/// always have names except when using anonymous lifetimes (`'_`).
pub fn has_name(&self) -> bool {
- self.name != keywords::UnderscoreLifetime.name().as_interned_str()
+ self.name != kw::UnderscoreLifetime.as_interned_str()
}
}
}
#[inline]
- pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Lrc<GenericPredicates<'gcx>> {
+ pub fn predicates(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> &'tcx GenericPredicates<'gcx> {
tcx.predicates_of(self.did)
}
pub struct AssociatedItemsIterator<'a, 'gcx: 'tcx, 'tcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'tcx>,
- def_ids: Lrc<Vec<DefId>>,
+ def_ids: &'gcx [DefId],
next_index: usize,
}
fn associated_item_def_ids<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
- -> Lrc<Vec<DefId>> {
+ -> &'tcx [DefId] {
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
let item = tcx.hir().expect_item_by_hir_id(id);
- let vec: Vec<_> = match item.node {
+ match item.node {
hir::ItemKind::Trait(.., ref trait_item_refs) => {
- trait_item_refs.iter()
- .map(|trait_item_ref| trait_item_ref.id)
- .map(|id| tcx.hir().local_def_id_from_hir_id(id.hir_id))
- .collect()
+ tcx.arena.alloc_from_iter(
+ trait_item_refs.iter()
+ .map(|trait_item_ref| trait_item_ref.id)
+ .map(|id| tcx.hir().local_def_id_from_hir_id(id.hir_id))
+ )
}
hir::ItemKind::Impl(.., ref impl_item_refs) => {
- impl_item_refs.iter()
- .map(|impl_item_ref| impl_item_ref.id)
- .map(|id| tcx.hir().local_def_id_from_hir_id(id.hir_id))
- .collect()
+ tcx.arena.alloc_from_iter(
+ impl_item_refs.iter()
+ .map(|impl_item_ref| impl_item_ref.id)
+ .map(|id| tcx.hir().local_def_id_from_hir_id(id.hir_id))
+ )
}
- hir::ItemKind::TraitAlias(..) => vec![],
+ hir::ItemKind::TraitAlias(..) => &[],
_ => span_bug!(item.span, "associated_item_def_ids: not impl or trait")
- };
- Lrc::new(vec)
+ }
}
fn def_span<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Span {
/// (constructing this map requires touching the entire crate).
#[derive(Clone, Debug, Default, HashStable)]
pub struct CrateInherentImpls {
- pub inherent_impls: DefIdMap<Lrc<Vec<DefId>>>,
+ pub inherent_impls: DefIdMap<Vec<DefId>>,
}
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable)]
use crate::ty::subst::{Kind, Subst, UnpackedKind};
use crate::mir::interpret::ConstValue;
use rustc_target::spec::abi::Abi;
-use syntax::symbol::{keywords, InternedString};
+use syntax::symbol::{kw, InternedString};
use std::cell::Cell;
use std::fmt::{self, Write as _};
// 2. for an extern inferred from a path or an indirect crate,
// where there is no explicit `extern crate`, we just prepend
// the crate name.
- match *self.tcx().extern_crate(def_id) {
- Some(ExternCrate {
+ match self.tcx().extern_crate(def_id) {
+ Some(&ExternCrate {
src: ExternCrateSource::Extern(def_id),
direct: true,
span,
if self.tcx.sess.rust_2018() {
// We add the `crate::` keyword on Rust 2018, only when desired.
if SHOULD_PREFIX_WITH_CRATE.with(|flag| flag.get()) {
- write!(self, "{}", keywords::Crate.name())?;
+ write!(self, "{}", kw::Crate)?;
self.empty_path = false;
}
}
match *region {
ty::ReEarlyBound(ref data) => {
- data.name.as_symbol() != keywords::Invalid.name() &&
- data.name.as_symbol() != keywords::UnderscoreLifetime.name()
+ data.name.as_symbol() != kw::Invalid &&
+ data.name.as_symbol() != kw::UnderscoreLifetime
}
ty::ReLateBound(_, br) |
ty::ReFree(ty::FreeRegion { bound_region: br, .. }) |
ty::RePlaceholder(ty::Placeholder { name: br, .. }) => {
if let ty::BrNamed(_, name) = br {
- if name.as_symbol() != keywords::Invalid.name() &&
- name.as_symbol() != keywords::UnderscoreLifetime.name() {
+ if name.as_symbol() != kw::Invalid &&
+ name.as_symbol() != kw::UnderscoreLifetime {
return true;
}
}
// `explain_region()` or `note_and_explain_region()`.
match *region {
ty::ReEarlyBound(ref data) => {
- if data.name.as_symbol() != keywords::Invalid.name() {
+ if data.name.as_symbol() != kw::Invalid {
p!(write("{}", data.name));
return Ok(self);
}
ty::ReFree(ty::FreeRegion { bound_region: br, .. }) |
ty::RePlaceholder(ty::Placeholder { name: br, .. }) => {
if let ty::BrNamed(_, name) = br {
- if name.as_symbol() != keywords::Invalid.name() &&
- name.as_symbol() != keywords::UnderscoreLifetime.name() {
+ if name.as_symbol() != kw::Invalid &&
+ name.as_symbol() != kw::UnderscoreLifetime {
p!(write("{}", name));
return Ok(self);
}
}
}
-impl<'tcx, T: Default> Value<'tcx> for T {
- default fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> T {
- T::default()
- }
-}
-
impl<'tcx> Value<'tcx> for Ty<'tcx> {
fn from_cycle_error<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> Ty<'tcx> {
tcx.types.err
use std::ops::Range;
use rustc_target::spec::abi;
use syntax::ast::{self, Ident};
-use syntax::symbol::{keywords, InternedString};
+use syntax::symbol::{kw, InternedString};
use serialize;
use self::InferTy::*;
}
pub fn for_self() -> ParamTy {
- ParamTy::new(0, keywords::SelfUpper.name().as_interned_str())
+ ParamTy::new(0, kw::SelfUpper.as_interned_str())
}
pub fn for_def(def: &ty::GenericParamDef) -> ParamTy {
// FIXME(#50125): Ignoring `Self` with `index != 0` might lead to weird behavior elsewhere,
// but this should only be possible when using `-Z continue-parse-after-error` like
// `compile-fail/issue-36638.rs`.
- self.name.as_symbol() == keywords::SelfUpper.name() && self.index == 0
+ self.name.as_symbol() == kw::SelfUpper && self.index == 0
}
}
mut_visit::{self, MutVisitor},
parse::ParseSess,
ptr::P,
- symbol::{keywords, Symbol, sym}
+ symbol::{kw, sym, Symbol}
};
use syntax_pos::Span;
// We will generate a new submodule. To `use` the static from that module, we need to get
// the `super::...` path.
- let super_path =
- f.cx.path(f.span, vec![Ident::with_empty_ctxt(keywords::Super.name()), f.global]);
+ let super_path = f.cx.path(f.span, vec![Ident::with_empty_ctxt(kw::Super), f.global]);
// Generate the items in the submodule
let mut items = vec![
use std::cell::{Cell, RefCell};
use std::fmt;
use std::rc::Rc;
-use rustc_data_structures::sync::Lrc;
use std::hash::{Hash, Hasher};
use syntax::source_map::CompilerDesugaringKind;
use syntax_pos::{MultiSpan, Span};
}
fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId)
- -> Lrc<BorrowCheckResult>
+ -> &'tcx BorrowCheckResult
{
assert!(tcx.use_ast_borrowck() || tcx.migrate_borrowck());
// those things (notably the synthesized constructors from
// tuple structs/variants) do not have an associated body
// and do not need borrowchecking.
- return Lrc::new(BorrowCheckResult {
+ return tcx.arena.alloc(BorrowCheckResult {
used_mut_nodes: Default::default(),
signalled_any_error: SignalledError::NoErrorsSeen,
})
check_loans::check_loans(&mut bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
}
- Lrc::new(BorrowCheckResult {
+ tcx.arena.alloc(BorrowCheckResult {
used_mut_nodes: bccx.used_mut_nodes.into_inner(),
signalled_any_error: bccx.signalled_any_error.into_inner(),
})
use rustc::ty::layout::HasTyCtxt;
use rustc::ty::query::Providers;
use rustc_data_structures::small_c_str::SmallCStr;
-use rustc_data_structures::sync::Lrc;
use rustc_data_structures::fx::FxHashMap;
use rustc_target::spec::PanicStrategy;
use rustc_codegen_ssa::traits::*;
if tcx.sess.opts.actually_rustdoc {
// rustdoc needs to be able to document functions that use all the features, so
// whitelist them all
- Lrc::new(llvm_util::all_known_features()
+ tcx.arena.alloc(llvm_util::all_known_features()
.map(|(a, b)| (a.to_string(), b))
.collect())
} else {
- Lrc::new(llvm_util::target_feature_whitelist(tcx.sess)
+ tcx.arena.alloc(llvm_util::target_feature_whitelist(tcx.sess)
.iter()
.map(|&(a, b)| (a.to_string(), b))
.collect())
}));
}
- Lrc::new(ret)
+ tcx.arena.alloc(ret)
};
}
}
}
if sess.opts.debugging_opts.emit_artifact_notifications {
- sess.parse_sess.span_diagnostic.emit_artifact_notification(&out_filename);
+ sess.parse_sess.span_diagnostic.emit_artifact_notification(&out_filename, "link");
}
}
-use rustc_data_structures::sync::Lrc;
use std::sync::Arc;
use rustc::ty::Instance;
fn reachable_non_generics_provider<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
cnum: CrateNum)
- -> Lrc<DefIdMap<SymbolExportLevel>>
+ -> &'tcx DefIdMap<SymbolExportLevel>
{
assert_eq!(cnum, LOCAL_CRATE);
if !tcx.sess.opts.output_types.should_codegen() {
- return Default::default();
+ return tcx.arena.alloc(Default::default());
}
// Check to see if this crate is a "special runtime crate". These
reachable_non_generics.insert(id, SymbolExportLevel::C);
}
- Lrc::new(reachable_non_generics)
+ tcx.arena.alloc(reachable_non_generics)
}
fn is_reachable_non_generic_provider_local<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
fn upstream_monomorphizations_provider<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
cnum: CrateNum)
- -> Lrc<DefIdMap<Lrc<FxHashMap<SubstsRef<'tcx>, CrateNum>>>>
+ -> &'tcx DefIdMap<FxHashMap<SubstsRef<'tcx>, CrateNum>>
{
debug_assert!(cnum == LOCAL_CRATE);
}
}
- Lrc::new(instances.into_iter()
- .map(|(key, value)| (key, Lrc::new(value)))
- .collect())
+ tcx.arena.alloc(instances)
}
fn upstream_monomorphizations_for_provider<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
- -> Option<Lrc<FxHashMap<SubstsRef<'tcx>, CrateNum>>>
+ -> Option<&'tcx FxHashMap<SubstsRef<'tcx>, CrateNum>>
{
debug_assert!(!def_id.is_local());
- tcx.upstream_monomorphizations(LOCAL_CRATE)
- .get(&def_id)
- .cloned()
+ tcx.upstream_monomorphizations(LOCAL_CRATE).get(&def_id)
}
fn is_unreachable_local_definition_provider(tcx: TyCtxt<'_, '_, '_>, def_id: DefId) -> bool {
use rustc_mir::monomorphize::partitioning::{CodegenUnit, CodegenUnitExt};
use rustc::util::nodemap::FxHashMap;
use rustc_data_structures::indexed_vec::Idx;
-use rustc_data_structures::sync::Lrc;
use rustc_codegen_utils::{symbol_names_test, check_for_rustc_errors_attr};
use rustc::ty::layout::{FAT_PTR_ADDR, FAT_PTR_EXTRA};
use crate::mir::place::PlaceRef;
.map(|id| &module_map[&id])
.flat_map(|module| module.foreign_items.iter().cloned())
.collect();
- Lrc::new(dllimports)
+ tcx.arena.alloc(dllimports)
};
providers.is_dllimport_foreign_item = |tcx, def_id| {
use crate::traits::*;
use syntax_pos::{DUMMY_SP, NO_EXPANSION, BytePos, Span};
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use std::iter;
};
bx.declare_local(
&fx.debug_context,
- arg_decl.name.unwrap_or(keywords::Invalid.name()),
+ arg_decl.name.unwrap_or(kw::Invalid),
arg_ty, scope,
variable_access,
VariableKind::ArgumentVariable(arg_index + 1),
bx.declare_local(
&fx.debug_context,
- arg_decl.name.unwrap_or(keywords::Invalid.name()),
+ arg_decl.name.unwrap_or(kw::Invalid),
arg.layout.ty,
scope,
variable_access,
) -> Option<OperandRef<'tcx, Bx::Value>> {
debug!("maybe_codegen_consume_direct(place={:?})", place);
- // watch out for locals that do not have an
- // alloca; they are handled somewhat differently
- if let mir::Place::Base(mir::PlaceBase::Local(index)) = *place {
- match self.locals[index] {
- LocalRef::Operand(Some(o)) => {
- return Some(o);
- }
- LocalRef::Operand(None) => {
- bug!("use of {:?} before def", place);
- }
- LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
- // use path below
- }
- }
- }
+ place.iterate(|place_base, place_projection| {
+ if let mir::PlaceBase::Local(index) = place_base {
+ match self.locals[*index] {
+ LocalRef::Operand(Some(mut o)) => {
+ // Moves out of scalar and scalar pair fields are trivial.
+ for proj in place_projection {
+ match proj.elem {
+ mir::ProjectionElem::Field(ref f, _) => {
+ o = o.extract_field(bx, f.index());
+ }
+ mir::ProjectionElem::Index(_) |
+ mir::ProjectionElem::ConstantIndex { .. } => {
+ // ZSTs don't require any actual memory access.
+ // FIXME(eddyb) deduplicate this with the identical
+ // checks in `codegen_consume` and `extract_field`.
+ let elem = o.layout.field(bx.cx(), 0);
+ if elem.is_zst() {
+ o = OperandRef::new_zst(bx, elem);
+ } else {
+ return None;
+ }
+ }
+ _ => return None,
+ }
+ }
- // Moves out of scalar and scalar pair fields are trivial.
- if let &mir::Place::Projection(ref proj) = place {
- if let Some(o) = self.maybe_codegen_consume_direct(bx, &proj.base) {
- match proj.elem {
- mir::ProjectionElem::Field(ref f, _) => {
- return Some(o.extract_field(bx, f.index()));
+ Some(o)
}
- mir::ProjectionElem::Index(_) |
- mir::ProjectionElem::ConstantIndex { .. } => {
- // ZSTs don't require any actual memory access.
- // FIXME(eddyb) deduplicate this with the identical
- // checks in `codegen_consume` and `extract_field`.
- let elem = o.layout.field(bx.cx(), 0);
- if elem.is_zst() {
- return Some(OperandRef::new_zst(bx, elem));
- }
+ LocalRef::Operand(None) => {
+ bug!("use of {:?} before def", place);
+ }
+ LocalRef::Place(..) | LocalRef::UnsizedPlace(..) => {
+ // watch out for locals that do not have an
+ // alloca; they are handled somewhat differently
+ None
}
- _ => {}
}
+ } else {
+ None
}
- }
-
- None
+ })
}
pub fn codegen_consume(
/// Emit a notification that an artifact has been output.
/// This is currently only supported for the JSON format,
/// other formats can, and will, simply ignore it.
- fn emit_artifact_notification(&mut self, _path: &Path) {}
+ fn emit_artifact_notification(&mut self, _path: &Path, _artifact_type: &str) {}
/// Checks if should show explanations about "rustc --explain"
fn should_show_explain(&self) -> bool {
}
}
Style::Quotation => {}
- Style::OldSchoolNoteText | Style::MainHeaderMsg => {
+ Style::MainHeaderMsg => {
spec.set_bold(true);
if cfg!(windows) {
spec.set_intense(true)
}
}
- pub fn emit_artifact_notification(&self, path: &Path) {
- self.emitter.borrow_mut().emit_artifact_notification(path);
+ pub fn emit_artifact_notification(&self, path: &Path, artifact_type: &str) {
+ self.emitter.borrow_mut().emit_artifact_notification(path, artifact_type);
}
}
UnderlineSecondary,
LabelPrimary,
LabelSecondary,
- OldSchoolNoteText,
NoStyle,
Level(Level),
Highlight,
tcx.sess.fatal(&format!("failed to write {}: {}", out_filename.display(), e));
}
if tcx.sess.opts.debugging_opts.emit_artifact_notifications {
- tcx.sess.parse_sess.span_diagnostic.emit_artifact_notification(&out_filename);
+ tcx.sess.parse_sess.span_diagnostic
+ .emit_artifact_notification(&out_filename, "metadata");
}
}
use syntax::feature_gate::{AttributeGate, AttributeTemplate, AttributeType};
use syntax::feature_gate::{Stability, deprecated_attributes};
use syntax_pos::{BytePos, Span, SyntaxContext};
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax::errors::{Applicability, DiagnosticBuilder};
use syntax::print::pprust::expr_to_string;
use syntax::visit::FnKind;
for arg in sig.decl.inputs.iter() {
match arg.pat.node {
ast::PatKind::Ident(_, ident, None) => {
- if ident.name == keywords::Invalid.name() {
+ if ident.name == kw::Invalid {
let ty_snip = cx
.sess
.source_map()
#![allow(non_snake_case)]
use rustc::hir::{ExprKind, Node};
+use crate::hir::def_id::DefId;
use rustc::hir::lowering::is_range_literal;
use rustc::ty::subst::SubstsRef;
use rustc::ty::{self, AdtKind, ParamEnv, Ty, TyCtxt};
-use rustc::ty::layout::{self, IntegerExt, LayoutOf, VariantIdx};
+use rustc::ty::layout::{self, IntegerExt, LayoutOf, VariantIdx, SizeSkeleton};
use rustc::{lint, util};
use rustc_data_structures::indexed_vec::Idx;
use util::nodemap::FxHashSet;
use std::cmp;
use std::{i8, i16, i32, i64, u8, u16, u32, u64, f32, f64};
-use syntax::{ast, attr};
+use syntax::{ast, attr, source_map};
use syntax::errors::Applicability;
+use syntax::symbol::sym;
use rustc_target::spec::abi::Abi;
use syntax_pos::Span;
-use syntax::source_map;
use rustc::hir;
},
}
+fn is_zst<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, did: DefId, ty: Ty<'tcx>) -> bool {
+ tcx.layout_of(tcx.param_env(did).and(ty)).map(|layout| layout.is_zst()).unwrap_or(false)
+}
+
+fn ty_is_known_nonnull<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, ty: Ty<'tcx>) -> bool {
+ match ty.sty {
+ ty::FnPtr(_) => true,
+ ty::Ref(..) => true,
+ ty::Adt(field_def, substs) if field_def.repr.transparent() && field_def.is_struct() => {
+ for field in &field_def.non_enum_variant().fields {
+ let field_ty = tcx.normalize_erasing_regions(
+ ParamEnv::reveal_all(),
+ field.ty(tcx, substs),
+ );
+ if is_zst(tcx, field.did, field_ty) {
+ continue;
+ }
+
+ let attrs = tcx.get_attrs(field_def.did);
+ if attrs.iter().any(|a| a.check_name(sym::rustc_nonnull_optimization_guaranteed)) ||
+ ty_is_known_nonnull(tcx, field_ty) {
+ return true;
+ }
+ }
+
+ false
+ }
+ _ => false,
+ }
+}
+
/// Check if this enum can be safely exported based on the
/// "nullable pointer optimization". Currently restricted
-/// to function pointers and references, but could be
-/// expanded to cover NonZero raw pointers and newtypes.
+/// to function pointers, references, core::num::NonZero*,
+/// core::ptr::NonNull, and #[repr(transparent)] newtypes.
/// FIXME: This duplicates code in codegen.
fn is_repr_nullable_ptr<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def: &'tcx ty::AdtDef,
+ ty: Ty<'tcx>,
+ ty_def: &'tcx ty::AdtDef,
substs: SubstsRef<'tcx>)
-> bool {
- if def.variants.len() == 2 {
- let data_idx;
+ if ty_def.variants.len() != 2 {
+ return false;
+ }
- let zero = VariantIdx::new(0);
- let one = VariantIdx::new(1);
+ let get_variant_fields = |index| &ty_def.variants[VariantIdx::new(index)].fields;
+ let variant_fields = [get_variant_fields(0), get_variant_fields(1)];
+ let fields = if variant_fields[0].is_empty() {
+ &variant_fields[1]
+ } else if variant_fields[1].is_empty() {
+ &variant_fields[0]
+ } else {
+ return false;
+ };
- if def.variants[zero].fields.is_empty() {
- data_idx = one;
- } else if def.variants[one].fields.is_empty() {
- data_idx = zero;
- } else {
- return false;
- }
+ if fields.len() != 1 {
+ return false;
+ }
- if def.variants[data_idx].fields.len() == 1 {
- match def.variants[data_idx].fields[0].ty(tcx, substs).sty {
- ty::FnPtr(_) => {
- return true;
- }
- ty::Ref(..) => {
- return true;
- }
- _ => {}
- }
- }
+ let field_ty = fields[0].ty(tcx, substs);
+ if !ty_is_known_nonnull(tcx, field_ty) {
+ return false;
}
- false
+
+ // At this point, the field's type is known to be nonnull and the parent enum is Option-like.
+ // If the computed size for the field and the enum are different, the nonnull optimization isn't
+ // being applied (and we've got a problem somewhere).
+ let compute_size_skeleton = |t| SizeSkeleton::compute(t, tcx, ParamEnv::reveal_all()).unwrap();
+ if !compute_size_skeleton(ty).same_size(compute_size_skeleton(field_ty)) {
+ bug!("improper_ctypes: Option nonnull optimization not applied?");
+ }
+
+ true
}
impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
);
// repr(transparent) types are allowed to have arbitrary ZSTs, not just
// PhantomData -- skip checking all ZST fields
- if def.repr.transparent() {
- let is_zst = cx
- .layout_of(cx.param_env(field.did).and(field_ty))
- .map(|layout| layout.is_zst())
- .unwrap_or(false);
- if is_zst {
- continue;
- }
+ if def.repr.transparent() && is_zst(cx, field.did, field_ty) {
+ continue;
}
let r = self.check_type_for_ffi(cache, field_ty);
match r {
// discriminant.
if !def.repr.c() && def.repr.int.is_none() {
// Special-case types like `Option<extern fn()>`.
- if !is_repr_nullable_ptr(cx, def, substs) {
+ if !is_repr_nullable_ptr(cx, ty, def, substs) {
return FfiUnsafe {
ty: ty,
reason: "enum has no representation hint",
use syntax::errors::Applicability;
use syntax::feature_gate::{AttributeType, BuiltinAttribute, BUILTIN_ATTRIBUTE_MAP};
use syntax::print::pprust;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax::symbol::Symbol;
use syntax::util::parser;
use syntax_pos::Span;
match items[0].0.kind {
ast::UseTreeKind::Simple(rename, ..) => {
let orig_ident = items[0].0.prefix.segments.last().unwrap().ident;
- if orig_ident.name == keywords::SelfLower.name() {
+ if orig_ident.name == kw::SelfLower {
return;
}
node_ident = rename.unwrap_or(orig_ident);
let mut keyword_stream = quote! {};
let mut symbols_stream = quote! {};
let mut prefill_stream = quote! {};
- let mut from_str_stream = quote! {};
let mut counter = 0u32;
let mut keys = HashSet::<String>::new();
#value,
});
keyword_stream.extend(quote! {
- pub const #name: Keyword = Keyword {
- ident: Ident::with_empty_ctxt(super::Symbol::new(#counter))
- };
- });
- from_str_stream.extend(quote! {
- #value => Ok(#name),
+ pub const #name: Symbol = Symbol::new(#counter);
});
counter += 1;
}
macro_rules! keywords {
() => {
#keyword_stream
-
- impl std::str::FromStr for Keyword {
- type Err = ();
-
- fn from_str(s: &str) -> Result<Self, ()> {
- match s {
- #from_str_stream
- _ => Err(()),
- }
- }
- }
}
}
flate2 = "1.0"
log = "0.4"
memmap = "0.6"
+smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
rustc = { path = "../librustc" }
rustc_data_structures = { path = "../librustc_data_structures" }
errors = { path = "../librustc_errors", package = "rustc_errors" }
fn verify_no_symbol_conflicts(&self,
span: Span,
- root: &CrateRoot) {
+ root: &CrateRoot<'_>) {
// Check for (potential) conflicts with the local crate
if self.local_crate_name == root.name &&
self.sess.local_crate_disambiguator() == root.disambiguator {
// Go through the crate metadata and load any crates that it references
fn resolve_crate_deps(&mut self,
root: &Option<CratePaths>,
- crate_root: &CrateRoot,
+ crate_root: &CrateRoot<'_>,
metadata: &MetadataBlob,
krate: CrateNum,
span: Span,
/// implemented as dynamic libraries, but we have a possible future where
/// custom derive (and other macro-1.1 style features) are implemented via
/// executables and custom IPC.
- fn load_derive_macros(&mut self, root: &CrateRoot, dylib: Option<PathBuf>, span: Span)
+ fn load_derive_macros(&mut self, root: &CrateRoot<'_>, dylib: Option<PathBuf>, span: Span)
-> Vec<(ast::Name, Lrc<SyntaxExtension>)> {
use std::{env, mem};
use crate::dynamic_lib::DynamicLibrary;
/// Used for decoding interpret::AllocIds in a cached & thread-safe manner.
pub alloc_decoding_state: AllocDecodingState,
- pub root: schema::CrateRoot,
+ // NOTE(eddyb) we pass `'static` to a `'tcx` parameter because this
+ // lifetime is only used behind `Lazy` / `LazySeq`, and therefore
+ // acts like an universal (`for<'tcx>`), that is paired up with
+ // whichever `TyCtxt` is being used to decode those values.
+ pub root: schema::CrateRoot<'static>,
/// For each public item in this crate, we encode a key. When the
/// crate is loaded, we read all the keys and put them in this
use rustc::util::nodemap::DefIdMap;
use rustc_data_structures::svh::Svh;
+use smallvec::SmallVec;
use std::any::Any;
use rustc_data_structures::sync::Lrc;
use std::sync::Arc;
generics_of => {
tcx.alloc_generics(cdata.get_generics(def_id.index, tcx.sess))
}
- predicates_of => { Lrc::new(cdata.get_predicates(def_id.index, tcx)) }
- predicates_defined_on => { Lrc::new(cdata.get_predicates_defined_on(def_id.index, tcx)) }
- super_predicates_of => { Lrc::new(cdata.get_super_predicates(def_id.index, tcx)) }
+ predicates_of => { tcx.arena.alloc(cdata.get_predicates(def_id.index, tcx)) }
+ predicates_defined_on => {
+ tcx.arena.alloc(cdata.get_predicates_defined_on(def_id.index, tcx))
+ }
+ super_predicates_of => { tcx.arena.alloc(cdata.get_super_predicates(def_id.index, tcx)) }
trait_def => {
tcx.alloc_trait_def(cdata.get_trait_def(def_id.index, tcx.sess))
}
}
variances_of => { tcx.arena.alloc_from_iter(cdata.get_item_variances(def_id.index)) }
associated_item_def_ids => {
- let mut result = vec![];
+ let mut result = SmallVec::<[_; 8]>::new();
cdata.each_child_of_item(def_id.index,
|child| result.push(child.res.def_id()), tcx.sess);
- Lrc::new(result)
+ tcx.arena.alloc_slice(&result)
}
associated_item => { cdata.get_associated_item(def_id.index) }
impl_trait_ref => { cdata.get_impl_trait(def_id.index, tcx) }
(cdata.mir_const_qualif(def_id.index), tcx.arena.alloc(BitSet::new_empty(0)))
}
fn_sig => { cdata.fn_sig(def_id.index, tcx) }
- inherent_impls => { Lrc::new(cdata.get_inherent_implementations_for_type(def_id.index)) }
+ inherent_impls => { cdata.get_inherent_implementations_for_type(tcx, def_id.index) }
is_const_fn_raw => { cdata.is_const_fn_raw(def_id.index) }
is_foreign_item => { cdata.is_foreign_item(def_id.index) }
static_mutability => { cdata.static_mutability(def_id.index) }
}
is_mir_available => { cdata.is_item_mir_available(def_id.index) }
- dylib_dependency_formats => { Lrc::new(cdata.get_dylib_dependency_formats()) }
+ dylib_dependency_formats => { cdata.get_dylib_dependency_formats(tcx) }
is_panic_runtime => { cdata.root.panic_runtime }
is_compiler_builtins => { cdata.root.compiler_builtins }
has_global_allocator => { cdata.root.has_global_allocator }
is_profiler_runtime => { cdata.root.profiler_runtime }
panic_strategy => { cdata.root.panic_strategy }
extern_crate => {
- let r = Lrc::new(*cdata.extern_crate.lock());
- r
+ let r = *cdata.extern_crate.lock();
+ r.map(|c| &*tcx.arena.alloc(c))
}
is_no_builtins => { cdata.root.no_builtins }
impl_defaultness => { cdata.get_impl_defaultness(def_id.index) }
})
.collect();
- Lrc::new(reachable_non_generics)
+ tcx.arena.alloc(reachable_non_generics)
}
native_libraries => { Lrc::new(cdata.get_native_libraries(tcx.sess)) }
- foreign_modules => { Lrc::new(cdata.get_foreign_modules(tcx.sess)) }
+ foreign_modules => { cdata.get_foreign_modules(tcx) }
plugin_registrar_fn => {
cdata.root.plugin_registrar_fn.map(|index| {
DefId { krate: def_id.krate, index }
extra_filename => { cdata.root.extra_filename.clone() }
-
implementations_of_trait => {
- let mut result = vec![];
- let filter = Some(other);
- cdata.get_implementations_for_trait(filter, &mut result);
- Lrc::new(result)
+ cdata.get_implementations_for_trait(tcx, Some(other))
}
all_trait_implementations => {
- let mut result = vec![];
- cdata.get_implementations_for_trait(None, &mut result);
- Lrc::new(result)
+ cdata.get_implementations_for_trait(tcx, None)
}
visibility => { cdata.get_visibility(def_id.index) }
}
crate_name => { cdata.name }
item_children => {
- let mut result = vec![];
+ let mut result = SmallVec::<[_; 8]>::new();
cdata.each_child_of_item(def_id.index, |child| result.push(child), tcx.sess);
- Lrc::new(result)
+ tcx.arena.alloc_slice(&result)
}
- defined_lib_features => { Lrc::new(cdata.get_lib_features()) }
- defined_lang_items => { Lrc::new(cdata.get_lang_items()) }
- missing_lang_items => { Lrc::new(cdata.get_missing_lang_items()) }
+ defined_lib_features => { cdata.get_lib_features(tcx) }
+ defined_lang_items => { cdata.get_lang_items(tcx) }
+ missing_lang_items => { cdata.get_missing_lang_items(tcx) }
missing_extern_crate_item => {
let r = match *cdata.extern_crate.borrow() {
used_crate_source => { Lrc::new(cdata.source.clone()) }
- exported_symbols => {
- let cnum = cdata.cnum;
- assert!(cnum != LOCAL_CRATE);
-
- Arc::new(cdata.exported_symbols(tcx))
- }
+ exported_symbols => { Arc::new(cdata.exported_symbols(tcx)) }
}
pub fn provide<'tcx>(providers: &mut Providers<'tcx>) {
},
foreign_modules: |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(foreign_modules::collect(tcx))
+ &tcx.arena.alloc(foreign_modules::collect(tcx))[..]
},
link_args: |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
// which is to say, its not deterministic in general. But
// we believe that libstd is consistently assigned crate
// num 1, so it should be enough to resolve #46112.
- let mut crates: Vec<CrateNum> = (*tcx.crates()).clone();
+ let mut crates: Vec<CrateNum> = (*tcx.crates()).to_owned();
crates.sort();
for &cnum in crates.iter() {
}
}
- Lrc::new(visible_parent_map)
+ tcx.arena.alloc(visible_parent_map)
},
..*providers
implement_ty_decoder!( DecodeContext<'a, 'tcx> );
-impl<'a, 'tcx> MetadataBlob {
+impl<'tcx> MetadataBlob {
pub fn is_compatible(&self) -> bool {
self.raw_bytes().starts_with(METADATA_HEADER)
}
Lazy::with_position(METADATA_HEADER.len() + 4).decode(self)
}
- pub fn get_root(&self) -> CrateRoot {
+ pub fn get_root(&self) -> CrateRoot<'tcx> {
let slice = self.raw_bytes();
let offset = METADATA_HEADER.len();
let pos = (((slice[offset + 0] as u32) << 24) | ((slice[offset + 1] as u32) << 16) |
/// |- proc macro #0 (DefIndex 1:N)
/// |- proc macro #1 (DefIndex 1:N+1)
/// \- ...
-crate fn proc_macro_def_path_table(crate_root: &CrateRoot,
+crate fn proc_macro_def_path_table(crate_root: &CrateRoot<'_>,
proc_macros: &[(ast::Name, Lrc<SyntaxExtension>)])
-> DefPathTable
{
fn maybe_entry(&self, item_id: DefIndex) -> Option<Lazy<Entry<'tcx>>> {
assert!(!self.is_proc_macro(item_id));
- self.root.index.lookup(self.blob.raw_bytes(), item_id)
+ self.root.entries_index.lookup(self.blob.raw_bytes(), item_id)
}
fn entry(&self, item_id: DefIndex) -> Entry<'tcx> {
}
/// Iterates over all the stability attributes in the given crate.
- pub fn get_lib_features(&self) -> Vec<(ast::Name, Option<ast::Name>)> {
+ pub fn get_lib_features(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ ) -> &'tcx [(ast::Name, Option<ast::Name>)] {
// FIXME: For a proc macro crate, not sure whether we should return the "host"
// features or an empty Vec. Both don't cause ICEs.
- self.root
+ tcx.arena.alloc_from_iter(self.root
.lib_features
- .decode(self)
- .collect()
+ .decode(self))
}
/// Iterates over the language items in the given crate.
- pub fn get_lang_items(&self) -> Vec<(DefId, usize)> {
+ pub fn get_lang_items(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ ) -> &'tcx [(DefId, usize)] {
if self.proc_macros.is_some() {
// Proc macro crates do not export any lang-items to the target.
- vec![]
+ &[]
} else {
- self.root
+ tcx.arena.alloc_from_iter(self.root
.lang_items
.decode(self)
- .map(|(def_index, index)| (self.local_def_id(def_index), index))
- .collect()
+ .map(|(def_index, index)| (self.local_def_id(def_index), index)))
}
}
None
}
- pub fn get_inherent_implementations_for_type(&self, id: DefIndex) -> Vec<DefId> {
- self.entry(id)
- .inherent_impls
- .decode(self)
- .map(|index| self.local_def_id(index))
- .collect()
+ pub fn get_inherent_implementations_for_type(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ id: DefIndex
+ ) -> &'tcx [DefId] {
+ tcx.arena.alloc_from_iter(self.entry(id)
+ .inherent_impls
+ .decode(self)
+ .map(|index| self.local_def_id(index)))
}
- pub fn get_implementations_for_trait(&self,
- filter: Option<DefId>,
- result: &mut Vec<DefId>) {
+ pub fn get_implementations_for_trait(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ filter: Option<DefId>,
+ ) -> &'tcx [DefId] {
if self.proc_macros.is_some() {
// proc-macro crates export no trait impls.
- return
+ return &[]
}
// Do a reverse lookup beforehand to avoid touching the crate_num
// hash map in the loop below.
let filter = match filter.map(|def_id| self.reverse_translate_def_id(def_id)) {
Some(Some(def_id)) => Some((def_id.krate.as_u32(), def_id.index)),
- Some(None) => return,
+ Some(None) => return &[],
None => None,
};
if let Some(filter) = filter {
- if let Some(impls) = self.trait_impls
- .get(&filter) {
- result.extend(impls.decode(self).map(|idx| self.local_def_id(idx)));
+ if let Some(impls) = self.trait_impls.get(&filter) {
+ tcx.arena.alloc_from_iter(impls.decode(self).map(|idx| self.local_def_id(idx)))
+ } else {
+ &[]
}
} else {
- for impls in self.trait_impls.values() {
- result.extend(impls.decode(self).map(|idx| self.local_def_id(idx)));
- }
+ tcx.arena.alloc_from_iter(self.trait_impls.values().flat_map(|impls| {
+ impls.decode(self).map(|idx| self.local_def_id(idx))
+ }))
}
}
}
}
- pub fn get_foreign_modules(&self, sess: &Session) -> Vec<ForeignModule> {
+ pub fn get_foreign_modules(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ ) -> &'tcx [ForeignModule] {
if self.proc_macros.is_some() {
// Proc macro crates do not have any *target* foreign modules.
- vec![]
+ &[]
} else {
- self.root.foreign_modules.decode((self, sess)).collect()
+ tcx.arena.alloc_from_iter(self.root.foreign_modules.decode((self, tcx.sess)))
}
}
- pub fn get_dylib_dependency_formats(&self) -> Vec<(CrateNum, LinkagePreference)> {
- self.root
+ pub fn get_dylib_dependency_formats(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ ) -> &'tcx [(CrateNum, LinkagePreference)] {
+ tcx.arena.alloc_from_iter(self.root
.dylib_dependency_formats
.decode(self)
.enumerate()
.flat_map(|(i, link)| {
let cnum = CrateNum::new(i + 1);
link.map(|link| (self.cnum_map[cnum], link))
- })
- .collect()
+ }))
}
- pub fn get_missing_lang_items(&self) -> Vec<lang_items::LangItem> {
+ pub fn get_missing_lang_items(
+ &self,
+ tcx: TyCtxt<'_, 'tcx, '_>,
+ ) -> &'tcx [lang_items::LangItem] {
if self.proc_macros.is_some() {
// Proc macro crates do not depend on any target weak lang-items.
- vec![]
+ &[]
} else {
- self.root
+ tcx.arena.alloc_from_iter(self.root
.lang_items_missing
- .decode(self)
- .collect()
+ .decode(self))
}
}
// link those in so we skip those crates.
vec![]
} else {
- let lazy_seq: LazySeq<(ExportedSymbol<'tcx>, SymbolExportLevel)> =
- LazySeq::with_position_and_length(self.root.exported_symbols.position,
- self.root.exported_symbols.len);
- lazy_seq.decode((self, tcx)).collect()
+ self.root.exported_symbols.decode((self, tcx)).collect()
}
}
use crate::index::Index;
-use crate::index_builder::{FromId, IndexBuilder, Untracked};
-use crate::isolated_encoder::IsolatedEncoder;
use crate::schema::*;
use rustc::middle::cstore::{LinkagePreference, NativeLibrary,
use syntax::ast;
use syntax::attr;
use syntax::source_map::Spanned;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax_pos::{self, FileName, SourceFile, Span};
use log::{debug, trace};
opaque: opaque::Encoder,
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ entries_index: Index<'tcx>,
+
lazy_state: LazyState,
type_shorthands: FxHashMap<Ty<'tcx>, usize>,
predicate_shorthands: FxHashMap<ty::Predicate<'tcx>, usize>,
})
}
- // Encodes something that corresponds to a single DepNode::GlobalMetaData
- // and registers the Fingerprint in the `metadata_hashes` map.
- pub fn tracked<'x, DATA, R>(&'x mut self,
- op: fn(&mut IsolatedEncoder<'x, 'a, 'tcx>, DATA) -> R,
- data: DATA)
- -> R {
- op(&mut IsolatedEncoder::new(self), data)
+ /// Emit the data for a `DefId` to the metadata. The function to
+ /// emit the data is `op`, and it will be given `data` as
+ /// arguments. This `record` function will call `op` to generate
+ /// the `Entry` (which may point to other encoded information)
+ /// and will then record the `Lazy<Entry>` for use in the index.
+ // FIXME(eddyb) remove this.
+ pub fn record<DATA>(&mut self,
+ id: DefId,
+ op: impl FnOnce(&mut Self, DATA) -> Entry<'tcx>,
+ data: DATA)
+ {
+ assert!(id.is_local());
+
+ let entry = op(self, data);
+ let entry = self.lazy(&entry);
+ self.entries_index.record(id, entry);
}
- fn encode_info_for_items(&mut self) -> Index {
+ fn encode_info_for_items(&mut self) {
let krate = self.tcx.hir().krate();
- let mut index = IndexBuilder::new(self);
let vis = Spanned { span: syntax_pos::DUMMY_SP, node: hir::VisibilityKind::Public };
- index.record(DefId::local(CRATE_DEF_INDEX),
- IsolatedEncoder::encode_info_for_mod,
- FromId(hir::CRATE_HIR_ID, (&krate.module, &krate.attrs, &vis)));
- let mut visitor = EncodeVisitor { index };
- krate.visit_all_item_likes(&mut visitor.as_deep_visitor());
+ self.record(DefId::local(CRATE_DEF_INDEX),
+ EncodeContext::encode_info_for_mod,
+ (hir::CRATE_HIR_ID, &krate.module, &krate.attrs, &vis));
+ krate.visit_all_item_likes(&mut self.as_deep_visitor());
for macro_def in &krate.exported_macros {
- visitor.visit_macro_def(macro_def);
+ self.visit_macro_def(macro_def);
}
- visitor.index.into_items()
}
fn encode_def_path_table(&mut self) -> Lazy<DefPathTable> {
self.lazy_seq_ref(adapted.iter().map(|rc| &**rc))
}
- fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
+ fn encode_crate_root(&mut self) -> Lazy<CrateRoot<'tcx>> {
let mut i = self.position();
- let crate_deps = self.tracked(IsolatedEncoder::encode_crate_deps, ());
- let dylib_dependency_formats = self.tracked(
- IsolatedEncoder::encode_dylib_dependency_formats,
- ());
+ let crate_deps = self.encode_crate_deps();
+ let dylib_dependency_formats = self.encode_dylib_dependency_formats();
let dep_bytes = self.position() - i;
// Encode the lib features.
i = self.position();
- let lib_features = self.tracked(IsolatedEncoder::encode_lib_features, ());
+ let lib_features = self.encode_lib_features();
let lib_feature_bytes = self.position() - i;
// Encode the language items.
i = self.position();
- let lang_items = self.tracked(IsolatedEncoder::encode_lang_items, ());
- let lang_items_missing = self.tracked(
- IsolatedEncoder::encode_lang_items_missing,
- ());
+ let lang_items = self.encode_lang_items();
+ let lang_items_missing = self.encode_lang_items_missing();
let lang_item_bytes = self.position() - i;
// Encode the native libraries used
i = self.position();
- let native_libraries = self.tracked(
- IsolatedEncoder::encode_native_libraries,
- ());
+ let native_libraries = self.encode_native_libraries();
let native_lib_bytes = self.position() - i;
- let foreign_modules = self.tracked(
- IsolatedEncoder::encode_foreign_modules,
- ());
+ let foreign_modules = self.encode_foreign_modules();
// Encode source_map
i = self.position();
// Encode the def IDs of impls, for coherence checking.
i = self.position();
- let impls = self.tracked(IsolatedEncoder::encode_impls, ());
+ let impls = self.encode_impls();
let impl_bytes = self.position() - i;
// Encode exported symbols info.
i = self.position();
let exported_symbols = self.tcx.exported_symbols(LOCAL_CRATE);
- let exported_symbols = self.tracked(
- IsolatedEncoder::encode_exported_symbols,
- &exported_symbols);
+ let exported_symbols = self.encode_exported_symbols(&exported_symbols);
let exported_symbols_bytes = self.position() - i;
let tcx = self.tcx;
// Encode the items.
i = self.position();
- let items = self.encode_info_for_items();
+ self.encode_info_for_items();
let item_bytes = self.position() - i;
// Encode the allocation index
self.lazy_seq(interpret_alloc_index)
};
- // Index the items
i = self.position();
- let index = items.write_index(&mut self.opaque);
- let index_bytes = self.position() - i;
+ let entries_index = self.entries_index.write_index(&mut self.opaque);
+ let entries_index_bytes = self.position() - i;
let attrs = tcx.hir().krate_attrs();
let is_proc_macro = tcx.sess.crate_types.borrow().contains(&CrateType::ProcMacro);
impls,
exported_symbols,
interpret_alloc_index,
- index,
+ entries_index,
});
let total_bytes = self.position();
println!(" exp. symbols bytes: {}", exported_symbols_bytes);
println!(" def-path table bytes: {}", def_path_table_bytes);
println!(" item bytes: {}", item_bytes);
- println!(" index bytes: {}", index_bytes);
+ println!(" entries index bytes: {}", entries_index_bytes);
println!(" zero bytes: {}", zero_bytes);
println!(" total bytes: {}", total_bytes);
}
}
}
-// These are methods for encoding various things. They are meant to be used with
-// IndexBuilder::record() and EncodeContext::tracked(). They actually
-// would not have to be methods of IsolatedEncoder (free standing functions
-// taking IsolatedEncoder as first argument would be just fine) but by making
-// them methods we don't have to repeat the lengthy `<'a, 'b: 'a, 'tcx: 'b>`
-// clause again and again.
-impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
+impl EncodeContext<'_, 'tcx> {
fn encode_variances_of(&mut self, def_id: DefId) -> LazySeq<ty::Variance> {
- debug!("IsolatedEncoder::encode_variances_of({:?})", def_id);
+ debug!("EncodeContext::encode_variances_of({:?})", def_id);
let tcx = self.tcx;
- self.lazy_seq_from_slice(&tcx.variances_of(def_id))
+ self.lazy_seq_ref(&tcx.variances_of(def_id)[..])
}
fn encode_item_type(&mut self, def_id: DefId) -> Lazy<Ty<'tcx>> {
let tcx = self.tcx;
let ty = tcx.type_of(def_id);
- debug!("IsolatedEncoder::encode_item_type({:?}) => {:?}", def_id, ty);
+ debug!("EncodeContext::encode_item_type({:?}) => {:?}", def_id, ty);
self.lazy(&ty)
}
- /// Encode data for the given variant of the given ADT. The
- /// index of the variant is untracked: this is ok because we
- /// will have to lookup the adt-def by its id, and that gives us
- /// the right to access any information in the adt-def (including,
- /// e.g., the length of the various vectors).
fn encode_enum_variant_info(
&mut self,
- (enum_did, Untracked(index)): (DefId, Untracked<VariantIdx>),
+ (enum_did, index): (DefId, VariantIdx),
) -> Entry<'tcx> {
let tcx = self.tcx;
let def = tcx.adt_def(enum_did);
let variant = &def.variants[index];
let def_id = variant.def_id;
- debug!("IsolatedEncoder::encode_enum_variant_info({:?})", def_id);
+ debug!("EncodeContext::encode_enum_variant_info({:?})", def_id);
let data = VariantData {
ctor_kind: variant.ctor_kind,
}
}
- /// Encode the constructor for the given variant of the given ADT. See
- /// `encode_enum_variant_info` for an explanation about why the index is untracked.
fn encode_enum_variant_ctor(
&mut self,
- (enum_did, Untracked(index)): (DefId, Untracked<VariantIdx>),
+ (enum_did, index): (DefId, VariantIdx),
) -> Entry<'tcx> {
let tcx = self.tcx;
let def = tcx.adt_def(enum_did);
let variant = &def.variants[index];
let def_id = variant.ctor_def_id.unwrap();
- debug!("IsolatedEncoder::encode_enum_variant_ctor({:?})", def_id);
+ debug!("EncodeContext::encode_enum_variant_ctor({:?})", def_id);
let data = VariantData {
ctor_kind: variant.ctor_kind,
}
}
- fn encode_info_for_mod(&mut self,
- FromId(id, (md, attrs, vis)): FromId<(&hir::Mod,
- &[ast::Attribute],
- &hir::Visibility)>)
- -> Entry<'tcx> {
+ fn encode_info_for_mod(
+ &mut self,
+ (id, md, attrs, vis): (hir::HirId, &hir::Mod, &[ast::Attribute], &hir::Visibility),
+ ) -> Entry<'tcx> {
let tcx = self.tcx;
let def_id = tcx.hir().local_def_id_from_hir_id(id);
- debug!("IsolatedEncoder::encode_info_for_mod({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_mod({:?})", def_id);
let data = ModData {
reexports: match tcx.module_exports(def_id) {
- Some(ref exports) => self.lazy_seq_from_slice(exports.as_slice()),
+ Some(exports) => self.lazy_seq_ref(exports),
_ => LazySeq::empty(),
},
};
}
}
- /// Encode data for the given field of the given variant of the
- /// given ADT. The indices of the variant/field are untracked:
- /// this is ok because we will have to lookup the adt-def by its
- /// id, and that gives us the right to access any information in
- /// the adt-def (including, e.g., the length of the various
- /// vectors).
- fn encode_field(&mut self,
- (adt_def_id, Untracked((variant_index, field_index))): (DefId,
- Untracked<(VariantIdx,
- usize)>))
- -> Entry<'tcx> {
+ fn encode_field(
+ &mut self,
+ (adt_def_id, variant_index, field_index): (DefId, VariantIdx, usize),
+ ) -> Entry<'tcx> {
let tcx = self.tcx;
let variant = &tcx.adt_def(adt_def_id).variants[variant_index];
let field = &variant.fields[field_index];
let def_id = field.did;
- debug!("IsolatedEncoder::encode_field({:?})", def_id);
+ debug!("EncodeContext::encode_field({:?})", def_id);
let variant_id = tcx.hir().as_local_hir_id(variant.def_id).unwrap();
let variant_data = tcx.hir().expect_variant_data(variant_id);
}
fn encode_struct_ctor(&mut self, (adt_def_id, def_id): (DefId, DefId)) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_struct_ctor({:?})", def_id);
+ debug!("EncodeContext::encode_struct_ctor({:?})", def_id);
let tcx = self.tcx;
let adt_def = tcx.adt_def(adt_def_id);
let variant = adt_def.non_enum_variant();
}
fn encode_generics(&mut self, def_id: DefId) -> Lazy<ty::Generics> {
- debug!("IsolatedEncoder::encode_generics({:?})", def_id);
+ debug!("EncodeContext::encode_generics({:?})", def_id);
let tcx = self.tcx;
self.lazy(tcx.generics_of(def_id))
}
fn encode_predicates(&mut self, def_id: DefId) -> Lazy<ty::GenericPredicates<'tcx>> {
- debug!("IsolatedEncoder::encode_predicates({:?})", def_id);
+ debug!("EncodeContext::encode_predicates({:?})", def_id);
let tcx = self.tcx;
self.lazy(&tcx.predicates_of(def_id))
}
fn encode_predicates_defined_on(&mut self, def_id: DefId) -> Lazy<ty::GenericPredicates<'tcx>> {
- debug!("IsolatedEncoder::encode_predicates_defined_on({:?})", def_id);
+ debug!("EncodeContext::encode_predicates_defined_on({:?})", def_id);
let tcx = self.tcx;
self.lazy(&tcx.predicates_defined_on(def_id))
}
fn encode_info_for_trait_item(&mut self, def_id: DefId) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_trait_item({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_trait_item({:?})", def_id);
let tcx = self.tcx;
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
}
fn encode_info_for_impl_item(&mut self, def_id: DefId) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_impl_item({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_impl_item({:?})", def_id);
let tcx = self.tcx;
let hir_id = self.tcx.hir().as_local_hir_id(def_id).unwrap();
self.lazy_seq(body.arguments.iter().map(|arg| {
match arg.pat.node {
PatKind::Binding(_, _, ident, _) => ident.name,
- _ => keywords::Invalid.name(),
+ _ => kw::Invalid,
}
}))
})
// Encodes the inherent implementations of a structure, enumeration, or trait.
fn encode_inherent_implementations(&mut self, def_id: DefId) -> LazySeq<DefIndex> {
- debug!("IsolatedEncoder::encode_inherent_implementations({:?})", def_id);
+ debug!("EncodeContext::encode_inherent_implementations({:?})", def_id);
let implementations = self.tcx.inherent_impls(def_id);
if implementations.is_empty() {
LazySeq::empty()
}
fn encode_stability(&mut self, def_id: DefId) -> Option<Lazy<attr::Stability>> {
- debug!("IsolatedEncoder::encode_stability({:?})", def_id);
+ debug!("EncodeContext::encode_stability({:?})", def_id);
self.tcx.lookup_stability(def_id).map(|stab| self.lazy(stab))
}
fn encode_deprecation(&mut self, def_id: DefId) -> Option<Lazy<attr::Deprecation>> {
- debug!("IsolatedEncoder::encode_deprecation({:?})", def_id);
+ debug!("EncodeContext::encode_deprecation({:?})", def_id);
self.tcx.lookup_deprecation(def_id).map(|depr| self.lazy(&depr))
}
fn encode_info_for_item(&mut self, (def_id, item): (DefId, &'tcx hir::Item)) -> Entry<'tcx> {
let tcx = self.tcx;
- debug!("IsolatedEncoder::encode_info_for_item({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_item({:?})", def_id);
let kind = match item.node {
hir::ItemKind::Static(_, hir::MutMutable, _) => EntryKind::MutStatic,
EntryKind::Fn(self.lazy(&data))
}
hir::ItemKind::Mod(ref m) => {
- return self.encode_info_for_mod(FromId(item.hir_id, (m, &item.attrs, &item.vis)));
+ return self.encode_info_for_mod((item.hir_id, m, &item.attrs, &item.vis));
}
hir::ItemKind::ForeignMod(_) => EntryKind::ForeignMod,
hir::ItemKind::GlobalAsm(..) => EntryKind::GlobalAsm,
fn encode_info_for_ty_param(
&mut self,
- (def_id, Untracked(encode_type)): (DefId, Untracked<bool>),
+ (def_id, encode_type): (DefId, bool),
) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_ty_param({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_ty_param({:?})", def_id);
self.encode_info_for_generic_param(def_id, EntryKind::TypeParam, encode_type)
}
&mut self,
def_id: DefId,
) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_const_param({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_const_param({:?})", def_id);
self.encode_info_for_generic_param(def_id, EntryKind::ConstParam, true)
}
fn encode_info_for_closure(&mut self, def_id: DefId) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_closure({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_closure({:?})", def_id);
let tcx = self.tcx;
let tables = self.tcx.typeck_tables_of(def_id);
}
fn encode_info_for_anon_const(&mut self, def_id: DefId) -> Entry<'tcx> {
- debug!("IsolatedEncoder::encode_info_for_anon_const({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_anon_const({:?})", def_id);
let tcx = self.tcx;
let id = tcx.hir().as_local_hir_id(def_id).unwrap();
let body_id = tcx.hir().body_owned_by(id);
}
fn encode_attributes(&mut self, attrs: &[ast::Attribute]) -> LazySeq<ast::Attribute> {
- // NOTE: This must use lazy_seq_from_slice(), not lazy_seq() because
- // we rely on the HashStable specialization for [Attribute]
- // to properly filter things out.
- self.lazy_seq_from_slice(attrs)
+ self.lazy_seq_ref(attrs)
}
- fn encode_native_libraries(&mut self, _: ()) -> LazySeq<NativeLibrary> {
+ fn encode_native_libraries(&mut self) -> LazySeq<NativeLibrary> {
let used_libraries = self.tcx.native_libraries(LOCAL_CRATE);
self.lazy_seq(used_libraries.iter().cloned())
}
- fn encode_foreign_modules(&mut self, _: ()) -> LazySeq<ForeignModule> {
+ fn encode_foreign_modules(&mut self) -> LazySeq<ForeignModule> {
let foreign_modules = self.tcx.foreign_modules(LOCAL_CRATE);
self.lazy_seq(foreign_modules.iter().cloned())
}
- fn encode_crate_deps(&mut self, _: ()) -> LazySeq<CrateDep> {
+ fn encode_crate_deps(&mut self) -> LazySeq<CrateDep> {
let crates = self.tcx.crates();
let mut deps = crates
self.lazy_seq_ref(deps.iter().map(|&(_, ref dep)| dep))
}
- fn encode_lib_features(&mut self, _: ()) -> LazySeq<(ast::Name, Option<ast::Name>)> {
+ fn encode_lib_features(&mut self) -> LazySeq<(ast::Name, Option<ast::Name>)> {
let tcx = self.tcx;
let lib_features = tcx.lib_features();
self.lazy_seq(lib_features.to_vec())
}
- fn encode_lang_items(&mut self, _: ()) -> LazySeq<(DefIndex, usize)> {
+ fn encode_lang_items(&mut self) -> LazySeq<(DefIndex, usize)> {
let tcx = self.tcx;
let lang_items = tcx.lang_items();
let lang_items = lang_items.items().iter();
}))
}
- fn encode_lang_items_missing(&mut self, _: ()) -> LazySeq<lang_items::LangItem> {
+ fn encode_lang_items_missing(&mut self) -> LazySeq<lang_items::LangItem> {
let tcx = self.tcx;
self.lazy_seq_ref(&tcx.lang_items().missing)
}
/// Encodes an index, mapping each trait to its (local) implementations.
- fn encode_impls(&mut self, _: ()) -> LazySeq<TraitImpls> {
- debug!("IsolatedEncoder::encode_impls()");
+ fn encode_impls(&mut self) -> LazySeq<TraitImpls> {
+ debug!("EncodeContext::encode_impls()");
let tcx = self.tcx;
let mut visitor = ImplVisitor {
tcx,
TraitImpls {
trait_id: (trait_def_id.krate.as_u32(), trait_def_id.index),
- impls: self.lazy_seq_from_slice(&impls[..]),
+ impls: self.lazy_seq_ref(&impls),
}
})
.collect();
- self.lazy_seq_from_slice(&all_impls[..])
+ self.lazy_seq_ref(&all_impls)
}
// Encodes all symbols exported from this crate into the metadata.
// symbol associated with them (they weren't translated) or if they're an FFI
// definition (as that's not defined in this crate).
fn encode_exported_symbols(&mut self,
- exported_symbols: &[(ExportedSymbol<'_>, SymbolExportLevel)])
- -> EncodedExportedSymbols {
+ exported_symbols: &[(ExportedSymbol<'tcx>, SymbolExportLevel)])
+ -> LazySeq<(ExportedSymbol<'tcx>, SymbolExportLevel)> {
// The metadata symbol name is special. It should not show up in
// downstream crates.
let metadata_symbol_name = SymbolName::new(&metadata_symbol_name(self.tcx));
- let lazy_seq = self.lazy_seq(exported_symbols
+ self.lazy_seq(exported_symbols
.iter()
.filter(|&&(ref exported_symbol, _)| {
match *exported_symbol {
_ => true,
}
})
- .cloned());
-
- EncodedExportedSymbols {
- len: lazy_seq.len,
- position: lazy_seq.position,
- }
+ .cloned())
}
- fn encode_dylib_dependency_formats(&mut self, _: ()) -> LazySeq<Option<LinkagePreference>> {
+ fn encode_dylib_dependency_formats(&mut self) -> LazySeq<Option<LinkagePreference>> {
match self.tcx.sess.dependency_formats.borrow().get(&config::CrateType::Dylib) {
Some(arr) => {
self.lazy_seq(arr.iter().map(|slot| {
-> Entry<'tcx> {
let tcx = self.tcx;
- debug!("IsolatedEncoder::encode_info_for_foreign_item({:?})", def_id);
+ debug!("EncodeContext::encode_info_for_foreign_item({:?})", def_id);
let kind = match nitem.node {
hir::ForeignItemKind::Fn(_, ref names, _) => {
}
}
-struct EncodeVisitor<'a, 'b: 'a, 'tcx: 'b> {
- index: IndexBuilder<'a, 'b, 'tcx>,
-}
-
-impl<'a, 'b, 'tcx> Visitor<'tcx> for EncodeVisitor<'a, 'b, 'tcx> {
+impl Visitor<'tcx> for EncodeContext<'_, 'tcx> {
fn nested_visit_map<'this>(&'this mut self) -> NestedVisitorMap<'this, 'tcx> {
- NestedVisitorMap::OnlyBodies(&self.index.tcx.hir())
+ NestedVisitorMap::OnlyBodies(&self.tcx.hir())
}
fn visit_expr(&mut self, ex: &'tcx hir::Expr) {
intravisit::walk_expr(self, ex);
- self.index.encode_info_for_expr(ex);
+ self.encode_info_for_expr(ex);
}
fn visit_item(&mut self, item: &'tcx hir::Item) {
intravisit::walk_item(self, item);
- let def_id = self.index.tcx.hir().local_def_id_from_hir_id(item.hir_id);
+ let def_id = self.tcx.hir().local_def_id_from_hir_id(item.hir_id);
match item.node {
hir::ItemKind::ExternCrate(_) |
- hir::ItemKind::Use(..) => (), // ignore these
- _ => self.index.record(def_id, IsolatedEncoder::encode_info_for_item, (def_id, item)),
+ hir::ItemKind::Use(..) => {} // ignore these
+ _ => self.record(def_id, EncodeContext::encode_info_for_item, (def_id, item)),
}
- self.index.encode_addl_info_for_item(item);
+ self.encode_addl_info_for_item(item);
}
fn visit_foreign_item(&mut self, ni: &'tcx hir::ForeignItem) {
intravisit::walk_foreign_item(self, ni);
- let def_id = self.index.tcx.hir().local_def_id_from_hir_id(ni.hir_id);
- self.index.record(def_id,
- IsolatedEncoder::encode_info_for_foreign_item,
+ let def_id = self.tcx.hir().local_def_id_from_hir_id(ni.hir_id);
+ self.record(def_id,
+ EncodeContext::encode_info_for_foreign_item,
(def_id, ni));
}
fn visit_variant(&mut self,
intravisit::walk_variant(self, v, g, id);
if let Some(ref discr) = v.node.disr_expr {
- let def_id = self.index.tcx.hir().local_def_id_from_hir_id(discr.hir_id);
- self.index.record(def_id, IsolatedEncoder::encode_info_for_anon_const, def_id);
+ let def_id = self.tcx.hir().local_def_id_from_hir_id(discr.hir_id);
+ self.record(def_id, EncodeContext::encode_info_for_anon_const, def_id);
}
}
fn visit_generics(&mut self, generics: &'tcx hir::Generics) {
intravisit::walk_generics(self, generics);
- self.index.encode_info_for_generics(generics);
+ self.encode_info_for_generics(generics);
}
fn visit_ty(&mut self, ty: &'tcx hir::Ty) {
intravisit::walk_ty(self, ty);
- self.index.encode_info_for_ty(ty);
+ self.encode_info_for_ty(ty);
}
fn visit_macro_def(&mut self, macro_def: &'tcx hir::MacroDef) {
- let def_id = self.index.tcx.hir().local_def_id_from_hir_id(macro_def.hir_id);
- self.index.record(def_id, IsolatedEncoder::encode_info_for_macro_def, macro_def);
+ let def_id = self.tcx.hir().local_def_id_from_hir_id(macro_def.hir_id);
+ self.record(def_id, EncodeContext::encode_info_for_macro_def, macro_def);
}
}
-impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
+impl EncodeContext<'_, 'tcx> {
fn encode_fields(&mut self, adt_def_id: DefId) {
let def = self.tcx.adt_def(adt_def_id);
for (variant_index, variant) in def.variants.iter_enumerated() {
for (field_index, field) in variant.fields.iter().enumerate() {
self.record(field.did,
- IsolatedEncoder::encode_field,
- (adt_def_id, Untracked((variant_index, field_index))));
+ EncodeContext::encode_field,
+ (adt_def_id, variant_index, field_index));
}
}
}
GenericParamKind::Type { ref default, .. } => {
self.record(
def_id,
- IsolatedEncoder::encode_info_for_ty_param,
- (def_id, Untracked(default.is_some())),
+ EncodeContext::encode_info_for_ty_param,
+ (def_id, default.is_some()),
);
}
GenericParamKind::Const { .. } => {
- self.record(def_id, IsolatedEncoder::encode_info_for_const_param, def_id);
+ self.record(def_id, EncodeContext::encode_info_for_const_param, def_id);
}
}
}
match ty.node {
hir::TyKind::Array(_, ref length) => {
let def_id = self.tcx.hir().local_def_id_from_hir_id(length.hir_id);
- self.record(def_id, IsolatedEncoder::encode_info_for_anon_const, def_id);
+ self.record(def_id, EncodeContext::encode_info_for_anon_const, def_id);
}
_ => {}
}
match expr.node {
hir::ExprKind::Closure(..) => {
let def_id = self.tcx.hir().local_def_id_from_hir_id(expr.hir_id);
- self.record(def_id, IsolatedEncoder::encode_info_for_closure, def_id);
+ self.record(def_id, EncodeContext::encode_info_for_closure, def_id);
}
_ => {}
}
let def = self.tcx.adt_def(def_id);
for (i, variant) in def.variants.iter_enumerated() {
self.record(variant.def_id,
- IsolatedEncoder::encode_enum_variant_info,
- (def_id, Untracked(i)));
+ EncodeContext::encode_enum_variant_info,
+ (def_id, i));
if let Some(ctor_def_id) = variant.ctor_def_id {
self.record(ctor_def_id,
- IsolatedEncoder::encode_enum_variant_ctor,
- (def_id, Untracked(i)));
+ EncodeContext::encode_enum_variant_ctor,
+ (def_id, i));
}
}
}
if let Some(ctor_hir_id) = struct_def.ctor_hir_id() {
let ctor_def_id = self.tcx.hir().local_def_id_from_hir_id(ctor_hir_id);
self.record(ctor_def_id,
- IsolatedEncoder::encode_struct_ctor,
+ EncodeContext::encode_struct_ctor,
(def_id, ctor_def_id));
}
}
hir::ItemKind::Impl(..) => {
for &trait_item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
self.record(trait_item_def_id,
- IsolatedEncoder::encode_info_for_impl_item,
+ EncodeContext::encode_info_for_impl_item,
trait_item_def_id);
}
}
hir::ItemKind::Trait(..) => {
for &item_def_id in self.tcx.associated_item_def_ids(def_id).iter() {
self.record(item_def_id,
- IsolatedEncoder::encode_info_for_trait_item,
+ EncodeContext::encode_info_for_trait_item,
item_def_id);
}
}
// Will be filled with the root position after encoding everything.
encoder.emit_raw_bytes(&[0, 0, 0, 0]);
- let (root, mut result) = {
+ // Since encoding metadata is not in a query, and nothing is cached,
+ // there's no need to do dep-graph tracking for any of it.
+ let (root, mut result) = tcx.dep_graph.with_ignore(move || {
let mut ecx = EncodeContext {
opaque: encoder,
tcx,
+ entries_index: Index::new(tcx.hir().definitions().def_index_count()),
lazy_state: LazyState::NoNode,
type_shorthands: Default::default(),
predicate_shorthands: Default::default(),
// culminating in the `CrateRoot` which points to all of it.
let root = ecx.encode_crate_root();
(root, ecx.opaque.into_inner())
- };
+ });
// Encode the root position.
let header = METADATA_HEADER.len();
use rustc::hir::def_id::{DefId, DefIndex};
use rustc_serialize::opaque::Encoder;
+use std::marker::PhantomData;
use std::u32;
use log::debug;
/// `u32::MAX`. Whenever an index is visited, we fill in the
/// appropriate spot by calling `record_position`. We should never
/// visit the same index twice.
-pub struct Index {
+pub struct Index<'tcx> {
positions: Vec<u8>,
+ _marker: PhantomData<&'tcx ()>,
}
-impl Index {
- pub fn new(max_index: usize) -> Index {
+impl Index<'tcx> {
+ pub fn new(max_index: usize) -> Self {
Index {
positions: vec![0xff; max_index * 4],
+ _marker: PhantomData,
}
}
- pub fn record(&mut self, def_id: DefId, entry: Lazy<Entry<'_>>) {
+ pub fn record(&mut self, def_id: DefId, entry: Lazy<Entry<'tcx>>) {
assert!(def_id.is_local());
self.record_index(def_id.index, entry);
}
- pub fn record_index(&mut self, item: DefIndex, entry: Lazy<Entry<'_>>) {
+ pub fn record_index(&mut self, item: DefIndex, entry: Lazy<Entry<'tcx>>) {
assert!(entry.position < (u32::MAX as usize));
let position = entry.position as u32;
let array_index = item.index();
position.write_to_bytes_at(positions, array_index)
}
- pub fn write_index(&self, buf: &mut Encoder) -> LazySeq<Index> {
+ pub fn write_index(&self, buf: &mut Encoder) -> LazySeq<Self> {
let pos = buf.position();
// First we write the length of the lower range ...
}
}
-impl<'tcx> LazySeq<Index> {
+impl LazySeq<Index<'tcx>> {
/// Given the metadata, extract out the offset of a particular
/// DefIndex (if any).
#[inline(never)]
+++ /dev/null
-//! Builder types for generating the "item data" section of the
-//! metadata. This section winds up looking like this:
-//!
-//! ```
-//! <common::data> // big list of item-like things...
-//! <common::data_item> // ...for most `DefId`s, there is an entry.
-//! </common::data_item>
-//! </common::data>
-//! ```
-//!
-//! As we generate this listing, we collect the offset of each
-//! `data_item` entry and store it in an index. Then, when we load the
-//! metadata, we can skip right to the metadata for a particular item.
-//!
-//! In addition to the offset, we need to track the data that was used
-//! to generate the contents of each `data_item`. This is so that we
-//! can figure out which HIR nodes contributed to that data for
-//! incremental compilation purposes.
-//!
-//! The `IndexBuilder` facilitates both of these. It is created
-//! with an `EncodingContext` (`ecx`), which it encapsulates.
-//! It has one main method, `record()`. You invoke `record`
-//! like so to create a new `data_item` element in the list:
-//!
-//! ```
-//! index.record(some_def_id, callback_fn, data)
-//! ```
-//!
-//! What record will do is to (a) record the current offset, (b) emit
-//! the `common::data_item` tag, and then call `callback_fn` with the
-//! given data as well as the `EncodingContext`. Once `callback_fn`
-//! returns, the `common::data_item` tag will be closed.
-//!
-//! `EncodingContext` does not offer the `record` method, so that we
-//! can ensure that `common::data_item` elements are never nested.
-//!
-//! In addition, while the `callback_fn` is executing, we will push a
-//! task `MetaData(some_def_id)`, which can then observe the
-//! reads/writes that occur in the task. For this reason, the `data`
-//! argument that is given to the `callback_fn` must implement the
-//! trait `DepGraphRead`, which indicates how to register reads on the
-//! data in this new task (note that many types of data, such as
-//! `DefId`, do not currently require any reads to be registered,
-//! since they are not derived from a HIR node). This is also why we
-//! give a callback fn, rather than taking a closure: it allows us to
-//! easily control precisely what data is given to that fn.
-
-use crate::encoder::EncodeContext;
-use crate::index::Index;
-use crate::schema::*;
-use crate::isolated_encoder::IsolatedEncoder;
-
-use rustc::hir;
-use rustc::hir::def_id::DefId;
-use rustc::ty::TyCtxt;
-use syntax::ast;
-
-use std::ops::{Deref, DerefMut};
-
-/// Builder that can encode new items, adding them into the index.
-/// Item encoding cannot be nested.
-pub struct IndexBuilder<'a, 'b: 'a, 'tcx: 'b> {
- items: Index,
- pub ecx: &'a mut EncodeContext<'b, 'tcx>,
-}
-
-impl<'a, 'b, 'tcx> Deref for IndexBuilder<'a, 'b, 'tcx> {
- type Target = EncodeContext<'b, 'tcx>;
- fn deref(&self) -> &Self::Target {
- self.ecx
- }
-}
-
-impl<'a, 'b, 'tcx> DerefMut for IndexBuilder<'a, 'b, 'tcx> {
- fn deref_mut(&mut self) -> &mut Self::Target {
- self.ecx
- }
-}
-
-impl<'a, 'b, 'tcx> IndexBuilder<'a, 'b, 'tcx> {
- pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self {
- IndexBuilder {
- items: Index::new(ecx.tcx.hir().definitions().def_index_count()),
- ecx,
- }
- }
-
- /// Emit the data for a `DefId` to the metadata. The function to
- /// emit the data is `op`, and it will be given `data` as
- /// arguments. This `record` function will call `op` to generate
- /// the `Entry` (which may point to other encoded information)
- /// and will then record the `Lazy<Entry>` for use in the index.
- ///
- /// In addition, it will setup a dep-graph task to track what data
- /// `op` accesses to generate the metadata, which is later used by
- /// incremental compilation to compute a hash for the metadata and
- /// track changes.
- ///
- /// The reason that `op` is a function pointer, and not a closure,
- /// is that we want to be able to completely track all data it has
- /// access to, so that we can be sure that `DATA: DepGraphRead`
- /// holds, and that it is therefore not gaining "secret" access to
- /// bits of HIR or other state that would not be trackd by the
- /// content system.
- pub fn record<'x, DATA>(&'x mut self,
- id: DefId,
- op: fn(&mut IsolatedEncoder<'x, 'b, 'tcx>, DATA) -> Entry<'tcx>,
- data: DATA)
- where DATA: DepGraphRead
- {
- assert!(id.is_local());
-
- // We don't track this since we are explicitly computing the incr. comp.
- // hashes anyway. In theory we could do some tracking here and use it to
- // avoid rehashing things (and instead cache the hashes) but it's
- // unclear whether that would be a win since hashing is cheap enough.
- self.ecx.tcx.dep_graph.with_ignore(move || {
- let mut entry_builder = IsolatedEncoder::new(self.ecx);
- let entry = op(&mut entry_builder, data);
- let entry = entry_builder.lazy(&entry);
-
- self.items.record(id, entry);
- })
- }
-
- pub fn into_items(self) -> Index {
- self.items
- }
-}
-
-/// Trait used for data that can be passed from outside a dep-graph
-/// task. The data must either be of some safe type, such as a
-/// `DefId` index, or implement the `read` method so that it can add
-/// a read of whatever dep-graph nodes are appropriate.
-pub trait DepGraphRead {
- fn read(&self, tcx: TyCtxt<'_, '_, '_>);
-}
-
-impl DepGraphRead for DefId {
- fn read(&self, _tcx: TyCtxt<'_, '_, '_>) {}
-}
-
-impl DepGraphRead for ast::NodeId {
- fn read(&self, _tcx: TyCtxt<'_, '_, '_>) {}
-}
-
-impl<T> DepGraphRead for Option<T>
- where T: DepGraphRead
-{
- fn read(&self, tcx: TyCtxt<'_, '_, '_>) {
- match *self {
- Some(ref v) => v.read(tcx),
- None => (),
- }
- }
-}
-
-impl<T> DepGraphRead for [T]
- where T: DepGraphRead
-{
- fn read(&self, tcx: TyCtxt<'_, '_, '_>) {
- for i in self {
- i.read(tcx);
- }
- }
-}
-
-macro_rules! read_tuple {
- ($($name:ident),*) => {
- impl<$($name),*> DepGraphRead for ($($name),*)
- where $($name: DepGraphRead),*
- {
- #[allow(non_snake_case)]
- fn read(&self, tcx: TyCtxt<'_, '_, '_>) {
- let &($(ref $name),*) = self;
- $($name.read(tcx);)*
- }
- }
- }
-}
-read_tuple!(A, B);
-read_tuple!(A, B, C);
-
-macro_rules! read_hir {
- ($t:ty) => {
- impl<'tcx> DepGraphRead for &'tcx $t {
- fn read(&self, tcx: TyCtxt<'_, '_, '_>) {
- tcx.hir().read(self.hir_id);
- }
- }
- }
-}
-read_hir!(hir::Item);
-read_hir!(hir::ImplItem);
-read_hir!(hir::TraitItem);
-read_hir!(hir::ForeignItem);
-read_hir!(hir::MacroDef);
-
-/// Leaks access to a value of type T without any tracking. This is
-/// suitable for ambiguous types like `usize`, which *could* represent
-/// tracked data (e.g., if you read it out of a HIR node) or might not
-/// (e.g., if it's an index). Adding in an `Untracked` is an
-/// assertion, essentially, that the data does not need to be tracked
-/// (or that read edges will be added by some other way).
-///
-/// A good idea is to add to each use of `Untracked` an explanation of
-/// why this value is ok.
-pub struct Untracked<T>(pub T);
-
-impl<T> DepGraphRead for Untracked<T> {
- fn read(&self, _tcx: TyCtxt<'_, '_, '_>) {}
-}
-
-/// Newtype that can be used to package up misc data extracted from a
-/// HIR node that doesn't carry its own ID. This will allow an
-/// arbitrary `T` to be passed in, but register a read on the given
-/// `NodeId`.
-pub struct FromId<T>(pub hir::HirId, pub T);
-
-impl<T> DepGraphRead for FromId<T> {
- fn read(&self, tcx: TyCtxt<'_, '_, '_>) {
- tcx.hir().read(self.0);
- }
-}
+++ /dev/null
-use crate::encoder::EncodeContext;
-use crate::schema::{Lazy, LazySeq};
-use rustc::ty::TyCtxt;
-use rustc_serialize::Encodable;
-
-/// The IsolatedEncoder provides facilities to write to crate metadata while
-/// making sure that anything going through it is also feed into an ICH hasher.
-pub struct IsolatedEncoder<'a, 'b: 'a, 'tcx: 'b> {
- pub tcx: TyCtxt<'b, 'tcx, 'tcx>,
- ecx: &'a mut EncodeContext<'b, 'tcx>,
-}
-
-impl<'a, 'b: 'a, 'tcx: 'b> IsolatedEncoder<'a, 'b, 'tcx> {
-
- pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self {
- let tcx = ecx.tcx;
- IsolatedEncoder {
- tcx,
- ecx,
- }
- }
-
- pub fn lazy<T>(&mut self, value: &T) -> Lazy<T>
- where T: Encodable
- {
- self.ecx.lazy(value)
- }
-
- pub fn lazy_seq<I, T>(&mut self, iter: I) -> LazySeq<T>
- where I: IntoIterator<Item = T>,
- T: Encodable
- {
- self.ecx.lazy_seq(iter)
- }
-
- pub fn lazy_seq_ref<'x, I, T>(&mut self, iter: I) -> LazySeq<T>
- where I: IntoIterator<Item = &'x T>,
- T: 'x + Encodable
- {
- self.ecx.lazy_seq_ref(iter)
- }
-
- pub fn lazy_seq_from_slice<T>(&mut self, slice: &[T]) -> LazySeq<T>
- where T: Encodable
- {
- self.ecx.lazy_seq_ref(slice.iter())
- }
-}
#![feature(box_patterns)]
#![feature(drain_filter)]
+#![feature(in_band_lifetimes)]
#![feature(libc)]
#![feature(nll)]
#![feature(proc_macro_internals)]
mod error_codes;
-mod index_builder;
mod index;
mod encoder;
mod decoder;
mod cstore_impl;
-mod isolated_encoder;
mod schema;
mod native_libs;
mod link_args;
use rustc::hir;
use rustc::hir::def::{self, CtorKind};
-use rustc::hir::def_id::{DefIndex, DefId, CrateNum};
-use rustc::ich::StableHashingContext;
+use rustc::hir::def_id::{DefIndex, DefId};
+use rustc::middle::exported_symbols::{ExportedSymbol, SymbolExportLevel};
use rustc::middle::cstore::{DepKind, LinkagePreference, NativeLibrary, ForeignModule};
use rustc::middle::lang_items;
use rustc::mir;
use syntax_pos::{self, Span};
use std::marker::PhantomData;
-use std::mem;
-
-use rustc_data_structures::stable_hasher::{StableHasher, HashStable,
- StableHasherResult};
pub fn rustc_version() -> String {
format!("rustc {}",
impl<T> serialize::UseSpecializedEncodable for Lazy<T> {}
impl<T> serialize::UseSpecializedDecodable for Lazy<T> {}
-impl<CTX, T> HashStable<CTX> for Lazy<T> {
- fn hash_stable<W: StableHasherResult>(&self,
- _: &mut CTX,
- _: &mut StableHasher<W>) {
- // There's nothing to do. Whatever got encoded within this Lazy<>
- // wrapper has already been hashed.
- }
-}
-
/// A sequence of type T referred to by its absolute position
/// in the metadata and length, and which can be decoded lazily.
/// The sequence is a single node for the purposes of `Lazy`.
impl<T> serialize::UseSpecializedEncodable for LazySeq<T> {}
impl<T> serialize::UseSpecializedDecodable for LazySeq<T> {}
-impl<CTX, T> HashStable<CTX> for LazySeq<T> {
- fn hash_stable<W: StableHasherResult>(&self,
- _: &mut CTX,
- _: &mut StableHasher<W>) {
- // There's nothing to do. Whatever got encoded within this Lazy<>
- // wrapper has already been hashed.
- }
-}
-
/// Encoding / decoding state for `Lazy` and `LazySeq`.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum LazyState {
}
#[derive(RustcEncodable, RustcDecodable)]
-pub struct CrateRoot {
+pub struct CrateRoot<'tcx> {
pub name: Symbol,
pub triple: TargetTriple,
pub extra_filename: String,
pub source_map: LazySeq<syntax_pos::SourceFile>,
pub def_path_table: Lazy<hir::map::definitions::DefPathTable>,
pub impls: LazySeq<TraitImpls>,
- pub exported_symbols: EncodedExportedSymbols,
+ pub exported_symbols: LazySeq<(ExportedSymbol<'tcx>, SymbolExportLevel)>,
pub interpret_alloc_index: LazySeq<u32>,
- pub index: LazySeq<index::Index>,
+ pub entries_index: LazySeq<index::Index<'tcx>>,
pub compiler_builtins: bool,
pub needs_allocator: bool,
pub extra_filename: String,
}
-impl_stable_hash_for!(struct CrateDep {
- name,
- hash,
- kind,
- extra_filename
-});
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitImpls {
pub trait_id: (u32, DefIndex),
pub impls: LazySeq<DefIndex>,
}
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for TraitImpls {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let TraitImpls {
- trait_id: (krate, def_index),
- ref impls,
- } = *self;
-
- DefId {
- krate: CrateNum::from_u32(krate),
- index: def_index
- }.hash_stable(hcx, hasher);
- impls.hash_stable(hcx, hasher);
- }
-}
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct Entry<'tcx> {
pub kind: EntryKind<'tcx>,
pub mir: Option<Lazy<mir::Mir<'tcx>>>,
}
-impl_stable_hash_for!(struct Entry<'tcx> {
- kind,
- visibility,
- span,
- attributes,
- children,
- stability,
- deprecation,
- ty,
- inherent_impls,
- variances,
- generics,
- predicates,
- predicates_defined_on,
- mir
-});
-
#[derive(Copy, Clone, RustcEncodable, RustcDecodable)]
pub enum EntryKind<'tcx> {
Const(ConstQualif, Lazy<RenderedConst>),
TraitAlias(Lazy<TraitAliasData<'tcx>>),
}
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for EntryKind<'gcx> {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- mem::discriminant(self).hash_stable(hcx, hasher);
- match *self {
- EntryKind::ImmStatic |
- EntryKind::MutStatic |
- EntryKind::ForeignImmStatic |
- EntryKind::ForeignMutStatic |
- EntryKind::ForeignMod |
- EntryKind::GlobalAsm |
- EntryKind::ForeignType |
- EntryKind::Field |
- EntryKind::Existential |
- EntryKind::Type |
- EntryKind::TypeParam |
- EntryKind::ConstParam => {
- // Nothing else to hash here.
- }
- EntryKind::Const(qualif, ref const_data) => {
- qualif.hash_stable(hcx, hasher);
- const_data.hash_stable(hcx, hasher);
- }
- EntryKind::Enum(ref repr_options) => {
- repr_options.hash_stable(hcx, hasher);
- }
- EntryKind::Variant(ref variant_data) => {
- variant_data.hash_stable(hcx, hasher);
- }
- EntryKind::Struct(ref variant_data, ref repr_options) |
- EntryKind::Union(ref variant_data, ref repr_options) => {
- variant_data.hash_stable(hcx, hasher);
- repr_options.hash_stable(hcx, hasher);
- }
- EntryKind::Fn(ref fn_data) |
- EntryKind::ForeignFn(ref fn_data) => {
- fn_data.hash_stable(hcx, hasher);
- }
- EntryKind::Mod(ref mod_data) => {
- mod_data.hash_stable(hcx, hasher);
- }
- EntryKind::MacroDef(ref macro_def) => {
- macro_def.hash_stable(hcx, hasher);
- }
- EntryKind::Generator(data) => {
- data.hash_stable(hcx, hasher);
- }
- EntryKind::Closure(closure_data) => {
- closure_data.hash_stable(hcx, hasher);
- }
- EntryKind::Trait(ref trait_data) => {
- trait_data.hash_stable(hcx, hasher);
- }
- EntryKind::TraitAlias(ref trait_alias_data) => {
- trait_alias_data.hash_stable(hcx, hasher);
- }
- EntryKind::Impl(ref impl_data) => {
- impl_data.hash_stable(hcx, hasher);
- }
- EntryKind::Method(ref method_data) => {
- method_data.hash_stable(hcx, hasher);
- }
- EntryKind::AssociatedExistential(associated_container) |
- EntryKind::AssociatedType(associated_container) => {
- associated_container.hash_stable(hcx, hasher);
- }
- EntryKind::AssociatedConst(associated_container, qualif, ref const_data) => {
- associated_container.hash_stable(hcx, hasher);
- qualif.hash_stable(hcx, hasher);
- const_data.hash_stable(hcx, hasher);
- }
- }
- }
-}
-
/// Additional data for EntryKind::Const and EntryKind::AssociatedConst
#[derive(Clone, Copy, RustcEncodable, RustcDecodable)]
pub struct ConstQualif {
pub ast_promotable: bool,
}
-impl_stable_hash_for!(struct ConstQualif { mir, ast_promotable });
-
/// Contains a constant which has been rendered to a String.
/// Used by rustdoc.
#[derive(RustcEncodable, RustcDecodable)]
pub struct RenderedConst(pub String);
-impl<'a> HashStable<StableHashingContext<'a>> for RenderedConst {
- #[inline]
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- self.0.hash_stable(hcx, hasher);
- }
-}
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct ModData {
pub reexports: LazySeq<def::Export<hir::HirId>>,
}
-impl_stable_hash_for!(struct ModData { reexports });
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct MacroDef {
pub body: String,
pub legacy: bool,
}
-impl_stable_hash_for!(struct MacroDef { body, legacy });
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct FnData<'tcx> {
pub constness: hir::Constness,
pub sig: Lazy<ty::PolyFnSig<'tcx>>,
}
-impl_stable_hash_for!(struct FnData<'tcx> { constness, arg_names, sig });
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct VariantData<'tcx> {
pub ctor_kind: CtorKind,
pub ctor_sig: Option<Lazy<ty::PolyFnSig<'tcx>>>,
}
-impl_stable_hash_for!(struct VariantData<'tcx> {
- ctor_kind,
- discr,
- ctor,
- ctor_sig
-});
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitData<'tcx> {
pub unsafety: hir::Unsafety,
pub super_predicates: Lazy<ty::GenericPredicates<'tcx>>,
}
-impl_stable_hash_for!(struct TraitData<'tcx> {
- unsafety,
- paren_sugar,
- has_auto_impl,
- is_marker,
- super_predicates
-});
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct TraitAliasData<'tcx> {
pub super_predicates: Lazy<ty::GenericPredicates<'tcx>>,
}
-impl_stable_hash_for!(struct TraitAliasData<'tcx> {
- super_predicates
-});
-
#[derive(RustcEncodable, RustcDecodable)]
pub struct ImplData<'tcx> {
pub polarity: hir::ImplPolarity,
pub trait_ref: Option<Lazy<ty::TraitRef<'tcx>>>,
}
-impl_stable_hash_for!(struct ImplData<'tcx> {
- polarity,
- defaultness,
- parent_impl,
- coerce_unsized_info,
- trait_ref
-});
-
/// Describes whether the container of an associated item
/// is a trait or an impl and whether, in a trait, it has
ImplFinal,
}
-impl_stable_hash_for!(enum crate::schema::AssociatedContainer {
- TraitRequired,
- TraitWithDefault,
- ImplDefault,
- ImplFinal
-});
-
impl AssociatedContainer {
pub fn with_def_id(&self, def_id: DefId) -> ty::AssociatedItemContainer {
match *self {
pub container: AssociatedContainer,
pub has_self: bool,
}
-impl_stable_hash_for!(struct MethodData<'tcx> { fn_data, container, has_self });
#[derive(RustcEncodable, RustcDecodable)]
pub struct ClosureData<'tcx> {
pub sig: Lazy<ty::PolyFnSig<'tcx>>,
}
-impl_stable_hash_for!(struct ClosureData<'tcx> { sig });
#[derive(RustcEncodable, RustcDecodable)]
pub struct GeneratorData<'tcx> {
pub layout: mir::GeneratorLayout<'tcx>,
}
-impl_stable_hash_for!(struct GeneratorData<'tcx> { layout });
// Tags used for encoding Spans:
pub const TAG_VALID_SPAN: u8 = 0;
pub const TAG_INVALID_SPAN: u8 = 1;
-
-#[derive(RustcEncodable, RustcDecodable)]
-pub struct EncodedExportedSymbols {
- pub position: usize,
- pub len: usize,
-}
diag.warn(
"this error has been downgraded to a warning for backwards \
compatibility with previous releases",
- );
- diag.warn(
+ ).warn(
"this represents potential undefined behavior in your code and \
this warning will become a hard error in the future",
+ ).note(
+ "for more information, try `rustc --explain E0729`"
);
}
}
use rustc::ty::{self, Const, DefIdTree, Ty, TyS, TyCtxt};
use rustc_data_structures::indexed_vec::Idx;
use syntax_pos::Span;
-use syntax_pos::symbol::keywords;
+use syntax_pos::symbol::kw;
use crate::dataflow::move_paths::InitLocation;
use crate::borrow_check::MirBorrowckCtxt;
// Deliberately fall into this case for all implicit self types,
// so that we don't fall in to the next case with them.
*kind == mir::ImplicitSelfKind::MutRef
- } else if Some(keywords::SelfLower.name()) == local_decl.name {
+ } else if Some(kw::SelfLower) == local_decl.name {
// Otherwise, check if the name is the self kewyord - in which case
// we have an explicit self. Do the same thing in this case and check
// for a `self: &mut Self` to suggest removing the `&mut`.
use rustc_errors::{Diagnostic, DiagnosticBuilder};
use std::collections::VecDeque;
use syntax::errors::Applicability;
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use syntax_pos::Span;
mod region_name;
"add_static_impl_trait_suggestion: has_static_predicate={:?}",
has_static_predicate
);
- let static_str = keywords::StaticLifetime.name();
+ let static_str = kw::StaticLifetime;
// If there is a static predicate, then the only sensible suggestion is to replace
// fr with `'static`.
if has_static_predicate {
use rustc::ty::{self, RegionKind, RegionVid, Ty, TyCtxt};
use rustc::ty::print::RegionHighlightMode;
use rustc_errors::DiagnosticBuilder;
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use syntax_pos::Span;
use syntax_pos::symbol::InternedString;
}
ty::ReStatic => Some(RegionName {
- name: keywords::StaticLifetime.name().as_interned_str(),
+ name: kw::StaticLifetime.as_interned_str(),
source: RegionNameSource::Static
}),
safety_mode
} =
self.hir.mirror(ast_block);
- self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), block, move |this| {
- this.in_scope((region_scope, source_info), LintLevel::Inherited, block, move |this| {
+ self.in_opt_scope(opt_destruction_scope.map(|de|(de, source_info)), move |this| {
+ this.in_scope((region_scope, source_info), LintLevel::Inherited, move |this| {
if targeted_by_break {
// This is a `break`-able block
let exit_block = this.cfg.start_new_block();
StmtKind::Expr { scope, expr } => {
this.block_context.push(BlockFrame::Statement { ignores_expr_result: true });
unpack!(block = this.in_opt_scope(
- opt_destruction_scope.map(|de|(de, source_info)), block, |this| {
+ opt_destruction_scope.map(|de|(de, source_info)), |this| {
let si = (scope, source_info);
- this.in_scope(si, LintLevel::Inherited, block, |this| {
+ this.in_scope(si, LintLevel::Inherited, |this| {
let expr = this.hir.mirror(expr);
this.stmt_expr(block, expr, Some(stmt_span))
})
let remainder_span = remainder_scope.span(this.hir.tcx(),
&this.hir.region_scope_tree);
- let scope;
+ let visibility_scope =
+ Some(this.new_source_scope(remainder_span, LintLevel::Inherited, None));
// Evaluate the initializer, if present.
if let Some(init) = initializer {
let initializer_span = init.span();
- scope = this.declare_bindings(
- None,
- remainder_span,
- lint_level,
- &pattern,
- ArmHasGuard(false),
- Some((None, initializer_span)),
- );
unpack!(block = this.in_opt_scope(
- opt_destruction_scope.map(|de|(de, source_info)), block, |this| {
+ opt_destruction_scope.map(|de|(de, source_info)), |this| {
let scope = (init_scope, source_info);
- this.in_scope(scope, lint_level, block, |this| {
+ this.in_scope(scope, lint_level, |this| {
+ this.declare_bindings(
+ visibility_scope,
+ remainder_span,
+ &pattern,
+ ArmHasGuard(false),
+ Some((None, initializer_span)),
+ );
this.expr_into_pattern(block, pattern, init)
})
}));
} else {
- scope = this.declare_bindings(
- None, remainder_span, lint_level, &pattern,
- ArmHasGuard(false), None);
+ let scope = (init_scope, source_info);
+ unpack!(this.in_scope(scope, lint_level, |this| {
+ this.declare_bindings(
+ visibility_scope,
+ remainder_span,
+ &pattern,
+ ArmHasGuard(false),
+ None,
+ );
+ block.unit()
+ }));
debug!("ast_block_stmts: pattern={:?}", pattern);
this.visit_bindings(
})
}
- // Enter the source scope, after evaluating the initializer.
- if let Some(source_scope) = scope {
+ // Enter the visibility scope, after evaluating the initializer.
+ if let Some(source_scope) = visibility_scope {
this.source_scope = source_scope;
}
}
{
let source_info = this.source_info(expr.span);
let region_scope = (region_scope, source_info);
- return this.in_scope(region_scope, lint_level, block, |this| {
+ return this.in_scope(region_scope, lint_level, |this| {
this.as_operand(block, scope, value)
});
}
region_scope,
lint_level,
value,
- } => this.in_scope((region_scope, source_info), lint_level, block, |this| {
+ } => this.in_scope((region_scope, source_info), lint_level, |this| {
if mutability == Mutability::Not {
this.as_read_only_place(block, value)
} else {
value,
} => {
let region_scope = (region_scope, source_info);
- this.in_scope(region_scope, lint_level, block, |this| {
+ this.in_scope(region_scope, lint_level, |this| {
this.as_rvalue(block, scope, value)
})
}
//! See docs in build/expr/mod.rs
use crate::build::{BlockAnd, BlockAndExtension, Builder};
+use crate::build::scope::{CachedBlock, DropKind};
use crate::hair::*;
use rustc::middle::region;
use rustc::mir::*;
value,
} = expr.kind
{
- return this.in_scope((region_scope, source_info), lint_level, block, |this| {
+ return this.in_scope((region_scope, source_info), lint_level, |this| {
this.as_temp(block, temp_lifetime, value, mutability)
});
}
}
this.local_decls.push(local_decl)
};
+ let temp_place = &Place::Base(PlaceBase::Local(temp));
+
if !expr_ty.is_never() {
this.cfg.push(
block,
kind: StatementKind::StorageLive(temp),
},
);
+
+ // In constants, `temp_lifetime` is `None` for temporaries that live for the
+ // `'static` lifetime. Thus we do not drop these temporaries and simply leak them.
+ // This is equivalent to what `let x = &foo();` does in functions. The temporary
+    // is lifted to its surrounding scope. In a function that means the temporary lives
+ // until just before the function returns. In constants that means it outlives the
+ // constant's initialization value computation. Anything outliving a constant
+ // must have the `'static` lifetime and live forever.
+    // Anything with a shorter lifetime (e.g. the `&foo()` in `bar(&foo())` or anything
+    // within a block) will keep the regular drops just like runtime code.
+ if let Some(temp_lifetime) = temp_lifetime {
+ this.schedule_drop(
+ expr_span,
+ temp_lifetime,
+ temp_place,
+ expr_ty,
+ DropKind::Storage,
+ );
+ }
}
- unpack!(block = this.into(&Place::Base(PlaceBase::Local(temp)), block, expr));
+ unpack!(block = this.into(temp_place, block, expr));
- // In constants, temp_lifetime is None for temporaries that live for the
- // 'static lifetime. Thus we do not drop these temporaries and simply leak them.
- // This is equivalent to what `let x = &foo();` does in functions. The temporary
- // is lifted to their surrounding scope. In a function that means the temporary lives
- // until just before the function returns. In constants that means it outlives the
- // constant's initialization value computation. Anything outliving a constant
- // must have the `'static` lifetime and live forever.
- // Anything with a shorter lifetime (e.g the `&foo()` in `bar(&foo())` or anything
- // within a block will keep the regular drops just like runtime code.
if let Some(temp_lifetime) = temp_lifetime {
- this.schedule_drop_storage_and_value(
+ this.schedule_drop(
expr_span,
temp_lifetime,
- &Place::Base(PlaceBase::Local(temp)),
+ temp_place,
expr_ty,
+ DropKind::Value {
+ cached_block: CachedBlock::default(),
+ },
);
}
value,
} => {
let region_scope = (region_scope, source_info);
- this.in_scope(region_scope, lint_level, block, |this| {
+ this.in_scope(region_scope, lint_level, |this| {
this.into(destination, block, value)
})
}
value,
} => {
let value = this.hir.mirror(value);
- this.in_scope((region_scope, source_info), lint_level, block, |this| {
+ this.in_scope((region_scope, source_info), lint_level, |this| {
this.stmt_expr(block, value, opt_stmt_span)
})
}
use crate::hair::{self, *};
use rustc::hir::HirId;
use rustc::mir::*;
+use rustc::middle::region;
use rustc::ty::{self, CanonicalUserTypeAnnotation, Ty};
use rustc::ty::layout::VariantIdx;
use rustc_data_structures::bit_set::BitSet;
// Step 5. Create everything else: the guards and the arms.
- let outer_source_info = self.source_info(span);
let arm_end_blocks: Vec<_> = arm_candidates.into_iter().map(|(arm, candidates)| {
- let mut arm_block = self.cfg.start_new_block();
-
- let body = self.hir.mirror(arm.body.clone());
- let scope = self.declare_bindings(
- None,
- body.span,
- LintLevel::Inherited,
- &arm.patterns[0],
- ArmHasGuard(arm.guard.is_some()),
- Some((Some(&scrutinee_place), scrutinee_span)),
- );
-
- for candidate in candidates {
- self.bind_and_guard_matched_candidate(
- candidate,
- arm.guard.clone(),
- arm_block,
- &fake_borrow_temps,
- scrutinee_span,
+ let arm_source_info = self.source_info(arm.span);
+ let region_scope = (arm.scope, arm_source_info);
+ self.in_scope(region_scope, arm.lint_level, |this| {
+ let arm_block = this.cfg.start_new_block();
+
+ let body = this.hir.mirror(arm.body.clone());
+ let scope = this.declare_bindings(
+ None,
+ arm.span,
+ &arm.patterns[0],
+ ArmHasGuard(arm.guard.is_some()),
+ Some((Some(&scrutinee_place), scrutinee_span)),
);
- }
- if let Some(source_scope) = scope {
- self.source_scope = source_scope;
- }
+ if let Some(source_scope) = scope {
+ this.source_scope = source_scope;
+ }
- unpack!(arm_block = self.into(destination, arm_block, body));
+ for candidate in candidates {
+ this.clear_top_scope(arm.scope);
+ this.bind_and_guard_matched_candidate(
+ candidate,
+ arm.guard.clone(),
+ arm_block,
+ &fake_borrow_temps,
+ scrutinee_span,
+ region_scope,
+ );
+ }
- arm_block
+ this.into(destination, arm_block, body)
+ })
}).collect();
// all the arm blocks will rejoin here
for arm_block in arm_end_blocks {
self.cfg.terminate(
- arm_block,
+ unpack!(arm_block),
outer_source_info,
TerminatorKind::Goto { target: end_block },
);
&mut self,
mut visibility_scope: Option<SourceScope>,
scope_span: Span,
- lint_level: LintLevel,
pattern: &Pattern<'tcx>,
has_guard: ArmHasGuard,
opt_match_place: Option<(Option<&Place<'tcx>>, Span)>,
) -> Option<SourceScope> {
- assert!(
- !(visibility_scope.is_some() && lint_level.is_explicit()),
- "can't have both a visibility and a lint scope at the same time"
- );
- let mut scope = self.source_scope;
debug!("declare_bindings: pattern={:?}", pattern);
self.visit_bindings(
&pattern,
UserTypeProjections::none(),
&mut |this, mutability, name, mode, var, span, ty, user_ty| {
if visibility_scope.is_none() {
- // If we have lints, create a new source scope
- // that marks the lints for the locals. See the comment
- // on the `source_info` field for why this is needed.
- if lint_level.is_explicit() {
- scope = this.new_source_scope(scope_span, lint_level, None);
- }
- visibility_scope = Some(this.new_source_scope(scope_span,
- LintLevel::Inherited,
- None));
+ visibility_scope =
+ Some(this.new_source_scope(scope_span, LintLevel::Inherited, None));
}
- let source_info = SourceInfo { span, scope };
+ let source_info = SourceInfo { span, scope: this.source_scope };
let visibility_scope = visibility_scope.unwrap();
this.declare_binding(
source_info,
span,
untested_candidates,
join_block,
- &mut None,
+ fake_borrows,
)
}
arm_block: BasicBlock,
fake_borrows: &Vec<(&Place<'tcx>, Local)>,
scrutinee_span: Span,
+ region_scope: (region::Scope, SourceInfo),
) {
debug!("bind_and_guard_matched_candidate(candidate={:?})", candidate);
//
// and that is clearly not correct.
let post_guard_block = self.cfg.start_new_block();
+ let otherwise_post_guard_block = self.cfg.start_new_block();
self.cfg.terminate(
block,
source_info,
TerminatorKind::if_(
self.hir.tcx(),
- cond,
+ cond.clone(),
post_guard_block,
- candidate.otherwise_block.unwrap()
+ otherwise_post_guard_block,
),
);
+ self.exit_scope(
+ source_info.span,
+ region_scope,
+ otherwise_post_guard_block,
+ candidate.otherwise_block.unwrap(),
+ );
+
+ if let Operand::Copy(cond_place) | Operand::Move(cond_place) = cond {
+ if let Place::Base(PlaceBase::Local(cond_temp)) = cond_place {
+ // We will call `clear_top_scope` if there's another guard. So
+ // we have to drop this variable now or it will be "storage
+ // leaked".
+ self.pop_variable(
+ post_guard_block,
+ region_scope.0,
+ cond_temp
+ );
+ } else {
+ bug!("Expected as_local_operand to produce a temporary");
+ }
+ }
+
let by_value_bindings = candidate.bindings.iter().filter(|binding| {
if let BindingMode::ByValue = binding.binding_mode { true } else { false }
});
use std::u32;
use rustc_target::spec::abi::Abi;
use syntax::attr::{self, UnwindAttr};
-use syntax::symbol::keywords;
+use syntax::symbol::kw;
use syntax_pos::Span;
use super::lints;
ty::UpvarCapture::ByRef(..) => true,
};
let mut debuginfo = UpvarDebuginfo {
- debug_name: keywords::Invalid.name(),
+ debug_name: kw::Invalid,
by_ref,
};
let mut mutability = Mutability::Not;
let mut block = START_BLOCK;
let source_info = builder.source_info(span);
let call_site_s = (call_site_scope, source_info);
- unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, block, |builder| {
+ unpack!(block = builder.in_scope(call_site_s, LintLevel::Inherited, |builder| {
if should_abort_on_panic(tcx, fn_def_id, abi) {
builder.schedule_abort();
}
let arg_scope_s = (arg_scope, source_info);
- unpack!(block = builder.in_scope(arg_scope_s, LintLevel::Inherited, block, |builder| {
+ unpack!(block = builder.in_scope(arg_scope_s, LintLevel::Inherited, |builder| {
builder.args_and_body(block, &arguments, arg_scope, &body.value)
}));
// Attribute epilogue to function's closing brace
self.var_indices.insert(var, LocalsForNode::One(local));
}
_ => {
- scope = self.declare_bindings(scope, ast_body.span,
- LintLevel::Inherited, &pattern,
- matches::ArmHasGuard(false),
- Some((Some(&place), span)));
+ scope = self.declare_bindings(
+ scope,
+ ast_body.span,
+ &pattern,
+ matches::ArmHasGuard(false),
+ Some((Some(&place), span)),
+ );
unpack!(block = self.place_into_pattern(block, pattern, &place, false));
}
}
them. Eventually, when we shift to non-lexical lifetimes, there should
be no need to remember this mapping.
-There is one additional wrinkle, actually, that I wanted to hide from
-you but duty compels me to mention. In the course of building
-matches, it sometimes happen that certain code (namely guards) gets
-executed multiple times. This means that the scope lexical scope may
-in fact correspond to multiple, disjoint SEME regions. So in fact our
+### Not so SEME Regions
+
+In the course of building matches, it sometimes happens that certain code
+(namely guards) gets executed multiple times. This means that the lexical
+scope may in fact correspond to multiple, disjoint SEME regions. So in fact our
mapping is from one scope to a vector of SEME regions.
+Also in matches, the scopes assigned to arms are not even SEME regions! Each
+arm has a single region with one entry for each pattern. We manually
+manipulate the scheduled drops in this scope to avoid dropping things multiple
+times, although drop elaboration would clean this up for value drops.
+
### Drops
The primary purpose for scopes is to insert drops: while building
pub fn in_opt_scope<F, R>(&mut self,
opt_scope: Option<(region::Scope, SourceInfo)>,
- mut block: BasicBlock,
f: F)
-> BlockAnd<R>
where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> BlockAnd<R>
{
- debug!("in_opt_scope(opt_scope={:?}, block={:?})", opt_scope, block);
+ debug!("in_opt_scope(opt_scope={:?})", opt_scope);
if let Some(region_scope) = opt_scope { self.push_scope(region_scope); }
+ let mut block;
let rv = unpack!(block = f(self));
if let Some(region_scope) = opt_scope {
unpack!(block = self.pop_scope(region_scope, block));
pub fn in_scope<F, R>(&mut self,
region_scope: (region::Scope, SourceInfo),
lint_level: LintLevel,
- mut block: BasicBlock,
f: F)
-> BlockAnd<R>
where F: FnOnce(&mut Builder<'a, 'gcx, 'tcx>) -> BlockAnd<R>
{
- debug!("in_scope(region_scope={:?}, block={:?})", region_scope, block);
+ debug!("in_scope(region_scope={:?})", region_scope);
let source_scope = self.source_scope;
let tcx = self.hir.tcx();
if let LintLevel::Explicit(current_hir_id) = lint_level {
}
}
self.push_scope(region_scope);
+ let mut block;
let rv = unpack!(block = f(self));
unpack!(block = self.pop_scope(region_scope, block));
self.source_scope = source_scope;
// Note that this code iterates scopes from the inner-most to the outer-most,
// invalidating caches of each scope visited. This way bare minimum of the
// caches gets invalidated. i.e., if a new drop is added into the middle scope, the
- // cache of outer scpoe stays intact.
+ // cache of outer scope stays intact.
scope.invalidate_cache(!needs_drop, this_scope);
if this_scope {
if let DropKind::Value { .. } = drop_kind {
success_block
}
+
+ // `match` arm scopes
+ // ==================
+ /// Unschedules any drops in the top scope.
+ ///
+ /// This is only needed for `match` arm scopes, because they have one
+ /// entrance per pattern, but only one exit.
+ pub fn clear_top_scope(&mut self, region_scope: region::Scope) {
+ let top_scope = self.scopes.last_mut().unwrap();
+
+ assert_eq!(top_scope.region_scope, region_scope);
+
+ top_scope.drops.clear();
+ top_scope.invalidate_cache(false, true);
+ }
+
+ /// Drops the single variable provided
+ ///
+ /// * The scope must be the top scope.
+ /// * The variable must be in that scope.
+ /// * The variable must be at the top of that scope: it's the next thing
+ /// scheduled to drop.
+ /// * The drop must be of `DropKind::Storage`.
+ ///
+ /// This is used for the boolean holding the result of the match guard. We
+ /// do this because:
+ ///
+ /// * The boolean is different for each pattern
+ /// * There is only one exit for the arm scope
+ /// * The guard expression scope is too short: it ends just before the
+ /// boolean is tested.
+ pub fn pop_variable(
+ &mut self,
+ block: BasicBlock,
+ region_scope: region::Scope,
+ variable: Local,
+ ) {
+ let top_scope = self.scopes.last_mut().unwrap();
+
+ assert_eq!(top_scope.region_scope, region_scope);
+
+ let top_drop_data = top_scope.drops.pop().unwrap();
+
+ match top_drop_data.kind {
+ DropKind::Value { .. } => {
+ bug!("Should not be calling pop_top_variable on non-copy type!")
+ }
+ DropKind::Storage => {
+ // Drop the storage for both value and storage drops.
+ // Only temps and vars need their storage dead.
+ match top_drop_data.location {
+ Place::Base(PlaceBase::Local(index)) => {
+ let source_info = top_scope.source_info(top_drop_data.span);
+ assert_eq!(index, variable);
+ self.cfg.push(block, Statement {
+ source_info,
+ kind: StatementKind::StorageDead(index)
+ });
+ }
+ _ => unreachable!(),
+ }
+ }
+ }
+
+ top_scope.invalidate_cache(true, true);
+ }
+
}
/// Builds drops for pop_scope and exit_scope.
use crate::util::elaborate_drops::DropFlagState;
-use super::move_paths::{HasMoveData, MoveData, MovePathIndex, InitIndex};
-use super::move_paths::{LookupResult, InitKind};
+use super::move_paths::{HasMoveData, MoveData, MovePathIndex, InitIndex, InitKind};
use super::{BitDenotation, BlockSets, InitialFlow};
use super::drop_flag_effects_for_function_entry;
sets.gen_all(&init_loc_map[location]);
match stmt.kind {
- mir::StatementKind::StorageDead(local) |
- mir::StatementKind::StorageLive(local) => {
- // End inits for StorageDead and StorageLive, so that an immutable
- // variable can be reinitialized on the next iteration of the loop.
- //
- // FIXME(#46525): We *need* to do this for StorageLive as well as
- // StorageDead, because lifetimes of match bindings with guards are
- // weird - i.e., this code
- //
- // ```
- // fn main() {
- // match 0 {
- // a | a
- // if { println!("a={}", a); false } => {}
- // _ => {}
- // }
- // }
- // ```
- //
- // runs the guard twice, using the same binding for `a`, and only
- // storagedeads after everything ends, so if we don't regard the
- // storagelive as killing storage, we would have a multiple assignment
- // to immutable data error.
- if let LookupResult::Exact(mpi) =
- rev_lookup.find(&mir::Place::Base(mir::PlaceBase::Local(local))) {
- debug!("stmt {:?} at loc {:?} clears the ever initialized status of {:?}",
- stmt, location, &init_path_map[mpi]);
- sets.kill_all(&init_path_map[mpi]);
- }
+ mir::StatementKind::StorageDead(local) => {
+ // End inits for StorageDead, so that an immutable variable can
+ // be reinitialized on the next iteration of the loop.
+ let move_path_index = rev_lookup.find_local(local);
+ debug!("stmt {:?} at loc {:?} clears the ever initialized status of {:?}",
+ stmt, location, &init_path_map[move_path_index]);
+ sets.kill_all(&init_path_map[move_path_index]);
}
_ => {}
}
```
"##,
+E0729: r##"
+Support for Non-Lexical Lifetimes (NLL) has been included in the Rust compiler
+since 1.31, and has been enabled on the 2015 edition since 1.36. The new borrow
+checker for NLL uncovered some bugs in the old borrow checker, which in some
+cases allowed unsound code to compile, resulting in memory safety issues.
+
+### What do I do?
+
+Change your code so the warning no longer triggers. For backwards
+compatibility, this unsound code may still compile (with a warning) right now.
+However, at some point in the future, the compiler will no longer accept this
+code and will throw a hard error.
+
+### Shouldn't you fix the old borrow checker?
+
+The old borrow checker has known soundness issues that are basically impossible
+to fix. The new NLL-based borrow checker is the fix.
+
+### Can I turn these warnings into errors by denying a lint?
+
+No.
+
+### When are these warnings going to turn into errors?
+
+No formal timeline for turning the warnings into errors has been set. See
+[GitHub issue 58781](https://github.com/rust-lang/rust/issues/58781) for more
+information.
+
+### Why do I get this message with code that doesn't involve borrowing?
+
+There are some known bugs that trigger this message.
+"##,
}
register_diagnostics! {
_ => None,
},
body: arm.body.to_ref(),
- // BUG: fix this
- lint_level: LintLevel::Inherited,
+ lint_level: LintLevel::Explicit(arm.hir_id),
+ scope: region::Scope {
+ id: arm.hir_id.local_id,
+ data: region::ScopeData::Node
+ },
+ span: arm.span,
}
}
Explicit(hir::HirId)
}
-impl LintLevel {
- pub fn is_explicit(self) -> bool {
- match self {
- LintLevel::Inherited => false,
- LintLevel::Explicit(_) => true
- }
- }
-}
-
#[derive(Clone, Debug)]
pub struct Block<'tcx> {
pub targeted_by_break: bool,
pub guard: Option<Guard<'tcx>>,
pub body: ExprRef<'tcx>,
pub lint_level: LintLevel,
+ pub scope: region::Scope,
+ pub span: Span,
}
#[derive(Clone, Debug)]
context: PlaceContext,
location: Location) {
debug!("visit_place: place={:?} context={:?} location={:?}", place, context, location);
- self.super_place(place, context, location);
- match *place {
- Place::Base(PlaceBase::Local(_)) => {}
- Place::Base(PlaceBase::Static(box Static{ kind: StaticKind::Promoted(_), .. })) => {
- unreachable!()
- }
- Place::Base(PlaceBase::Static(box Static{ kind: StaticKind::Static(def_id), .. })) => {
- if self.tcx
- .get_attrs(def_id)
- .iter()
- .any(|attr| attr.check_name(sym::thread_local)) {
- if self.mode != Mode::Fn {
- span_err!(self.tcx.sess, self.span, E0625,
- "thread-local statics cannot be \
- accessed at compile-time");
- }
- return;
+ place.iterate(|place_base, place_projections| {
+ match place_base {
+ PlaceBase::Local(_) => {}
+ PlaceBase::Static(box Static{ kind: StaticKind::Promoted(_), .. }) => {
+ unreachable!()
}
+ PlaceBase::Static(box Static{ kind: StaticKind::Static(def_id), .. }) => {
+ if self.tcx
+ .get_attrs(*def_id)
+ .iter()
+ .any(|attr| attr.check_name(sym::thread_local)) {
+ if self.mode != Mode::Fn {
+ span_err!(self.tcx.sess, self.span, E0625,
+ "thread-local statics cannot be \
+ accessed at compile-time");
+ }
+ return;
+ }
- // Only allow statics (not consts) to refer to other statics.
- if self.mode == Mode::Static || self.mode == Mode::StaticMut {
- if self.mode == Mode::Static && context.is_mutating_use() {
- // this is not strictly necessary as miri will also bail out
- // For interior mutability we can't really catch this statically as that
- // goes through raw pointers and intermediate temporaries, so miri has
- // to catch this anyway
- self.tcx.sess.span_err(
- self.span,
- "cannot mutate statics in the initializer of another static",
- );
+ // Only allow statics (not consts) to refer to other statics.
+ if self.mode == Mode::Static || self.mode == Mode::StaticMut {
+ if self.mode == Mode::Static && context.is_mutating_use() {
+ // this is not strictly necessary as miri will also bail out
+ // For interior mutability we can't really catch this statically as that
+ // goes through raw pointers and intermediate temporaries, so miri has
+ // to catch this anyway
+ self.tcx.sess.span_err(
+ self.span,
+ "cannot mutate statics in the initializer of another static",
+ );
+ }
+ return;
}
- return;
- }
- unleash_miri!(self);
+ unleash_miri!(self);
- if self.mode != Mode::Fn {
- let mut err = struct_span_err!(self.tcx.sess, self.span, E0013,
- "{}s cannot refer to statics, use \
- a constant instead", self.mode);
- if self.tcx.sess.teach(&err.get_code().unwrap()) {
- err.note(
- "Static and const variables can refer to other const variables. But a \
- const variable cannot refer to a static variable."
- );
- err.help(
- "To fix this, the value can be extracted as a const and then used."
- );
+ if self.mode != Mode::Fn {
+ let mut err = struct_span_err!(self.tcx.sess, self.span, E0013,
+ "{}s cannot refer to statics, use \
+ a constant instead", self.mode);
+ if self.tcx.sess.teach(&err.get_code().unwrap()) {
+ err.note(
+ "Static and const variables can refer to other const variables. \
+ But a const variable cannot refer to a static variable."
+ );
+ err.help(
+ "To fix this, the value can be extracted as a const and then used."
+ );
+ }
+ err.emit()
}
- err.emit()
}
}
- Place::Projection(ref proj) => {
+
+ for proj in place_projections {
match proj.elem {
ProjectionElem::Deref => {
if context.is_mutating_use() {
}
}
}
- }
+ });
}
fn visit_operand(&mut self, operand: &Operand<'tcx>, location: Location) {
use syntax::ast::*;
use syntax::attr;
use syntax::source_map::Spanned;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax::ptr::P;
use syntax::visit::{self, Visitor};
use syntax::{span_err, struct_span_err, walk_list};
}
fn check_lifetime(&self, ident: Ident) {
- let valid_names = [keywords::UnderscoreLifetime.name(),
- keywords::StaticLifetime.name(),
- keywords::Invalid.name()];
+ let valid_names = [kw::UnderscoreLifetime,
+ kw::StaticLifetime,
+ kw::Invalid];
if !valid_names.contains(&ident.name) && ident.without_first_quote().is_reserved() {
self.err_handler().span_err(ident.span, "lifetimes cannot use keyword names");
}
}
fn visit_arm(&mut self, a: &'v hir::Arm) {
- self.record("Arm", Id::None, a);
+ self.record("Arm", Id::Node(a.hir_id), a);
hir_visit::walk_arm(self, a)
}
use std::path::PathBuf;
use syntax::ast;
use syntax::span_err;
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax_pos::{Span, DUMMY_SP};
/// Pointer to a registrar function.
for plugin in plugins {
// plugins must have a name and can't be key = value
let name = plugin.name_or_empty();
- if name != keywords::Invalid.name() && !plugin.is_value_str() {
+ if name != kw::Invalid && !plugin.is_value_str() {
let args = plugin.meta_item_list().map(ToOwned::to_owned);
loader.load_plugin(plugin.span(), name, args.unwrap_or_default());
} else {
use rustc::ty::subst::InternalSubsts;
use rustc::util::nodemap::HirIdSet;
use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::sync::Lrc;
use syntax::ast::Ident;
use syntax::attr;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax_pos::Span;
use std::{cmp, fmt, mem};
fn visit_trait(&mut self, trait_ref: TraitRef<'tcx>) -> bool {
self.skeleton().visit_trait(trait_ref)
}
- fn visit_predicates(&mut self, predicates: Lrc<ty::GenericPredicates<'tcx>>) -> bool {
+ fn visit_predicates(&mut self, predicates: &ty::GenericPredicates<'tcx>) -> bool {
self.skeleton().visit_predicates(predicates)
}
}
(!self.def_id_visitor.shallow() && substs.visit_with(self))
}
- fn visit_predicates(&mut self, predicates: Lrc<ty::GenericPredicates<'tcx>>) -> bool {
- let ty::GenericPredicates { parent: _, predicates } = &*predicates;
+ fn visit_predicates(&mut self, predicates: &ty::GenericPredicates<'tcx>) -> bool {
+ let ty::GenericPredicates { parent: _, predicates } = predicates;
for (predicate, _span) in predicates {
match predicate {
ty::Predicate::Trait(poly_predicate) => {
span: Span, // span of the field pattern, e.g., `x: 0`
def: &'tcx ty::AdtDef, // definition of the struct or enum
field: &'tcx ty::FieldDef) { // definition of the field
- let ident = Ident::new(keywords::Invalid.name(), use_ctxt);
+ let ident = Ident::new(kw::Invalid, use_ctxt);
let current_hir = self.current_item;
let def_id = self.tcx.adjust_ident(ident, def.did, current_hir).1;
if !def.is_enum() && !field.vis.is_accessible_from(def_id, self.tcx) {
fn privacy_access_levels<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
krate: CrateNum,
-) -> Lrc<AccessLevels> {
+) -> &'tcx AccessLevels {
assert_eq!(krate, LOCAL_CRATE);
// Build up a set of all exported items in the AST. This is a set of all
}
visitor.update(hir::CRATE_HIR_ID, Some(AccessLevel::Public));
- Lrc::new(visitor.access_levels)
+ tcx.arena.alloc(visitor.access_levels)
}
fn check_private_in_public<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>, krate: CrateNum) {
use syntax::parse::token::{self, Token};
use syntax::span_err;
use syntax::std_inject::injected_crate_name;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, DUMMY_SP};
}
_ => None,
}.map(|ctxt| Segment::from_ident(Ident::new(
- keywords::PathRoot.name(), use_tree.prefix.span.shrink_to_lo().with_ctxt(ctxt)
+ kw::PathRoot, use_tree.prefix.span.shrink_to_lo().with_ctxt(ctxt)
)));
let prefix = crate_root.into_iter().chain(prefix_iter).collect::<Vec<_>>();
let empty_for_self = |prefix: &[Segment]| {
prefix.is_empty() ||
- prefix.len() == 1 && prefix[0].ident.name == keywords::PathRoot.name()
+ prefix.len() == 1 && prefix[0].ident.name == kw::PathRoot
};
match use_tree.kind {
ast::UseTreeKind::Simple(rename, ..) => {
if nested {
// Correctly handle `self`
- if source.ident.name == keywords::SelfLower.name() {
+ if source.ident.name == kw::SelfLower {
type_ns_only = true;
if empty_for_self(&module_path) {
}
} else {
// Disallow `self`
- if source.ident.name == keywords::SelfLower.name() {
+ if source.ident.name == kw::SelfLower {
resolve_error(self,
use_tree.span,
ResolutionError::SelfImportsOnlyAllowedWithin);
}
// Disallow `use $crate;`
- if source.ident.name == keywords::DollarCrate.name() && module_path.is_empty() {
+ if source.ident.name == kw::DollarCrate && module_path.is_empty() {
let crate_root = self.resolve_crate_root(source.ident);
let crate_name = match crate_root.kind {
ModuleKind::Def(.., name) => name,
// HACK(eddyb) unclear how good this is, but keeping `$crate`
// in `source` breaks `src/test/compile-fail/import-crate-var.rs`,
// while the current crate doesn't have a valid `crate_name`.
- if crate_name != keywords::Invalid.name() {
+ if crate_name != kw::Invalid {
// `crate_name` should not be interpreted as relative.
module_path.push(Segment {
ident: Ident {
- name: keywords::PathRoot.name(),
+ name: kw::PathRoot,
span: source.ident.span,
},
id: Some(self.session.next_node_id()),
}
}
- if ident.name == keywords::Crate.name() {
+ if ident.name == kw::Crate {
self.session.span_err(ident.span,
"crate root imports need to be explicitly named: \
`use crate as name;`");
// Ensure there is at most one `self` in the list
let self_spans = items.iter().filter_map(|&(ref use_tree, _)| {
if let ast::UseTreeKind::Simple(..) = use_tree.kind {
- if use_tree.ident().name == keywords::SelfLower.name() {
+ if use_tree.ident().name == kw::SelfLower {
return Some(use_tree.span);
}
}
let new_span = prefix[prefix.len() - 1].ident.span;
let tree = ast::UseTree {
prefix: ast::Path::from_ident(
- Ident::new(keywords::SelfLower.name(), new_span)
+ Ident::new(kw::SelfLower, new_span)
),
kind: ast::UseTreeKind::Simple(
Some(Ident::from_str_and_span("__dummy", new_span).gensym()),
}
ItemKind::ExternCrate(orig_name) => {
- let module = if orig_name.is_none() && ident.name == keywords::SelfLower.name() {
+ let module = if orig_name.is_none() && ident.name == kw::SelfLower {
self.session
.struct_span_err(item.span, "`extern crate self;` requires renaming")
.span_suggestion(
)
.emit();
return;
- } else if orig_name == Some(keywords::SelfLower.name()) {
+ } else if orig_name == Some(kw::SelfLower) {
self.graph_root
} else {
let crate_id = self.crate_loader.process_extern_crate(item, &self.definitions);
ItemKind::GlobalAsm(..) => {}
- ItemKind::Mod(..) if ident == keywords::Invalid.ident() => {} // Crate root
+ ItemKind::Mod(..) if ident.name == kw::Invalid => {} // Crate root
ItemKind::Mod(..) => {
let def_id = self.definitions.local_def_id(item.id);
"an `extern crate` loading macros must be at the crate root");
}
if let ItemKind::ExternCrate(Some(orig_name)) = item.node {
- if orig_name == keywords::SelfLower.name() {
+ if orig_name == kw::SelfLower {
self.session.span_err(attr.span,
"`macro_use` is not supported on `extern crate self`");
}
use rustc::session::{Session, config::nightly_options};
use syntax::ast::{self, Expr, ExprKind, Ident};
use syntax::ext::base::MacroKind;
-use syntax::symbol::{Symbol, keywords};
+use syntax::symbol::{Symbol, kw};
use syntax_pos::{BytePos, Span};
type Res = def::Res<ast::NodeId>;
let item_span = path.last().unwrap().ident.span;
let (mod_prefix, mod_str) = if path.len() == 1 {
(String::new(), "this scope".to_string())
- } else if path.len() == 2 && path[0].ident.name == keywords::PathRoot.name() {
+ } else if path.len() == 2 && path[0].ident.name == kw::PathRoot {
(String::new(), "the crate root".to_string())
} else {
let mod_path = &path[..path.len() - 1];
match (path.get(0), path.get(1)) {
// `{{root}}::ident::...` on both editions.
// On 2015 `{{root}}` is usually added implicitly.
- (Some(fst), Some(snd)) if fst.ident.name == keywords::PathRoot.name() &&
+ (Some(fst), Some(snd)) if fst.ident.name == kw::PathRoot &&
!snd.ident.is_path_segment_keyword() => {}
// `ident::...` on 2018.
(Some(fst), _) if fst.ident.span.rust_2018() &&
!fst.ident.is_path_segment_keyword() => {
// Insert a placeholder that's later replaced by `self`/`super`/etc.
- path.insert(0, Segment::from_ident(keywords::Invalid.ident()));
+ path.insert(0, Segment::from_ident(Ident::invalid()));
}
_ => return None,
}
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
// Replace first ident with `self` and check if that is valid.
- path[0].ident.name = keywords::SelfLower.name();
+ path[0].ident.name = kw::SelfLower;
let result = self.resolve_path(&path, None, parent_scope, false, span, CrateLint::No);
debug!("make_missing_self_suggestion: path={:?} result={:?}", path, result);
if let PathResult::Module(..) = result {
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
// Replace first ident with `crate` and check if that is valid.
- path[0].ident.name = keywords::Crate.name();
+ path[0].ident.name = kw::Crate;
let result = self.resolve_path(&path, None, parent_scope, false, span, CrateLint::No);
debug!("make_missing_crate_suggestion: path={:?} result={:?}", path, result);
if let PathResult::Module(..) = result {
parent_scope: &ParentScope<'b>,
) -> Option<(Vec<Segment>, Vec<String>)> {
// Replace first ident with `crate` and check if that is valid.
- path[0].ident.name = keywords::Super.name();
+ path[0].ident.name = kw::Super;
let result = self.resolve_path(&path, None, parent_scope, false, span, CrateLint::No);
debug!("make_missing_super_suggestion: path={:?} result={:?}", path, result);
if let PathResult::Module(..) = result {
use syntax::ext::base::SyntaxExtension;
use syntax::ext::base::Determinacy::{self, Determined, Undetermined};
use syntax::ext::base::MacroKind;
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::visit::{self, FnKind, Visitor};
self.smart_resolve_path(ty.id, qself.as_ref(), path, PathSource::Type);
}
TyKind::ImplicitSelf => {
- let self_ty = keywords::SelfUpper.ident();
+ let self_ty = Ident::with_empty_ctxt(kw::SelfUpper);
let res = self.resolve_ident_in_lexical_scope(self_ty, TypeNS, Some(ty.id), ty.span)
.map_or(Res::Err, |d| d.res());
self.record_partial_res(ty.id, PartialRes::new(res));
is_value: bool
) -> hir::Path {
let root = if crate_root.is_some() {
- keywords::PathRoot
+ kw::PathRoot
} else {
- keywords::Crate
+ kw::Crate
};
- let segments = iter::once(root.ident())
+ let segments = iter::once(Ident::with_empty_ctxt(root))
.chain(
crate_root.into_iter()
.chain(components.iter().cloned())
.map(Ident::with_empty_ctxt)
).map(|i| self.new_ast_path_segment(i)).collect::<Vec<_>>();
-
let path = ast::Path {
span,
segments,
let path = if path_str.starts_with("::") {
ast::Path {
span,
- segments: iter::once(keywords::PathRoot.ident())
+ segments: iter::once(Ident::with_empty_ctxt(kw::PathRoot))
.chain({
path_str.split("::").skip(1).map(Ident::from_str)
})
let root_module_kind = ModuleKind::Def(
DefKind::Mod,
root_def_id,
- keywords::Invalid.name(),
+ kw::Invalid,
);
let graph_root = arenas.alloc_module(ModuleData {
no_implicit_prelude: attr::contains_name(&krate.attrs, sym::no_implicit_prelude),
path_span: Span)
-> Option<LexicalScopeBinding<'a>> {
assert!(ns == TypeNS || ns == ValueNS);
- if ident.name == keywords::Invalid.name() {
+ if ident.name == kw::Invalid {
return Some(LexicalScopeBinding::Res(Res::Err));
}
- ident.span = if ident.name == keywords::SelfUpper.name() {
+ ident.span = if ident.name == kw::SelfUpper {
// FIXME(jseyfried) improve `Self` hygiene
ident.span.with_ctxt(SyntaxContext::empty())
} else if ns == TypeNS {
fn resolve_crate_root(&mut self, ident: Ident) -> Module<'a> {
let mut ctxt = ident.span.ctxt();
- let mark = if ident.name == keywords::DollarCrate.name() {
+ let mark = if ident.name == kw::DollarCrate {
// When resolving `$crate` from a `macro_rules!` invoked in a `macro`,
// we don't want to pretend that the `macro_rules!` definition is in the `macro`
// as described in `SyntaxContext::apply_mark`, so we ignore prepended modern marks.
let mut self_type_rib = Rib::new(NormalRibKind);
// Plain insert (no renaming, since types are not currently hygienic)
- self_type_rib.bindings.insert(keywords::SelfUpper.ident(), self_res);
+ self_type_rib.bindings.insert(Ident::with_empty_ctxt(kw::SelfUpper), self_res);
self.ribs[TypeNS].push(self_type_rib);
f(self);
self.ribs[TypeNS].pop();
{
let self_res = Res::SelfCtor(impl_id);
let mut self_type_rib = Rib::new(NormalRibKind);
- self_type_rib.bindings.insert(keywords::SelfUpper.ident(), self_res);
+ self_type_rib.bindings.insert(Ident::with_empty_ctxt(kw::SelfUpper), self_res);
self.ribs[ValueNS].push(self_type_rib);
f(self);
self.ribs[ValueNS].pop();
}
None => {
// A completely fresh binding, add to the lists if it's valid.
- if ident.name != keywords::Invalid.name() {
+ if ident.name != kw::Invalid {
bindings.insert(ident, outer_pat_id);
self.ribs[ValueNS].last_mut().unwrap().bindings.insert(ident, res);
}
}
fn self_type_is_available(&mut self, span: Span) -> bool {
- let binding = self.resolve_ident_in_lexical_scope(keywords::SelfUpper.ident(),
+ let binding = self.resolve_ident_in_lexical_scope(Ident::with_empty_ctxt(kw::SelfUpper),
TypeNS, None, span);
if let Some(LexicalScopeBinding::Res(res)) = binding { res != Res::Err } else { false }
}
fn self_value_is_available(&mut self, self_span: Span, path_span: Span) -> bool {
- let ident = Ident::new(keywords::SelfLower.name(), self_span);
+ let ident = Ident::new(kw::SelfLower, self_span);
let binding = self.resolve_ident_in_lexical_scope(ident, ValueNS, None, path_span);
if let Some(LexicalScopeBinding::Res(res)) = binding { res != Res::Err } else { false }
}
};
if path.len() > 1 && !global_by_default && result.base_res() != Res::Err &&
- path[0].ident.name != keywords::PathRoot.name() &&
- path[0].ident.name != keywords::DollarCrate.name() {
+ path[0].ident.name != kw::PathRoot &&
+ path[0].ident.name != kw::DollarCrate {
let unqualified_result = {
match self.resolve_path_without_parent_scope(
&[*path.last().unwrap()],
let name = ident.name;
allow_super &= ns == TypeNS &&
- (name == keywords::SelfLower.name() ||
- name == keywords::Super.name());
+ (name == kw::SelfLower ||
+ name == kw::Super);
if ns == TypeNS {
- if allow_super && name == keywords::Super.name() {
+ if allow_super && name == kw::Super {
let mut ctxt = ident.span.ctxt().modern();
let self_module = match i {
0 => Some(self.resolve_self(&mut ctxt, self.current_module)),
};
}
if i == 0 {
- if name == keywords::SelfLower.name() {
+ if name == kw::SelfLower {
let mut ctxt = ident.span.ctxt().modern();
module = Some(ModuleOrUniformRoot::Module(
self.resolve_self(&mut ctxt, self.current_module)));
continue;
}
- if name == keywords::PathRoot.name() && ident.span.rust_2018() {
+ if name == kw::PathRoot && ident.span.rust_2018() {
module = Some(ModuleOrUniformRoot::ExternPrelude);
continue;
}
- if name == keywords::PathRoot.name() &&
+ if name == kw::PathRoot &&
ident.span.rust_2015() && self.session.rust_2018() {
// `::a::b` from 2015 macro on 2018 global edition
module = Some(ModuleOrUniformRoot::CrateRootAndExternPrelude);
continue;
}
- if name == keywords::PathRoot.name() ||
- name == keywords::Crate.name() ||
- name == keywords::DollarCrate.name() {
+ if name == kw::PathRoot ||
+ name == kw::Crate ||
+ name == kw::DollarCrate {
// `::a::b`, `crate::a::b` or `$crate::a::b`
module = Some(ModuleOrUniformRoot::Module(
self.resolve_crate_root(ident)));
// Report special messages for path segment keywords in wrong positions.
if ident.is_path_segment_keyword() && i != 0 {
- let name_str = if name == keywords::PathRoot.name() {
+ let name_str = if name == kw::PathRoot {
"crate root".to_string()
} else {
format!("`{}`", name)
};
- let label = if i == 1 && path[0].ident.name == keywords::PathRoot.name() {
+ let label = if i == 1 && path[0].ident.name == kw::PathRoot {
format!("global paths cannot start with {}", name_str)
} else {
format!("{} in paths can only be used in start position", name_str)
// We're only interested in `use` paths which should start with
// `{{root}}` currently.
- if first_name != keywords::PathRoot.name() {
+ if first_name != kw::PathRoot {
return
}
match path.get(1) {
// If this import looks like `crate::...` it's already good
- Some(Segment { ident, .. }) if ident.name == keywords::Crate.name() => return,
+ Some(Segment { ident, .. }) if ident.name == kw::Crate => return,
// Otherwise go below to see if it's an extern crate
Some(_) => {}
// If the path has length one (and it's `PathRoot` most likely)
{
let mut candidates = Vec::new();
let mut seen_modules = FxHashSet::default();
- let not_local_module = crate_name != keywords::Crate.ident();
+ let not_local_module = crate_name.name != kw::Crate;
let mut worklist = vec![(start_module, Vec::<ast::PathSegment>::new(), not_local_module)];
while let Some((in_module,
where FilterFn: Fn(Res) -> bool
{
let mut suggestions = self.lookup_import_candidates_from_module(
- lookup_ident, namespace, self.graph_root, keywords::Crate.ident(), &filter_fn);
+ lookup_ident, namespace, self.graph_root, Ident::with_empty_ctxt(kw::Crate), &filter_fn
+ );
if lookup_ident.span.rust_2018() {
let extern_prelude_names = self.extern_prelude.clone();
} else {
let ctxt = ident.span.ctxt();
Some(Segment::from_ident(Ident::new(
- keywords::PathRoot.name(), path.span.shrink_to_lo().with_ctxt(ctxt)
+ kw::PathRoot, path.span.shrink_to_lo().with_ctxt(ctxt)
)))
};
}
fn is_self_type(path: &[Segment], namespace: Namespace) -> bool {
- namespace == TypeNS && path.len() == 1 && path[0].ident.name == keywords::SelfUpper.name()
+ namespace == TypeNS && path.len() == 1 && path[0].ident.name == kw::SelfUpper
}
fn is_self_value(path: &[Segment], namespace: Namespace) -> bool {
- namespace == ValueNS && path.len() == 1 && path[0].ident.name == keywords::SelfLower.name()
+ namespace == ValueNS && path.len() == 1 && path[0].ident.name == kw::SelfLower
}
fn names_to_string(idents: &[Ident]) -> String {
let mut result = String::new();
for (i, ident) in idents.iter()
- .filter(|ident| ident.name != keywords::PathRoot.name())
+ .filter(|ident| ident.name != kw::PathRoot)
.enumerate() {
if i > 0 {
result.push_str("::");
use syntax::feature_gate::{
feature_err, is_builtin_attr_name, AttributeGate, GateIssue, Stability, BUILTIN_ATTRIBUTES,
};
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax::visit::Visitor;
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::{Span, DUMMY_SP};
}
impl<'a> Visitor<'a> for ResolveDollarCrates<'a, '_> {
fn visit_ident(&mut self, ident: Ident) {
- if ident.name == keywords::DollarCrate.name() {
+ if ident.name == kw::DollarCrate {
let name = match self.resolver.resolve_crate_root(ident).kind {
- ModuleKind::Def(.., name) if name != keywords::Invalid.name() => name,
- _ => keywords::Crate.name(),
+ ModuleKind::Def(.., name) if name != kw::Invalid => name,
+ _ => kw::Crate,
};
ident.span.ctxt().set_dollar_crate_name(name);
}
if kind == MacroKind::Bang && path.len() == 1 &&
path[0].ident.span.ctxt().outer().expn_info()
.map_or(false, |info| info.local_inner_macros) {
- let root = Ident::new(keywords::DollarCrate.name(), path[0].ident.span);
+ let root = Ident::new(kw::DollarCrate, path[0].ident.span);
path.insert(0, Segment::from_ident(root));
}
_ => Err(Determinacy::Determined),
}
WhereToResolve::CrateRoot => {
- let root_ident = Ident::new(keywords::PathRoot.name(), orig_ident.span);
+ let root_ident = Ident::new(kw::PathRoot, orig_ident.span);
let root_module = self.resolve_crate_root(root_ident);
let binding = self.resolve_ident_in_module_ext(
ModuleOrUniformRoot::Module(root_module),
use syntax::ast::{self, Ident, Name, NodeId, CRATE_NODE_ID};
use syntax::ext::base::Determinacy::{self, Determined, Undetermined};
use syntax::ext::hygiene::Mark;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax::util::lev_distance::find_best_match_for_name;
use syntax::{struct_span_err, unwrap_or};
use syntax_pos::{MultiSpan, Span};
parent_scope.expect("no parent scope for a single-segment import");
if ns == TypeNS {
- if ident.name == keywords::Crate.name() ||
- ident.name == keywords::DollarCrate.name() {
+ if ident.name == kw::Crate ||
+ ident.name == kw::DollarCrate {
let module = self.resolve_crate_root(ident);
let binding = (module, ty::Visibility::Public,
module.span, Mark::root())
.to_name_binding(self.arenas);
return Ok(binding);
- } else if ident.name == keywords::Super.name() ||
- ident.name == keywords::SelfLower.name() {
+ } else if ident.name == kw::Super ||
+ ident.name == kw::SelfLower {
// FIXME: Implement these with renaming requirements so that e.g.
// `use super;` doesn't work, but `use super as name;` does.
// Fall through here to get an error from `early_resolve_...`.
has_errors = true;
if let SingleImport { source, ref source_bindings, .. } = import.subclass {
- if source.name == keywords::SelfLower.name() {
+ if source.name == kw::SelfLower {
// Silence `unresolved import` error if E0429 is already emitted
if let Err(Determined) = source_bindings.value_ns.get() {
continue;
// HACK(eddyb) `lint_if_path_starts_with_module` needs at least
// 2 segments, so the `resolve_path` above won't trigger it.
let mut full_path = directive.module_path.clone();
- full_path.push(Segment::from_ident(keywords::Invalid.ident()));
+ full_path.push(Segment::from_ident(Ident::invalid()));
self.lint_if_path_starts_with_module(
directive.crate_lint(),
&full_path,
subclass: &ImportDirectiveSubclass<'_>,
span: Span) -> String {
let pos = names.iter()
- .position(|p| span == p.span && p.name != keywords::PathRoot.name());
- let global = !names.is_empty() && names[0].name == keywords::PathRoot.name();
+ .position(|p| span == p.span && p.name != kw::PathRoot);
+ let global = !names.is_empty() && names[0].name == kw::PathRoot;
if let Some(pos) = pos {
let names = if global { &names[1..pos + 1] } else { &names[..pos + 1] };
names_to_string(names)
use rustc::ty::{self, DefIdTree, TyCtxt};
use rustc::{bug, span_bug};
use rustc_codegen_utils::link::{filename_for_metadata, out_filename};
-use rustc_data_structures::sync::Lrc;
use std::cell::Cell;
use std::default::Default;
let mut result = Vec::with_capacity(self.tcx.crates().len());
for &n in self.tcx.crates().iter() {
- let span = match *self.tcx.extern_crate(n.as_def_id()) {
- Some(ExternCrate { span, .. }) => span,
+ let span = match self.tcx.extern_crate(n.as_def_id()) {
+ Some(&ExternCrate { span, .. }) => span,
None => {
debug!("Skipping crate {}, no data", n);
continue;
// fallback in case the access levels couldn't have been correctly computed.
let access_levels = match tcx.sess.compile_status() {
Ok(..) => tcx.privacy_access_levels(LOCAL_CRATE),
- Err(..) => Lrc::new(AccessLevels::default()),
+ Err(..) => tcx.arena.alloc(AccessLevels::default()),
};
let save_ctxt = SaveContext {
use rustc::ty::subst::{Kind, Subst, InternalSubsts, SubstsRef};
use rustc::ty::wf::object_region_bounds;
use rustc::mir::interpret::ConstValue;
-use rustc_data_structures::sync::Lrc;
use rustc_target::spec::abi;
use crate::require_c_abi_if_c_variadic;
use smallvec::SmallVec;
/// Returns the set of bounds in scope for the type parameter with
/// the given id.
fn get_type_parameter_bounds(&self, span: Span, def_id: DefId)
- -> Lrc<ty::GenericPredicates<'tcx>>;
+ -> &'tcx ty::GenericPredicates<'tcx>;
/// What lifetime should we use when a lifetime is omitted (and not elided)?
fn re_infer(&self, span: Span, _def: Option<&ty::GenericParamDef>)
err.help(&format!("did you mean `{}: &{}`?", snippet, expected));
}
}
- hir::Node::Expr(hir::Expr { node: hir::ExprKind::Match(..), .. }) |
+ hir::Node::Arm(_) |
hir::Node::Pat(_) => {
// rely on match ergonomics or it might be nested `&&pat`
if let Ok(snippet) = tcx.sess.source_map().span_to_snippet(inner.span) {
fn maybe_get_coercion_reason(&self, hir_id: hir::HirId, span: Span) -> Option<(Span, String)> {
use hir::Node::{Block, Item, Local};
- let node = self.tcx.hir().get_by_hir_id(self.tcx.hir().get_parent_node_by_hir_id(
- self.tcx.hir().get_parent_node_by_hir_id(hir_id),
- ));
+ let hir = self.tcx.hir();
+ let arm_id = hir.get_parent_node_by_hir_id(hir_id);
+ let match_id = hir.get_parent_node_by_hir_id(arm_id);
+ let containing_id = hir.get_parent_node_by_hir_id(match_id);
+
+ let node = hir.get_by_hir_id(containing_id);
if let Block(block) = node {
// check that the body's parent is an fn
- let parent = self.tcx.hir().get_by_hir_id(
- self.tcx.hir().get_parent_node_by_hir_id(
- self.tcx.hir().get_parent_node_by_hir_id(block.hir_id),
+ let parent = hir.get_by_hir_id(
+ hir.get_parent_node_by_hir_id(
+ hir.get_parent_node_by_hir_id(block.hir_id),
),
);
if let (Some(expr), Item(hir::Item {
use crate::namespace::Namespace;
use crate::util::nodemap::FxHashSet;
use errors::{Applicability, DiagnosticBuilder};
-use rustc_data_structures::sync::Lrc;
use rustc::hir::{self, ExprKind, Node, QPath};
use rustc::hir::def::{Res, DefKind};
use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, DefId};
pub fn provide(providers: &mut ty::query::Providers<'_>) {
providers.all_traits = |tcx, cnum| {
assert_eq!(cnum, LOCAL_CRATE);
- Lrc::new(compute_all_traits(tcx))
+ &tcx.arena.alloc(compute_all_traits(tcx))[..]
}
}
use rustc::infer::{self, InferCtxt, InferOk, InferResult};
use rustc::infer::canonical::{Canonical, OriginalQueryValues, QueryResponse};
use rustc_data_structures::indexed_vec::Idx;
-use rustc_data_structures::sync::Lrc;
use rustc_target::spec::abi::Abi;
use rustc::infer::opaque_types::OpaqueTypeDecl;
use rustc::infer::type_variable::{TypeVariableOrigin};
use syntax::feature_gate::{GateIssue, emit_feature_err};
use syntax::ptr::P;
use syntax::source_map::{DUMMY_SP, original_sp};
-use syntax::symbol::{Symbol, LocalInternedString, keywords, sym};
+use syntax::symbol::{Symbol, LocalInternedString, kw, sym};
use syntax::util::lev_distance::find_best_match_for_name;
use std::cell::{Cell, RefCell, Ref, RefMut};
fn used_trait_imports<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
- -> Lrc<DefIdSet> {
- tcx.typeck_tables_of(def_id).used_trait_imports.clone()
+ -> &'tcx DefIdSet {
+ &*tcx.typeck_tables_of(def_id).used_trait_imports
}
fn typeck_tables_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> { self.tcx }
fn get_type_parameter_bounds(&self, _: Span, def_id: DefId)
- -> Lrc<ty::GenericPredicates<'tcx>>
+ -> &'tcx ty::GenericPredicates<'tcx>
{
let tcx = self.tcx;
let hir_id = tcx.hir().as_local_hir_id(def_id).unwrap();
let item_def_id = tcx.hir().local_def_id_from_hir_id(item_id);
let generics = tcx.generics_of(item_def_id);
let index = generics.param_def_id_to_index[&def_id];
- Lrc::new(ty::GenericPredicates {
+ tcx.arena.alloc(ty::GenericPredicates {
parent: None,
predicates: self.param_env.caller_bounds.iter().filter_map(|&predicate| {
match predicate {
Ok(method)
}
Err(error) => {
- if segment.ident.name != keywords::Invalid.name() {
+ if segment.ident.name != kw::Invalid {
self.report_method_error(span,
rcvr_t,
segment.ident,
}
err.emit();
field_ty
- } else if field.name == keywords::Invalid.name() {
+ } else if field.name == kw::Invalid {
self.tcx().types.err
} else if self.method_exists(field, expr_t, expr.hir_id, true) {
let mut err = type_error_struct!(self.tcx().sess, field.span, expr_t, E0615,
method::MethodError::PrivateMatch(kind, def_id, _) => Ok((kind, def_id)),
_ => Err(ErrorReported),
};
- if item_name.name != keywords::Invalid.name() {
+ if item_name.name != kw::Invalid {
self.report_method_error(
span,
ty,
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ty::{self, CrateInherentImpls, TyCtxt};
-use rustc_data_structures::sync::Lrc;
use syntax::ast;
use syntax_pos::Span;
/// On-demand query: yields a map containing all types mapped to their inherent impls.
pub fn crate_inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
crate_num: CrateNum)
- -> Lrc<CrateInherentImpls> {
+ -> &'tcx CrateInherentImpls {
assert_eq!(crate_num, LOCAL_CRATE);
let krate = tcx.hir().krate();
impls_map: Default::default(),
};
krate.visit_all_item_likes(&mut collect);
- Lrc::new(collect.impls_map)
+ tcx.arena.alloc(collect.impls_map)
}
/// On-demand query: yields a vector of the inherent impls for a specific type.
pub fn inherent_impls<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty_def_id: DefId)
- -> Lrc<Vec<DefId>> {
+ -> &'tcx [DefId] {
assert!(ty_def_id.is_local());
// NB. Until we adopt the red-green dep-tracking algorithm (see
//
// [the plan]: https://github.com/rust-lang/rust-roadmap/issues/4
- thread_local! {
- static EMPTY_DEF_ID_VEC: Lrc<Vec<DefId>> = Lrc::new(vec![])
- }
-
let result = tcx.dep_graph.with_ignore(|| {
let crate_map = tcx.crate_inherent_impls(ty_def_id.krate);
match crate_map.inherent_impls.get(&ty_def_id) {
- Some(v) => v.clone(),
- None => EMPTY_DEF_ID_VEC.with(|v| v.clone())
+ Some(v) => &v[..],
+ None => &[],
}
});
// type def ID, if there is a base type for this implementation and
// the implementation does not have any associated traits.
let impl_def_id = self.tcx.hir().local_def_id_from_hir_id(item.hir_id);
- let mut rc_vec = self.impls_map.inherent_impls
- .entry(def_id)
- .or_default();
-
- // At this point, there should not be any clones of the
- // `Lrc`, so we can still safely push into it in place:
- Lrc::get_mut(&mut rc_vec).unwrap().push(impl_def_id);
+ let vec = self.impls_map.inherent_impls.entry(def_id).or_default();
+ vec.push(impl_def_id);
} else {
struct_span_err!(self.tcx.sess,
item.span,
use rustc::ty::{ReprOptions, ToPredicate};
use rustc::util::captures::Captures;
use rustc::util::nodemap::FxHashMap;
-use rustc_data_structures::sync::Lrc;
use rustc_target::spec::abi;
use syntax::ast;
use syntax::attr::{InlineAttr, OptimizeAttr, list_contains_name, mark_used};
use syntax::source_map::Spanned;
use syntax::feature_gate;
-use syntax::symbol::{InternedString, keywords, Symbol, sym};
+use syntax::symbol::{InternedString, kw, Symbol, sym};
use syntax_pos::{Span, DUMMY_SP};
use rustc::hir::def::{CtorKind, Res, DefKind};
}
fn get_type_parameter_bounds(&self, span: Span, def_id: DefId)
- -> Lrc<ty::GenericPredicates<'tcx>> {
+ -> &'tcx ty::GenericPredicates<'tcx> {
self.tcx
.at(span)
.type_param_predicates((self.item_def_id, def_id))
fn type_param_predicates<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
(item_def_id, def_id): (DefId, DefId),
-) -> Lrc<ty::GenericPredicates<'tcx>> {
+) -> &'tcx ty::GenericPredicates<'tcx> {
use rustc::hir::*;
// In the AST, bounds can derive from two places. Either
tcx.generics_of(item_def_id).parent
};
- let mut result = parent.map_or_else(
- || Lrc::new(ty::GenericPredicates {
- parent: None,
- predicates: vec![],
- }),
- |parent| {
- let icx = ItemCtxt::new(tcx, parent);
- icx.get_type_parameter_bounds(DUMMY_SP, def_id)
- },
- );
+ let result = parent.map_or(&tcx.common.empty_predicates, |parent| {
+ let icx = ItemCtxt::new(tcx, parent);
+ icx.get_type_parameter_bounds(DUMMY_SP, def_id)
+ });
+ let mut extend = None;
let item_hir_id = tcx.hir().as_local_hir_id(item_def_id).unwrap();
let ast_generics = match tcx.hir().get_by_hir_id(item_hir_id) {
// Implied `Self: Trait` and supertrait bounds.
if param_id == item_hir_id {
let identity_trait_ref = ty::TraitRef::identity(tcx, item_def_id);
- Lrc::make_mut(&mut result)
- .predicates
- .push((identity_trait_ref.to_predicate(), item.span));
+ extend = Some((identity_trait_ref.to_predicate(), item.span));
}
generics
}
};
let icx = ItemCtxt::new(tcx, item_def_id);
- Lrc::make_mut(&mut result)
- .predicates
- .extend(icx.type_parameter_bounds_in_generics(ast_generics, param_id, ty,
- OnlySelfBounds(true)));
- result
+ let mut result = (*result).clone();
+ result.predicates.extend(extend.into_iter());
+ result.predicates
+ .extend(icx.type_parameter_bounds_in_generics(ast_generics, param_id, ty,
+ OnlySelfBounds(true)));
+ tcx.arena.alloc(result)
}
impl<'a, 'tcx> ItemCtxt<'a, 'tcx> {
fn super_predicates_of<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
trait_def_id: DefId,
-) -> Lrc<ty::GenericPredicates<'tcx>> {
+) -> &'tcx ty::GenericPredicates<'tcx> {
debug!("super_predicates(trait_def_id={:?})", trait_def_id);
let trait_hir_id = tcx.hir().as_local_hir_id(trait_def_id).unwrap();
}
}
- Lrc::new(ty::GenericPredicates {
+ tcx.arena.alloc(ty::GenericPredicates {
parent: None,
predicates: superbounds,
})
opt_self = Some(ty::GenericParamDef {
index: 0,
- name: keywords::SelfUpper.name().as_interned_str(),
+ name: kw::SelfUpper.as_interned_str(),
def_id: tcx.hir().local_def_id_from_hir_id(param_id),
pure_wrt_drop: false,
kind: ty::GenericParamDefKind::Type {
synthetic,
..
} => {
- if param.name.ident().name == keywords::SelfUpper.name() {
+ if param.name.ident().name == kw::SelfUpper {
span_bug!(
param.span,
"`Self` should not be the name of a regular parameter"
}
}
GenericParamKind::Const { .. } => {
- if param.name.ident().name == keywords::SelfUpper.name() {
+ if param.name.ident().name == kw::SelfUpper {
span_bug!(
param.span,
"`Self` should not be the name of a regular parameter",
fn predicates_defined_on<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
-) -> Lrc<ty::GenericPredicates<'tcx>> {
+) -> &'tcx ty::GenericPredicates<'tcx> {
debug!("predicates_defined_on({:?})", def_id);
let mut result = tcx.explicit_predicates_of(def_id);
debug!(
def_id,
inferred_outlives,
);
- Lrc::make_mut(&mut result)
- .predicates
- .extend(inferred_outlives.iter().map(|&p| (p, span)));
+ let mut predicates = (*result).clone();
+ predicates.predicates.extend(inferred_outlives.iter().map(|&p| (p, span)));
+ result = tcx.arena.alloc(predicates);
}
debug!("predicates_defined_on({:?}) = {:?}", def_id, result);
result
fn predicates_of<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
-) -> Lrc<ty::GenericPredicates<'tcx>> {
+) -> &'tcx ty::GenericPredicates<'tcx> {
let mut result = tcx.predicates_defined_on(def_id);
if tcx.is_trait(def_id) {
// used, and adding the predicate into this list ensures
// that this is done.
let span = tcx.def_span(def_id);
- Lrc::make_mut(&mut result)
- .predicates
- .push((ty::TraitRef::identity(tcx, def_id).to_predicate(), span));
+ let mut predicates = (*result).clone();
+ predicates.predicates.push((ty::TraitRef::identity(tcx, def_id).to_predicate(), span));
+ result = tcx.arena.alloc(predicates);
}
debug!("predicates_of(def_id={:?}) = {:?}", def_id, result);
result
fn explicit_predicates_of<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
-) -> Lrc<ty::GenericPredicates<'tcx>> {
+) -> &'tcx ty::GenericPredicates<'tcx> {
use rustc::hir::*;
use rustc_data_structures::fx::FxHashSet;
if impl_trait_fn.is_some() {
// impl Trait
- return Lrc::new(ty::GenericPredicates {
+ return tcx.arena.alloc(ty::GenericPredicates {
parent: None,
predicates: bounds.predicates(tcx, opaque_ty),
});
);
}
- let result = Lrc::new(ty::GenericPredicates {
+ let result = tcx.arena.alloc(ty::GenericPredicates {
parent: generics.parent,
predicates,
});
use rustc::ty::query::Providers;
use rustc::ty::subst::UnpackedKind;
use rustc::ty::{self, CratePredicatesMap, TyCtxt};
-use rustc_data_structures::sync::Lrc;
use syntax::symbol::sym;
mod explicit;
fn inferred_outlives_crate<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
crate_num: CrateNum,
-) -> Lrc<CratePredicatesMap<'tcx>> {
+) -> &'tcx CratePredicatesMap<'tcx> {
assert_eq!(crate_num, LOCAL_CRATE);
// Compute a map from each struct/enum/union S to the **explicit**
(def_id, &*predicates)
}).collect();
- Lrc::new(ty::CratePredicatesMap {
+ tcx.arena.alloc(ty::CratePredicatesMap {
predicates,
})
}
use rustc::hir::def_id::{CrateNum, DefId, LOCAL_CRATE};
use rustc::ty::{self, CrateVariancesMap, TyCtxt};
use rustc::ty::query::Providers;
-use rustc_data_structures::sync::Lrc;
/// Defines the `TermsContext` basically houses an arena where we can
/// allocate terms.
}
fn crate_variances<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, crate_num: CrateNum)
- -> Lrc<CrateVariancesMap<'tcx>> {
+ -> &'tcx CrateVariancesMap<'tcx> {
assert_eq!(crate_num, LOCAL_CRATE);
let mut arena = arena::TypedArena::default();
let terms_cx = terms::determine_parameters_to_be_inferred(tcx, &mut arena);
let constraints_cx = constraints::add_constraints_from_crate(terms_cx);
- Lrc::new(solve::solve_constraints(constraints_cx))
+ tcx.arena.alloc(solve::solve_constraints(constraints_cx))
}
fn variances_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, item_def_id: DefId)
// Instead, we generate `impl !Send for Foo<T>`, which better
// expresses the fact that `Foo<T>` never implements `Send`,
// regardless of the choice of `T`.
- let params = (self.cx.tcx.generics_of(param_env_def_id), &Default::default())
- .clean(self.cx).params;
+ let params = (
+ self.cx.tcx.generics_of(param_env_def_id),
+ &&self.cx.tcx.common.empty_predicates,
+ ).clean(self.cx).params;
Generics {
params,
mod blanket_impl;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
-use rustc_data_structures::sync::Lrc;
use rustc_target::spec::abi::Abi;
use rustc_typeck::hir_ty_to_ty;
use rustc::infer::region_constraints::{RegionConstraintData, Constraint};
use syntax::ext::base::MacroKind;
use syntax::source_map::{dummy_spanned, Spanned};
use syntax::ptr::P;
-use syntax::symbol::keywords::{self, Keyword};
-use syntax::symbol::{Symbol, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax::symbol::InternedString;
use syntax_pos::{self, Pos, FileName};
use std::{mem, slice, vec};
use std::iter::{FromIterator, once};
use std::rc::Rc;
-use std::str::FromStr;
use std::cell::RefCell;
use std::sync::Arc;
use std::u32;
for attr in attrs.lists(sym::doc) {
if let Some(v) = attr.value_str() {
if attr.check_name(sym::keyword) {
- keyword = Keyword::from_str(&v.as_str()).ok()
- .map(|x| x.name().to_string());
- if keyword.is_some() {
- break
+ if v.is_doc_keyword() {
+ keyword = Some(v.to_string());
+ break;
}
// FIXME: should warn on unknown keywords?
}
}
impl<'a, 'tcx> Clean<Generics> for (&'a ty::Generics,
- &'a Lrc<ty::GenericPredicates<'tcx>>) {
+ &'a &'tcx ty::GenericPredicates<'tcx>) {
fn clean(&self, cx: &DocContext<'_>) -> Generics {
use self::WherePredicate as WP;
let stripped_typarams = gens.params.iter().filter_map(|param| match param.kind {
ty::GenericParamDefKind::Lifetime => None,
ty::GenericParamDefKind::Type { .. } => {
- if param.name.as_symbol() == keywords::SelfUpper.name() {
+ if param.name.as_symbol() == kw::SelfUpper {
assert_eq!(param.index, 0);
return None;
}
if i > 0 {
s.push_str("::");
}
- if seg.ident.name != keywords::PathRoot.name() {
+ if seg.ident.name != kw::PathRoot {
s.push_str(&*seg.ident.as_str());
}
}
hir::Float(float_ty) => return Primitive(float_ty.into()),
},
Res::SelfTy(..) if path.segments.len() == 1 => {
- return Generic(keywords::SelfUpper.name().to_string());
+ return Generic(kw::SelfUpper.to_string());
}
Res::Def(DefKind::TyParam, _) if path.segments.len() == 1 => {
return Generic(format!("{:#}", path));
loop {
let segment = path_it.next()?;
- for item in mem::replace(&mut items, Lrc::new(vec![])).iter() {
+ for item in mem::replace(&mut items, &[]).iter() {
if item.ident.name == *segment {
if path_it.peek().is_none() {
return match item.res {
use syntax::parse::lexer::{self, TokenAndSpan};
use syntax::parse::token;
use syntax::parse;
+use syntax::symbol::{kw, sym};
use syntax_pos::{Span, FileName};
/// Highlights `src`, returning the HTML output.
}
}
- token::Literal(lit, _suf) => {
- match lit {
+ token::Literal(lit) => {
+ match lit.kind {
// Text literals.
- token::Byte(..) | token::Char(..) | token::Err(..) |
- token::ByteStr(..) | token::ByteStrRaw(..) |
- token::Str_(..) | token::StrRaw(..) => Class::String,
+ token::Byte | token::Char | token::Err |
+ token::ByteStr | token::ByteStrRaw(..) |
+ token::Str | token::StrRaw(..) => Class::String,
// Number literals.
- token::Integer(..) | token::Float(..) => Class::Number,
+ token::Integer | token::Float => Class::Number,
- token::Bool(..) => panic!("literal token contains `Lit::Bool`"),
+ token::Bool => panic!("literal token contains `Lit::Bool`"),
}
}
// Keywords are also included in the identifier set.
token::Ident(ident, is_raw) => {
- match &*ident.as_str() {
- "ref" | "mut" if !is_raw => Class::RefKeyWord,
+ match ident.name {
+ kw::Ref | kw::Mut if !is_raw => Class::RefKeyWord,
- "self" | "Self" => Class::Self_,
- "false" | "true" if !is_raw => Class::Bool,
+ kw::SelfLower | kw::SelfUpper => Class::Self_,
+ kw::False | kw::True if !is_raw => Class::Bool,
- "Option" | "Result" => Class::PreludeTy,
- "Some" | "None" | "Ok" | "Err" => Class::PreludeVal,
+ sym::Option | sym::Result => Class::PreludeTy,
+ sym::Some | sym::None | sym::Ok | sym::Err => Class::PreludeVal,
- "$crate" => Class::KeyWord,
_ if tas.tok.is_reserved_ident() => Class::KeyWord,
_ => {
panic_abort = { path = "../libpanic_abort" }
core = { path = "../libcore" }
libc = { version = "0.2.51", default-features = false, features = ['rustc-dep-of-std'] }
-compiler_builtins = { version = "0.1.14" }
+compiler_builtins = { version = "0.1.15" }
profiler_builtins = { path = "../libprofiler_builtins", optional = true }
unwind = { path = "../libunwind" }
hashbrown = { version = "0.3.0", features = ['rustc-dep-of-std'] }
cc = "1.0"
[features]
-default = ["compiler_builtins_c", "std_detect_file_io", "std_detect_dlsym_getauxval"]
+default = ["std_detect_file_io", "std_detect_dlsym_getauxval"]
backtrace = ["backtrace-sys"]
panic-unwind = ["panic_unwind"]
profiler = ["profiler_builtins"]
-compiler_builtins_c = ["alloc/compiler-builtins-c"]
+compiler-builtins-c = ["alloc/compiler-builtins-c"]
llvm-libunwind = ["unwind/llvm-libunwind"]
# Make panics and failed asserts immediately abort without formatting any message
self.inner.next().map(|s| s.into_string().unwrap())
}
fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
- #[inline]
- fn last(mut self) -> Option<String> {
- self.next_back()
- }
}
#[stable(feature = "env", since = "1.0.0")]
type Item = OsString;
fn next(&mut self) -> Option<OsString> { self.inner.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
- #[inline]
- fn last(mut self) -> Option<OsString> { self.next_back() }
}
#[stable(feature = "env", since = "1.0.0")]
fn next(&mut self) -> Option<&'a OsStr> {
self.inner.next().map(Component::as_os_str)
}
-
- #[inline]
- fn last(mut self) -> Option<&'a OsStr> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
}
None
}
-
- #[inline]
- fn last(mut self) -> Option<Self::Item> {
- self.next_back()
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
type Item = OsString;
fn next(&mut self) -> Option<OsString> { self.iter.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.iter.size_hint() }
- #[inline]
- fn last(mut self) -> Option<OsString> { self.next_back() }
}
impl ExactSizeIterator for Args {
fn size_hint(&self) -> (usize, Option<usize>) {
self.iter.size_hint()
}
- #[inline]
- fn last(mut self) -> Option<OsString> {
- self.next_back()
- }
}
impl ExactSizeIterator for Args {
type Item = OsString;
fn next(&mut self) -> Option<OsString> { self.parsed_args_list.next() }
fn size_hint(&self) -> (usize, Option<usize>) { self.parsed_args_list.size_hint() }
- #[inline]
- fn last(mut self) -> Option<OsString> { self.next_back() }
}
impl DoubleEndedIterator for Args {
use crate::print::pprust;
use crate::ptr::P;
use crate::source_map::{dummy_spanned, respan, Spanned};
-use crate::symbol::{keywords, Symbol};
+use crate::symbol::{kw, Symbol};
use crate::tokenstream::TokenStream;
use crate::ThinVec;
pub struct Path {
pub span: Span,
/// The segments in the path: the things separated by `::`.
- /// Global paths begin with `keywords::PathRoot`.
+ /// Global paths begin with `kw::PathRoot`.
pub segments: Vec<PathSegment>,
}
}
pub fn is_global(&self) -> bool {
- !self.segments.is_empty() && self.segments[0].ident.name == keywords::PathRoot.name()
+ !self.segments.is_empty() && self.segments[0].ident.name == kw::PathRoot
}
}
PathSegment { ident, id: DUMMY_NODE_ID, args: None }
}
pub fn path_root(span: Span) -> Self {
- PathSegment::from_ident(Ident::new(keywords::PathRoot.name(), span))
+ PathSegment::from_ident(Ident::new(kw::PathRoot, span))
}
}
pub pats: Vec<P<Pat>>,
pub guard: Option<Guard>,
pub body: P<Expr>,
+ pub span: Span,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Lit {
/// The original literal token as written in source code.
pub token: token::Lit,
- /// The original literal suffix as written in source code.
- pub suffix: Option<Symbol>,
/// The "semantic" representation of the literal lowered from the original tokens.
/// Strings are unescaped, hexadecimal forms are eliminated, etc.
/// FIXME: Remove this and only create the semantic representation during lowering to HIR.
impl Arg {
pub fn to_self(&self) -> Option<ExplicitSelf> {
if let PatKind::Ident(BindingMode::ByValue(mutbl), ident, _) = self.pat.node {
- if ident.name == keywords::SelfLower.name() {
+ if ident.name == kw::SelfLower {
return match self.ty.node {
TyKind::ImplicitSelf => Some(respan(self.pat.span, SelfKind::Value(mutbl))),
TyKind::Rptr(lt, MutTy { ref ty, mutbl }) if ty.node.is_implicit_self() => {
pub fn is_self(&self) -> bool {
if let PatKind::Ident(_, ident, _) = self.pat.node {
- ident.name == keywords::SelfLower.name()
+ ident.name == kw::SelfLower
} else {
false
}
use crate::parse::{self, ParseSess, PResult};
use crate::parse::token::{self, Token};
use crate::ptr::P;
-use crate::symbol::{keywords, Symbol, sym};
+use crate::symbol::{sym, Symbol};
use crate::ThinVec;
use crate::tokenstream::{TokenStream, TokenTree, DelimSpan};
use crate::GLOBALS;
self.meta_item().and_then(|meta_item| meta_item.ident())
}
pub fn name_or_empty(&self) -> Symbol {
- self.ident().unwrap_or(keywords::Invalid.ident()).name
+ self.ident().unwrap_or(Ident::invalid()).name
}
/// Gets the string value if self is a MetaItem and the MetaItem is a
}
}
pub fn name_or_empty(&self) -> Symbol {
- self.ident().unwrap_or(keywords::Invalid.ident()).name
+ self.ident().unwrap_or(Ident::invalid()).name
}
pub fn value_str(&self) -> Option<Symbol> {
}
}
pub fn name_or_empty(&self) -> Symbol {
- self.ident().unwrap_or(keywords::Invalid.ident()).name
+ self.ident().unwrap_or(Ident::invalid()).name
}
// #[attribute(name = "value")]
Some(TokenTree::Token(_, token::Eq)) => {
tokens.next();
return if let Some(TokenTree::Token(span, token)) = tokens.next() {
- Lit::from_token(&token, span, None).map(MetaItemKind::NameValue)
+ Lit::from_token(&token, span).ok().map(MetaItemKind::NameValue)
} else {
None
};
where I: Iterator<Item = TokenTree>,
{
if let Some(TokenTree::Token(span, token)) = tokens.peek().cloned() {
- if let Some(lit) = Lit::from_token(&token, span, None) {
+ if let Ok(lit) = Lit::from_token(&token, span) {
tokens.next();
return Some(NestedMetaItem::Literal(lit));
}
use crate::ext::build::AstBuilder;
use crate::parse::token;
use crate::ptr::P;
-use crate::symbol::keywords;
+use crate::symbol::kw;
use crate::tokenstream::{TokenTree};
use smallvec::smallvec;
},
(3, Some(&TokenTree::Token(_, token::Ident(ref code, _))),
Some(&TokenTree::Token(_, token::Comma)),
- Some(&TokenTree::Token(_, token::Literal(token::StrRaw(description, _), None)))) => {
- (code, Some(description))
+ Some(&TokenTree::Token(_, token::Literal(token::Lit { symbol, .. })))) => {
+ (code, Some(symbol))
}
_ => unreachable!()
};
(descriptions.len(), ecx.expr_vec(span, descriptions))
});
- let static_ = ecx.lifetime(span, keywords::StaticLifetime.ident());
+ let static_ = ecx.lifetime(span, Ident::with_empty_ctxt(kw::StaticLifetime));
let ty_str = ecx.ty_rptr(
span,
ecx.ty_ident(span, ecx.ident_of("str")),
use crate::parse::{self, parser, DirectoryOwnership};
use crate::parse::token;
use crate::ptr::P;
-use crate::symbol::{keywords, Ident, Symbol, sym};
+use crate::symbol::{kw, sym, Ident, Symbol};
use crate::ThinVec;
use crate::tokenstream::{self, TokenStream};
}
pub fn std_path(&self, components: &[&str]) -> Vec<ast::Ident> {
let def_site = DUMMY_SP.apply_mark(self.current_expansion.mark);
- iter::once(Ident::new(keywords::DollarCrate.name(), def_site))
+ iter::once(Ident::new(kw::DollarCrate, def_site))
.chain(components.iter().map(|s| self.ident_of(s)))
.collect()
}
use crate::source_map::{dummy_spanned, respan, Spanned};
use crate::ext::base::ExtCtxt;
use crate::ptr::P;
-use crate::symbol::{Symbol, keywords};
+use crate::symbol::{Symbol, kw};
use crate::ThinVec;
use rustc_target::spec::abi::Abi;
self.expr_path(self.path_ident(span, id))
}
fn expr_self(&self, span: Span) -> P<ast::Expr> {
- self.expr_ident(span, keywords::SelfLower.ident())
+ self.expr_ident(span, Ident::with_empty_ctxt(kw::SelfLower))
}
fn expr_binary(&self, sp: Span, op: ast::BinOpKind,
self.pat_tuple_struct(span, path, vec![pat])
}
- fn arm(&self, _span: Span, pats: Vec<P<ast::Pat>>, expr: P<ast::Expr>) -> ast::Arm {
+ fn arm(&self, span: Span, pats: Vec<P<ast::Pat>>, expr: P<ast::Expr>) -> ast::Arm {
ast::Arm {
attrs: vec![],
pats,
guard: None,
body: expr,
+ span,
}
}
vis: ast::Visibility, vp: P<ast::UseTree>) -> P<ast::Item> {
P(ast::Item {
id: ast::DUMMY_NODE_ID,
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
attrs: vec![],
node: ast::ItemKind::Use(vp),
vis,
use crate::parse::parser::Parser;
use crate::ptr::P;
use crate::symbol::Symbol;
-use crate::symbol::{keywords, sym};
+use crate::symbol::{kw, sym};
use crate::tokenstream::{TokenStream, TokenTree};
use crate::visit::{self, Visitor};
use crate::util::map_in_place::MapInPlace;
if i != 0 {
path_str.push_str("::");
}
- if segment.ident.name != keywords::PathRoot.name() {
+ if segment.ident.name != kw::PathRoot {
path_str.push_str(&segment.ident.as_str())
}
}
attrs: krate.attrs,
span: krate.span,
node: ast::ItemKind::Mod(krate.module),
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
id: ast::DUMMY_NODE_ID,
vis: respan(krate.span.shrink_to_lo(), ast::VisibilityKind::Public),
tokens: None,
};
let path = &mac.node.path;
- let ident = ident.unwrap_or_else(|| keywords::Invalid.ident());
+ let ident = ident.unwrap_or_else(|| Ident::invalid());
let validate_and_set_expn_info = |this: &mut Self, // arg instead of capture
def_site_span: Option<Span>,
allow_internal_unstable,
}
}
- if ident.name != keywords::Invalid.name() {
+ if ident.name != kw::Invalid {
let msg = format!("macro {}! expects no ident argument, given '{}'", path, ident);
this.cx.span_err(path.span, &msg);
this.cx.trace_macros_diag();
}
IdentTT { ref expander, span: tt_span, ref allow_internal_unstable } => {
- if ident.name == keywords::Invalid.name() {
+ if ident.name == kw::Invalid {
self.cx.span_err(path.span,
&format!("macro {}! expects an ident argument", path));
self.cx.trace_macros_diag();
}
SyntaxExtension::ProcMacro { ref expander, ref allow_internal_unstable, edition } => {
- if ident.name != keywords::Invalid.name() {
+ if ident.name != kw::Invalid {
let msg =
format!("macro {}! expects no ident argument, given '{}'", path, ident);
self.cx.span_err(path.span, &msg);
invoc.expansion_data.mark.set_expn_info(expn_info);
let span = span.with_ctxt(self.cx.backtrace());
let dummy = ast::MetaItem { // FIXME(jseyfried) avoid this
- path: Path::from_ident(keywords::Invalid.ident()),
+ path: Path::from_ident(Ident::invalid()),
span: DUMMY_SP,
node: ast::MetaItemKind::Word,
};
})
}
ast::ItemKind::Mod(ast::Mod { inner, .. }) => {
- if item.ident == keywords::Invalid.ident() {
+ if item.ident == Ident::invalid() {
return noop_flat_map_item(item, self);
}
use crate::tokenstream::TokenStream;
use crate::mut_visit::*;
use crate::ptr::P;
-use crate::symbol::keywords;
use crate::ThinVec;
use smallvec::{smallvec, SmallVec};
})
}
- let ident = keywords::Invalid.ident();
+ let ident = ast::Ident::invalid();
let attrs = Vec::new();
let generics = ast::Generics::default();
let vis = dummy_spanned(ast::VisibilityKind::Inherited);
use crate::parse::parser::{Parser, PathStyle};
use crate::parse::token::{self, DocComment, Nonterminal, Token};
use crate::print::pprust;
-use crate::symbol::keywords;
+use crate::symbol::kw;
use crate::tokenstream::{DelimSpan, TokenStream};
use errors::FatalError;
TokenTree::Delimited(_, ref delim) => for next_m in &delim.tts {
n_rec(sess, next_m, res.by_ref(), ret_val)?;
},
- TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => {
+ TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
return Err((span, "missing fragment specifier".to_string()));
}
}
// We need to match a metavar (but the identifier is invalid)... this is an error
- TokenTree::MetaVarDecl(span, _, id) if id.name == keywords::Invalid.name() => {
+ TokenTree::MetaVarDecl(span, _, id) if id.name == kw::Invalid => {
if sess.missing_fragment_specifiers.borrow_mut().remove(&span) {
return Error(span, "missing fragment specifier".to_string());
}
/// We prohibit passing `_` to macros expecting `ident` for now.
fn get_macro_ident(token: &Token) -> Option<(Ident, bool)> {
match *token {
- token::Ident(ident, is_raw) if ident.name != keywords::Underscore.name() =>
+ token::Ident(ident, is_raw) if ident.name != kw::Underscore =>
Some((ident, is_raw)),
_ => None,
}
use crate::parse::parser::Parser;
use crate::parse::token::{self, NtTT};
use crate::parse::token::Token::*;
-use crate::symbol::{Symbol, keywords, sym};
+use crate::symbol::{Symbol, kw, sym};
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
use errors::FatalError;
match *tok {
TokenTree::Token(_, ref tok) => match *tok {
FatArrow | Comma | Eq | BinOp(token::Or) => IsInFollow::Yes,
- Ident(i, false) if i.name == keywords::If.name() ||
- i.name == keywords::In.name() => IsInFollow::Yes,
+ Ident(i, false) if i.name == kw::If ||
+ i.name == kw::In => IsInFollow::Yes,
_ => IsInFollow::No(tokens),
},
_ => IsInFollow::No(tokens),
OpenDelim(token::DelimToken::Bracket) |
Comma | FatArrow | Colon | Eq | Gt | BinOp(token::Shr) | Semi |
BinOp(token::Or) => IsInFollow::Yes,
- Ident(i, false) if i.name == keywords::As.name() ||
- i.name == keywords::Where.name() => IsInFollow::Yes,
+ Ident(i, false) if i.name == kw::As ||
+ i.name == kw::Where => IsInFollow::Yes,
_ => IsInFollow::No(tokens),
},
TokenTree::MetaVarDecl(_, _, frag) if frag.name == sym::block =>
match *tok {
TokenTree::Token(_, ref tok) => match *tok {
Comma => IsInFollow::Yes,
- Ident(i, is_raw) if is_raw || i.name != keywords::Priv.name() =>
+ Ident(i, is_raw) if is_raw || i.name != kw::Priv =>
IsInFollow::Yes,
ref tok => if tok.can_begin_type() {
IsInFollow::Yes
_ => IsInFollow::No(tokens),
}
},
- "" => IsInFollow::Yes, // keywords::Invalid
+ "" => IsInFollow::Yes, // kw::Invalid
_ => IsInFollow::Invalid(format!("invalid fragment specifier `{}`", frag),
VALID_FRAGMENT_NAMES_MSG),
}
use crate::print::pprust;
use crate::tokenstream::{self, DelimSpan};
use crate::ast;
-use crate::symbol::keywords;
+use crate::symbol::kw;
use syntax_pos::{edition::Edition, BytePos, Span};
result.push(TokenTree::MetaVarDecl(
span,
ident,
- keywords::Invalid.ident(),
+ ast::Ident::invalid(),
));
}
Some(tokenstream::TokenTree::Token(ident_span, ref token)) if token.is_ident() => {
let (ident, is_raw) = token.ident().unwrap();
let span = ident_span.with_lo(span.lo());
- if ident.name == keywords::Crate.name() && !is_raw {
- let ident = ast::Ident::new(keywords::DollarCrate.name(), ident.span);
+ if ident.name == kw::Crate && !is_raw {
+ let ident = ast::Ident::new(kw::DollarCrate, ident.span);
TokenTree::Token(span, token::Ident(ident, is_raw))
} else {
TokenTree::MetaVar(span, ident)
pprust::token_to_string(&tok)
);
sess.span_diagnostic.span_err(span, &msg);
- TokenTree::MetaVar(span, keywords::Invalid.ident())
+ TokenTree::MetaVar(span, ast::Ident::invalid())
}
// There are no more tokens. Just return the `$` we already have.
}
LockstepIterSize::Contradiction(ref msg) => {
- // This should never happen because the macro parser should generate
- // properly-sized matches for all meta-vars.
- cx.span_bug(seq.span(), &msg[..]);
+ // FIXME: this really ought to be caught at macro definition time... It
+ // happens when two meta-variables are used in the same repetition in a
+ // sequence, but they come from different sequence matchers and repeat
+ // different amounts.
+ cx.span_fatal(seq.span(), &msg[..]);
}
LockstepIterSize::Constraint(len, _) => {
// Is the repetition empty?
if len == 0 {
if seq.op == quoted::KleeneOp::OneOrMore {
- // This should be impossible because the macro parser would not
- // match the given macro arm.
- cx.span_bug(sp.entire(), "this must repeat at least once");
+ // FIXME: this really ought to be caught at macro definition
+ // time... It happens when the Kleene operator in the matcher and
+ // the body for the same meta-variable do not match.
+ cx.span_fatal(sp.entire(), "this must repeat at least once");
}
} else {
// 0 is the initial counter (we have done 0 repretitions so far). `len`
LockstepIterSize::Constraint(r_len, _) if l_len == r_len => self,
LockstepIterSize::Constraint(r_len, r_id) => {
let msg = format!(
- "inconsistent lockstep iteration: \
- '{}' has {} items, but '{}' has {}",
+ "meta-variable `{}` repeats {} times, but `{}` repeats {} times",
l_id, l_len, r_id, r_len
);
LockstepIterSize::Contradiction(msg)
use crate::edition::{ALL_EDITIONS, Edition};
use crate::visit::{self, FnKind, Visitor};
use crate::parse::{token, ParseSess};
-use crate::symbol::{Symbol, keywords, sym};
+use crate::symbol::{Symbol, kw, sym};
use crate::tokenstream::TokenTree;
use errors::{DiagnosticBuilder, Handler};
is just used to enable niche optimizations in libcore \
and will never be stable",
cfg_fn!(rustc_attrs))),
+ (sym::rustc_nonnull_optimization_guaranteed, Whitelisted, template!(Word),
+ Gated(Stability::Unstable,
+ sym::rustc_attrs,
+ "the `#[rustc_nonnull_optimization_guaranteed]` attribute \
+ is just used to enable niche optimizations in libcore \
+ and will never be stable",
+ cfg_fn!(rustc_attrs))),
(sym::rustc_regions, Normal, template!(Word), Gated(Stability::Unstable,
sym::rustc_attrs,
"the `#[rustc_regions]` attribute \
fn visit_item(&mut self, i: &'a ast::Item) {
match i.node {
ast::ItemKind::Const(_,_) => {
- if i.ident.name == keywords::Underscore.name() {
+ if i.ident.name == kw::Underscore {
gate_feature_post!(&self, underscore_const_names, i.span,
"naming constants with `_` is unstable");
}
}
}
- fn emit_artifact_notification(&mut self, path: &Path) {
- let data = ArtifactNotification { artifact: path };
+ fn emit_artifact_notification(&mut self, path: &Path, artifact_type: &str) {
+ let data = ArtifactNotification { artifact: path, emit: artifact_type };
let result = if self.pretty {
writeln!(&mut self.dst, "{}", as_pretty_json(&data))
} else {
struct ArtifactNotification<'a> {
/// The path of the artifact.
artifact: &'a Path,
+ /// What kind of artifact we're emitting.
+ emit: &'a str,
}
impl Diagnostic {
use crate::source_map::{Spanned, respan};
use crate::parse::token::{self, Token};
use crate::ptr::P;
-use crate::symbol::keywords;
use crate::ThinVec;
use crate::tokenstream::*;
use crate::util::map_in_place::MapInPlace;
vis.visit_span(span);
}
-pub fn noop_visit_arm<T: MutVisitor>(Arm { attrs, pats, guard, body }: &mut Arm, vis: &mut T) {
+pub fn noop_visit_arm<T: MutVisitor>(
+ Arm { attrs, pats, guard, body, span }: &mut Arm,
+ vis: &mut T,
+) {
visit_attrs(attrs, vis);
visit_vec(pats, |pat| vis.visit_pat(pat));
visit_opt(guard, |guard| vis.visit_guard(guard));
vis.visit_expr(body);
+ vis.visit_span(span);
}
pub fn noop_visit_guard<T: MutVisitor>(g: &mut Guard, vis: &mut T) {
pub fn noop_visit_crate<T: MutVisitor>(krate: &mut Crate, vis: &mut T) {
visit_clobber(krate, |Crate { module, attrs, span }| {
let item = P(Item {
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
attrs,
id: DUMMY_NODE_ID,
vis: respan(span.shrink_to_lo(), VisibilityKind::Public),
use crate::parse::Parser;
use crate::print::pprust;
use crate::ptr::P;
-use crate::symbol::keywords;
+use crate::symbol::kw;
use crate::ThinVec;
use errors::{Applicability, DiagnosticBuilder};
use syntax_pos::Span;
/// Recover from `pub` keyword in places where it seems _reasonable_ but isn't valid.
crate fn eat_bad_pub(&mut self) {
- if self.token.is_keyword(keywords::Pub) {
+ if self.token.is_keyword(kw::Pub) {
match self.parse_visibility(false) {
Ok(vis) => {
self.diagnostic()
use crate::ast::{self, Ident};
-use crate::parse::{token, ParseSess};
+use crate::parse::ParseSess;
+use crate::parse::token::{self, Token};
use crate::symbol::Symbol;
use crate::parse::unescape;
use crate::parse::unescape_error_reporting::{emit_unescape_error, push_escaped_char};
#[derive(Clone, Debug)]
pub struct TokenAndSpan {
- pub tok: token::Token,
+ pub tok: Token,
pub sp: Span,
}
/// Stop reading src at this index.
crate end_src_index: usize,
// cached:
- peek_tok: token::Token,
+ peek_tok: Token,
peek_span: Span,
peek_span_src_raw: Span,
fatal_errs: Vec<DiagnosticBuilder<'a>>,
}
/// Lex a LIT_INTEGER or a LIT_FLOAT
- fn scan_number(&mut self, c: char) -> token::Lit {
+ fn scan_number(&mut self, c: char) -> (token::LitKind, Symbol) {
let mut base = 10;
let start_bpos = self.pos;
self.bump();
}
_ => {
// just a 0
- return token::Integer(self.name_from(start_bpos));
+ return (token::Integer, self.name_from(start_bpos));
}
}
} else if c.is_digit(10) {
if num_digits == 0 {
self.err_span_(start_bpos, self.pos, "no valid digits found for number");
- return token::Integer(Symbol::intern("0"));
+ return (token::Integer, Symbol::intern("0"));
}
// might be a float, but don't be greedy if this is actually an
let pos = self.pos;
self.check_float_base(start_bpos, pos, base);
- token::Float(self.name_from(start_bpos))
+ (token::Float, self.name_from(start_bpos))
} else {
// it might be a float if it has an exponent
if self.ch_is('e') || self.ch_is('E') {
self.scan_float_exponent();
let pos = self.pos;
self.check_float_base(start_bpos, pos, base);
- return token::Float(self.name_from(start_bpos));
+ return (token::Float, self.name_from(start_bpos));
}
// but we certainly have an integer!
- token::Integer(self.name_from(start_bpos))
+ (token::Integer, self.name_from(start_bpos))
}
}
}
}
- fn binop(&mut self, op: token::BinOpToken) -> token::Token {
+ fn binop(&mut self, op: token::BinOpToken) -> Token {
self.bump();
if self.ch_is('=') {
self.bump();
/// Returns the next token from the string, advances the input past that
/// token, and updates the interner
- fn next_token_inner(&mut self) -> Result<token::Token, ()> {
+ fn next_token_inner(&mut self) -> Result<Token, ()> {
let c = self.ch;
if ident_start(c) {
}
if is_dec_digit(c) {
- let num = self.scan_number(c.unwrap());
+ let (kind, symbol) = self.scan_number(c.unwrap());
let suffix = self.scan_optional_raw_name();
- debug!("next_token_inner: scanned number {:?}, {:?}", num, suffix);
- return Ok(token::Literal(num, suffix));
+ debug!("next_token_inner: scanned number {:?}, {:?}, {:?}", kind, symbol, suffix);
+ return Ok(Token::lit(kind, symbol, suffix));
}
match c.expect("next_token_inner called at EOF") {
// lifetimes shouldn't end with a single quote
// if we find one, then this is an invalid character literal
if self.ch_is('\'') {
- let id = self.name_from(start);
+ let symbol = self.name_from(start);
self.bump();
self.validate_char_escape(start_with_quote);
- return Ok(token::Literal(token::Char(id), None))
+ return Ok(Token::lit(token::Char, symbol, None));
}
// Include the leading `'` in the real identifier, for macro
return Ok(token::Lifetime(ident));
}
let msg = "unterminated character literal";
- let id = self.scan_single_quoted_string(start_with_quote, msg);
+ let symbol = self.scan_single_quoted_string(start_with_quote, msg);
self.validate_char_escape(start_with_quote);
let suffix = self.scan_optional_raw_name();
- Ok(token::Literal(token::Char(id), suffix))
+ Ok(Token::lit(token::Char, symbol, suffix))
}
'b' => {
self.bump();
- let lit = match self.ch {
+ let (kind, symbol) = match self.ch {
Some('\'') => {
let start_with_quote = self.pos;
self.bump();
let msg = "unterminated byte constant";
- let id = self.scan_single_quoted_string(start_with_quote, msg);
+ let symbol = self.scan_single_quoted_string(start_with_quote, msg);
self.validate_byte_escape(start_with_quote);
- token::Byte(id)
+ (token::Byte, symbol)
},
Some('"') => {
let start_with_quote = self.pos;
let msg = "unterminated double quote byte string";
- let id = self.scan_double_quoted_string(msg);
+ let symbol = self.scan_double_quoted_string(msg);
self.validate_byte_str_escape(start_with_quote);
- token::ByteStr(id)
+ (token::ByteStr, symbol)
},
Some('r') => self.scan_raw_byte_string(),
_ => unreachable!(), // Should have been a token::Ident above.
};
let suffix = self.scan_optional_raw_name();
- Ok(token::Literal(lit, suffix))
+ Ok(Token::lit(kind, symbol, suffix))
}
'"' => {
let start_with_quote = self.pos;
let msg = "unterminated double quote string";
- let id = self.scan_double_quoted_string(msg);
+ let symbol = self.scan_double_quoted_string(msg);
self.validate_str_escape(start_with_quote);
let suffix = self.scan_optional_raw_name();
- Ok(token::Literal(token::Str_(id), suffix))
+ Ok(Token::lit(token::Str, symbol, suffix))
}
'r' => {
let start_bpos = self.pos;
}
self.bump();
- let id = if valid {
+ let symbol = if valid {
self.name_from_to(content_start_bpos, content_end_bpos)
} else {
Symbol::intern("??")
};
let suffix = self.scan_optional_raw_name();
- Ok(token::Literal(token::StrRaw(id, hash_count), suffix))
+ Ok(Token::lit(token::StrRaw(hash_count), symbol, suffix))
}
'-' => {
if self.nextch_is('>') {
id
}
- fn scan_raw_byte_string(&mut self) -> token::Lit {
+ fn scan_raw_byte_string(&mut self) -> (token::LitKind, Symbol) {
let start_bpos = self.pos;
self.bump();
let mut hash_count = 0;
self.bump();
- token::ByteStrRaw(self.name_from_to(content_start_bpos, content_end_bpos), hash_count)
+ (token::ByteStrRaw(hash_count), self.name_from_to(content_start_bpos, content_end_bpos))
}
fn validate_char_escape(&self, start_with_quote: BytePos) {
// check that the given reader produces the desired stream
// of tokens (stop checking after exhausting the expected vec)
- fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<token::Token>) {
+ fn check_tokenization(mut string_reader: StringReader<'_>, expected: Vec<Token>) {
for expected_tok in &expected {
assert_eq!(&string_reader.next_token().tok, expected_tok);
}
}
// make the identifier by looking up the string in the interner
- fn mk_ident(id: &str) -> token::Token {
- token::Token::from_ast_ident(Ident::from_str(id))
+ fn mk_ident(id: &str) -> Token {
+ Token::from_ast_ident(Ident::from_str(id))
+ }
+
+ fn mk_lit(kind: token::LitKind, symbol: &str, suffix: Option<&str>) -> Token {
+ Token::lit(kind, Symbol::intern(symbol), suffix.map(Symbol::intern))
}
#[test]
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'a'".to_string()).next_token().tok,
- token::Literal(token::Char(Symbol::intern("a")), None));
+ mk_lit(token::Char, "a", None));
})
}
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "' '".to_string()).next_token().tok,
- token::Literal(token::Char(Symbol::intern(" ")), None));
+ mk_lit(token::Char, " ", None));
})
}
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'\\n'".to_string()).next_token().tok,
- token::Literal(token::Char(Symbol::intern("\\n")), None));
+ mk_lit(token::Char, "\\n", None));
})
}
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
assert_eq!(setup(&sm, &sh, "'abc".to_string()).next_token().tok,
- token::Lifetime(Ident::from_str("'abc")));
+ token::Lifetime(Ident::from_str("'abc")));
})
}
with_default_globals(|| {
let sm = Lrc::new(SourceMap::new(FilePathMapping::empty()));
let sh = mk_sess(sm.clone());
- assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string())
- .next_token()
- .tok,
- token::Literal(token::StrRaw(Symbol::intern("\"#a\\b\x00c\""), 3), None));
+ assert_eq!(setup(&sm, &sh, "r###\"\"#a\\b\x00c\"\"###".to_string()).next_token().tok,
+ mk_lit(token::StrRaw(3), "\"#a\\b\x00c\"", None));
})
}
macro_rules! test {
($input: expr, $tok_type: ident, $tok_contents: expr) => {{
assert_eq!(setup(&sm, &sh, format!("{}suffix", $input)).next_token().tok,
- token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
- Some(Symbol::intern("suffix"))));
+ mk_lit(token::$tok_type, $tok_contents, Some("suffix")));
// with a whitespace separator:
assert_eq!(setup(&sm, &sh, format!("{} suffix", $input)).next_token().tok,
- token::Literal(token::$tok_type(Symbol::intern($tok_contents)),
- None));
+ mk_lit(token::$tok_type, $tok_contents, None));
}}
}
test!("'a'", Char, "a");
test!("b'a'", Byte, "a");
- test!("\"a\"", Str_, "a");
+ test!("\"a\"", Str, "a");
test!("b\"a\"", ByteStr, "a");
test!("1234", Integer, "1234");
test!("0b101", Integer, "0b101");
test!("1.0e10", Float, "1.0e10");
assert_eq!(setup(&sm, &sh, "2us".to_string()).next_token().tok,
- token::Literal(token::Integer(Symbol::intern("2")),
- Some(Symbol::intern("us"))));
+ mk_lit(token::Integer, "2", Some("us")));
assert_eq!(setup(&sm, &sh, "r###\"raw\"###suffix".to_string()).next_token().tok,
- token::Literal(token::StrRaw(Symbol::intern("raw"), 3),
- Some(Symbol::intern("suffix"))));
+ mk_lit(token::StrRaw(3), "raw", Some("suffix")));
assert_eq!(setup(&sm, &sh, "br###\"raw\"###suffix".to_string()).next_token().tok,
- token::Literal(token::ByteStrRaw(Symbol::intern("raw"), 3),
- Some(Symbol::intern("suffix"))));
+ mk_lit(token::ByteStrRaw(3), "raw", Some("suffix")));
})
}
token::Comment => {}
_ => panic!("expected a comment!"),
}
- assert_eq!(lexer.next_token().tok,
- token::Literal(token::Char(Symbol::intern("a")), None));
+ assert_eq!(lexer.next_token().tok, mk_lit(token::Char, "a", None));
})
}
use crate::parse::token::{self, Token};
use crate::parse::unescape::{unescape_str, unescape_char, unescape_byte_str, unescape_byte};
use crate::print::pprust;
-use crate::symbol::{keywords, Symbol};
+use crate::symbol::{kw, sym, Symbol};
use crate::tokenstream::{TokenStream, TokenTree};
use errors::{Applicability, Handler};
use std::ascii;
-macro_rules! err {
- ($opt_diag:expr, |$span:ident, $diag:ident| $($body:tt)*) => {
- match $opt_diag {
- Some(($span, $diag)) => { $($body)* }
- None => return None,
+crate enum LitError {
+ NotLiteral,
+ LexerError,
+ InvalidSuffix,
+ InvalidIntSuffix,
+ InvalidFloatSuffix,
+ NonDecimalFloat(u32),
+ IntTooLarge,
+}
+
+impl LitError {
+ fn report(&self, diag: &Handler, lit: token::Lit, span: Span) {
+ let token::Lit { kind, suffix, .. } = lit;
+ match *self {
+ // `NotLiteral` is not an error by itself, so we don't report
+ // it and give the parser opportunity to try something else.
+ LitError::NotLiteral => {}
+ // `LexerError` *is* an error, but it was already reported
+ // by lexer, so here we don't report it the second time.
+ LitError::LexerError => {}
+ LitError::InvalidSuffix => {
+ expect_no_suffix(
+ diag, span, &format!("{} {} literal", kind.article(), kind.descr()), suffix
+ );
+ }
+ LitError::InvalidIntSuffix => {
+ let suf = suffix.expect("suffix error with no suffix").as_str();
+ if looks_like_width_suffix(&['i', 'u'], &suf) {
+ // If it looks like a width, try to be helpful.
+ let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
+ diag.struct_span_err(span, &msg)
+ .help("valid widths are 8, 16, 32, 64 and 128")
+ .emit();
+ } else {
+ let msg = format!("invalid suffix `{}` for integer literal", suf);
+ diag.struct_span_err(span, &msg)
+ .span_label(span, format!("invalid suffix `{}`", suf))
+ .help("the suffix must be one of the integral types (`u32`, `isize`, etc)")
+ .emit();
+ }
+ }
+ LitError::InvalidFloatSuffix => {
+ let suf = suffix.expect("suffix error with no suffix").as_str();
+ if looks_like_width_suffix(&['f'], &suf) {
+ // If it looks like a width, try to be helpful.
+ let msg = format!("invalid width `{}` for float literal", &suf[1..]);
+ diag.struct_span_err(span, &msg)
+ .help("valid widths are 32 and 64")
+ .emit();
+ } else {
+ let msg = format!("invalid suffix `{}` for float literal", suf);
+ diag.struct_span_err(span, &msg)
+ .span_label(span, format!("invalid suffix `{}`", suf))
+ .help("valid suffixes are `f32` and `f64`")
+ .emit();
+ }
+ }
+ LitError::NonDecimalFloat(base) => {
+ let descr = match base {
+ 16 => "hexadecimal",
+ 8 => "octal",
+ 2 => "binary",
+ _ => unreachable!(),
+ };
+ diag.struct_span_err(span, &format!("{} float literal is not supported", descr))
+ .span_label(span, "not supported")
+ .emit();
+ }
+ LitError::IntTooLarge => {
+ diag.struct_span_err(span, "integer literal is too large")
+ .emit();
+ }
}
}
}
impl LitKind {
- /// Converts literal token with a suffix into a semantic literal.
- /// Works speculatively and may return `None` if diagnostic handler is not passed.
- /// If diagnostic handler is passed, always returns `Some`,
- /// possibly after reporting non-fatal errors and recovery.
- fn from_lit_token(
- lit: token::Lit,
- suf: Option<Symbol>,
- diag: Option<(Span, &Handler)>
- ) -> Option<LitKind> {
- if suf.is_some() && !lit.may_have_suffix() {
- err!(diag, |span, diag| {
- expect_no_suffix(span, diag, &format!("a {}", lit.literal_name()), suf)
- });
+ /// Converts literal token into a semantic literal.
+ fn from_lit_token(lit: token::Lit) -> Result<LitKind, LitError> {
+ let token::Lit { kind, symbol, suffix } = lit;
+ if suffix.is_some() && !kind.may_have_suffix() {
+ return Err(LitError::InvalidSuffix);
}
- Some(match lit {
- token::Bool(i) => {
- assert!(i == keywords::True.name() || i == keywords::False.name());
- LitKind::Bool(i == keywords::True.name())
+ Ok(match kind {
+ token::Bool => {
+ assert!(symbol == kw::True || symbol == kw::False);
+ LitKind::Bool(symbol == kw::True)
}
- token::Byte(i) => {
- match unescape_byte(&i.as_str()) {
- Ok(c) => LitKind::Byte(c),
- Err(_) => LitKind::Err(i),
- }
- },
- token::Char(i) => {
- match unescape_char(&i.as_str()) {
- Ok(c) => LitKind::Char(c),
- Err(_) => LitKind::Err(i),
- }
- },
- token::Err(i) => LitKind::Err(i),
+ token::Byte => return unescape_byte(&symbol.as_str())
+ .map(LitKind::Byte).map_err(|_| LitError::LexerError),
+ token::Char => return unescape_char(&symbol.as_str())
+ .map(LitKind::Char).map_err(|_| LitError::LexerError),
// There are some valid suffixes for integer and float literals,
// so all the handling is done internally.
- token::Integer(s) => return integer_lit(&s.as_str(), suf, diag),
- token::Float(s) => return float_lit(&s.as_str(), suf, diag),
+ token::Integer => return integer_lit(symbol, suffix),
+ token::Float => return float_lit(symbol, suffix),
- token::Str_(mut sym) => {
+ token::Str => {
// If there are no characters requiring special treatment we can
- // reuse the symbol from the Token. Otherwise, we must generate a
+ // reuse the symbol from the token. Otherwise, we must generate a
// new symbol because the string in the LitKind is different to the
- // string in the Token.
- let mut has_error = false;
- let s = &sym.as_str();
- if s.as_bytes().iter().any(|&c| c == b'\\' || c == b'\r') {
+ // string in the token.
+ let s = symbol.as_str();
+ let symbol = if s.contains(&['\\', '\r'][..]) {
let mut buf = String::with_capacity(s.len());
- unescape_str(s, &mut |_, unescaped_char| {
+ let mut error = Ok(());
+ unescape_str(&s, &mut |_, unescaped_char| {
match unescaped_char {
Ok(c) => buf.push(c),
- Err(_) => has_error = true,
+ Err(_) => error = Err(LitError::LexerError),
}
});
- if has_error {
- return Some(LitKind::Err(sym));
- }
- sym = Symbol::intern(&buf)
- }
-
- LitKind::Str(sym, ast::StrStyle::Cooked)
+ error?;
+ Symbol::intern(&buf)
+ } else {
+ symbol
+ };
+ LitKind::Str(symbol, ast::StrStyle::Cooked)
}
- token::StrRaw(mut sym, n) => {
+ token::StrRaw(n) => {
// Ditto.
- let s = &sym.as_str();
- if s.contains('\r') {
- sym = Symbol::intern(&raw_str_lit(s));
- }
- LitKind::Str(sym, ast::StrStyle::Raw(n))
+ let s = symbol.as_str();
+ let symbol = if s.contains('\r') {
+ Symbol::intern(&raw_str_lit(&s))
+ } else {
+ symbol
+ };
+ LitKind::Str(symbol, ast::StrStyle::Raw(n))
}
- token::ByteStr(i) => {
- let s = &i.as_str();
+ token::ByteStr => {
+ let s = symbol.as_str();
let mut buf = Vec::with_capacity(s.len());
- let mut has_error = false;
- unescape_byte_str(s, &mut |_, unescaped_byte| {
+ let mut error = Ok(());
+ unescape_byte_str(&s, &mut |_, unescaped_byte| {
match unescaped_byte {
Ok(c) => buf.push(c),
- Err(_) => has_error = true,
+ Err(_) => error = Err(LitError::LexerError),
}
});
- if has_error {
- return Some(LitKind::Err(i));
- }
+ error?;
buf.shrink_to_fit();
LitKind::ByteStr(Lrc::new(buf))
}
- token::ByteStrRaw(i, _) => {
- LitKind::ByteStr(Lrc::new(i.to_string().into_bytes()))
- }
+ token::ByteStrRaw(_) => LitKind::ByteStr(Lrc::new(symbol.to_string().into_bytes())),
+ token::Err => LitKind::Err(symbol),
})
}
/// Attempts to recover a token from semantic literal.
/// This function is used when the original token doesn't exist (e.g. the literal is created
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
- pub fn to_lit_token(&self) -> (token::Lit, Option<Symbol>) {
- match *self {
+ pub fn to_lit_token(&self) -> token::Lit {
+ let (kind, symbol, suffix) = match *self {
LitKind::Str(string, ast::StrStyle::Cooked) => {
let escaped = string.as_str().escape_default().to_string();
- (token::Lit::Str_(Symbol::intern(&escaped)), None)
+ (token::Str, Symbol::intern(&escaped), None)
}
LitKind::Str(string, ast::StrStyle::Raw(n)) => {
- (token::Lit::StrRaw(string, n), None)
+ (token::StrRaw(n), string, None)
}
LitKind::ByteStr(ref bytes) => {
let string = bytes.iter().cloned().flat_map(ascii::escape_default)
.map(Into::<char>::into).collect::<String>();
- (token::Lit::ByteStr(Symbol::intern(&string)), None)
+ (token::ByteStr, Symbol::intern(&string), None)
}
LitKind::Byte(byte) => {
let string: String = ascii::escape_default(byte).map(Into::<char>::into).collect();
- (token::Lit::Byte(Symbol::intern(&string)), None)
+ (token::Byte, Symbol::intern(&string), None)
}
LitKind::Char(ch) => {
let string: String = ch.escape_default().map(Into::<char>::into).collect();
- (token::Lit::Char(Symbol::intern(&string)), None)
+ (token::Char, Symbol::intern(&string), None)
}
LitKind::Int(n, ty) => {
let suffix = match ty {
ast::LitIntType::Signed(ty) => Some(Symbol::intern(ty.ty_to_string())),
ast::LitIntType::Unsuffixed => None,
};
- (token::Lit::Integer(Symbol::intern(&n.to_string())), suffix)
+ (token::Integer, Symbol::intern(&n.to_string()), suffix)
}
LitKind::Float(symbol, ty) => {
- (token::Lit::Float(symbol), Some(Symbol::intern(ty.ty_to_string())))
+ (token::Float, symbol, Some(Symbol::intern(ty.ty_to_string())))
+ }
+ LitKind::FloatUnsuffixed(symbol) => {
+ (token::Float, symbol, None)
}
- LitKind::FloatUnsuffixed(symbol) => (token::Lit::Float(symbol), None),
LitKind::Bool(value) => {
- let kw = if value { keywords::True } else { keywords::False };
- (token::Lit::Bool(kw.name()), None)
+ let symbol = if value { kw::True } else { kw::False };
+ (token::Bool, symbol, None)
}
- LitKind::Err(val) => (token::Lit::Err(val), None),
- }
+ LitKind::Err(symbol) => {
+ (token::Err, symbol, None)
+ }
+ };
+
+ token::Lit::new(kind, symbol, suffix)
}
}
impl Lit {
- /// Converts literal token with a suffix into an AST literal.
- /// Works speculatively and may return `None` if diagnostic handler is not passed.
- /// If diagnostic handler is passed, may return `Some`,
- /// possibly after reporting non-fatal errors and recovery, or `None` for irrecoverable errors.
- crate fn from_token(
- token: &token::Token,
- span: Span,
- diag: Option<(Span, &Handler)>,
- ) -> Option<Lit> {
- let (token, suffix) = match *token {
- token::Ident(ident, false) if ident.name == keywords::True.name() ||
- ident.name == keywords::False.name() =>
- (token::Bool(ident.name), None),
- token::Literal(token, suffix) =>
- (token, suffix),
+ /// Converts literal token into an AST literal.
+ fn from_lit_token(token: token::Lit, span: Span) -> Result<Lit, LitError> {
+ Ok(Lit { token, node: LitKind::from_lit_token(token)?, span })
+ }
+
+ /// Converts arbitrary token into an AST literal.
+ crate fn from_token(token: &Token, span: Span) -> Result<Lit, LitError> {
+ let lit = match *token {
+ token::Ident(ident, false) if ident.name == kw::True || ident.name == kw::False =>
+ token::Lit::new(token::Bool, ident.name, None),
+ token::Literal(lit) =>
+ lit,
token::Interpolated(ref nt) => {
if let token::NtExpr(expr) | token::NtLiteral(expr) = &**nt {
if let ast::ExprKind::Lit(lit) = &expr.node {
- return Some(lit.clone());
+ return Ok(lit.clone());
}
}
- return None;
+ return Err(LitError::NotLiteral);
}
- _ => return None,
+ _ => return Err(LitError::NotLiteral)
};
- let node = LitKind::from_lit_token(token, suffix, diag)?;
- Some(Lit { node, token, suffix, span })
+ Lit::from_lit_token(lit, span)
}
/// Attempts to recover an AST literal from semantic literal.
/// This function is used when the original token doesn't exist (e.g. the literal is created
/// by an AST-based macro) or unavailable (e.g. from HIR pretty-printing).
pub fn from_lit_kind(node: LitKind, span: Span) -> Lit {
- let (token, suffix) = node.to_lit_token();
- Lit { node, token, suffix, span }
+ Lit { token: node.to_lit_token(), node, span }
}
/// Losslessly convert an AST literal into a token stream.
crate fn tokens(&self) -> TokenStream {
- let token = match self.token {
- token::Bool(symbol) => Token::Ident(Ident::with_empty_ctxt(symbol), false),
- token => Token::Literal(token, self.suffix),
+ let token = match self.token.kind {
+ token::Bool => token::Ident(Ident::new(self.token.symbol, self.span), false),
+ _ => token::Literal(self.token),
};
TokenTree::Token(self.span, token).into()
}
impl<'a> Parser<'a> {
/// Matches `lit = true | false | token_lit`.
crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
- let diag = Some((self.span, &self.sess.span_diagnostic));
- if let Some(lit) = Lit::from_token(&self.token, self.span, diag) {
- self.bump();
- return Ok(lit);
- } else if self.token == token::Dot {
- // Recover `.4` as `0.4`.
- let recovered = self.look_ahead(1, |t| {
- if let token::Literal(token::Integer(val), suf) = *t {
+ let mut recovered = None;
+ if self.token == token::Dot {
+ // Attempt to recover `.4` as `0.4`.
+ recovered = self.look_ahead(1, |t| {
+ if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = *t {
let next_span = self.look_ahead_span(1);
if self.span.hi() == next_span.lo() {
- let sym = String::from("0.") + &val.as_str();
- let token = token::Literal(token::Float(Symbol::intern(&sym)), suf);
+ let s = String::from("0.") + &symbol.as_str();
+ let token = Token::lit(token::Float, Symbol::intern(&s), suffix);
return Some((token, self.span.to(next_span)));
}
}
None
});
- if let Some((token, span)) = recovered {
+ if let Some((ref token, span)) = recovered {
+ self.bump();
self.diagnostic()
.struct_span_err(span, "float literals must have an integer part")
.span_suggestion(
Applicability::MachineApplicable,
)
.emit();
- let diag = Some((span, &self.sess.span_diagnostic));
- if let Some(lit) = Lit::from_token(&token, span, diag) {
- self.bump();
- self.bump();
- return Ok(lit);
- }
}
}
- Err(self.span_fatal(self.span, &format!("unexpected token: {}", self.this_token_descr())))
- }
-}
+ let (token, span) = recovered.as_ref().map_or((&self.token, self.span),
+ |(token, span)| (token, *span));
-crate fn expect_no_suffix(sp: Span, diag: &Handler, kind: &str, suffix: Option<ast::Name>) {
- match suffix {
- None => {/* everything ok */}
- Some(suf) => {
- let text = suf.as_str();
- if text.is_empty() {
- diag.span_bug(sp, "found empty literal suffix in Some")
+ match Lit::from_token(token, span) {
+ Ok(lit) => {
+ self.bump();
+ Ok(lit)
+ }
+ Err(LitError::NotLiteral) => {
+ let msg = format!("unexpected token: {}", self.this_token_descr());
+ Err(self.span_fatal(span, &msg))
+ }
+ Err(err) => {
+ let lit = token.expect_lit();
+ self.bump();
+ err.report(&self.sess.span_diagnostic, lit, span);
+ let lit = token::Lit::new(token::Err, lit.symbol, lit.suffix);
+ Lit::from_lit_token(lit, span).map_err(|_| unreachable!())
}
- let mut err = if kind == "a tuple index" &&
- ["i32", "u32", "isize", "usize"].contains(&text.to_string().as_str())
- {
- // #59553: warn instead of reject out of hand to allow the fix to percolate
- // through the ecosystem when people fix their macros
- let mut err = diag.struct_span_warn(
- sp,
- &format!("suffixes on {} are invalid", kind),
- );
- err.note(&format!(
- "`{}` is *temporarily* accepted on tuple index fields as it was \
- incorrectly accepted on stable for a few releases",
- text,
- ));
- err.help(
- "on proc macros, you'll want to use `syn::Index::from` or \
- `proc_macro::Literal::*_unsuffixed` for code that will desugar \
- to tuple field access",
- );
- err.note(
- "for more context, see https://github.com/rust-lang/rust/issues/60210",
- );
- err
- } else {
- diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
- };
- err.span_label(sp, format!("invalid suffix `{}`", text));
- err.emit();
}
}
}
+crate fn expect_no_suffix(diag: &Handler, sp: Span, kind: &str, suffix: Option<Symbol>) {
+ if let Some(suf) = suffix {
+ let mut err = if kind == "a tuple index" &&
+ [sym::i32, sym::u32, sym::isize, sym::usize].contains(&suf) {
+ // #59553: warn instead of reject out of hand to allow the fix to percolate
+ // through the ecosystem when people fix their macros
+ let mut err = diag.struct_span_warn(
+ sp,
+ &format!("suffixes on {} are invalid", kind),
+ );
+ err.note(&format!(
+ "`{}` is *temporarily* accepted on tuple index fields as it was \
+ incorrectly accepted on stable for a few releases",
+ suf,
+ ));
+ err.help(
+ "on proc macros, you'll want to use `syn::Index::from` or \
+ `proc_macro::Literal::*_unsuffixed` for code that will desugar \
+ to tuple field access",
+ );
+ err.note(
+ "for more context, see https://github.com/rust-lang/rust/issues/60210",
+ );
+ err
+ } else {
+ diag.struct_span_err(sp, &format!("suffixes on {} are invalid", kind))
+ };
+ err.span_label(sp, format!("invalid suffix `{}`", suf));
+ err.emit();
+ }
+}
+
/// Parses a string representing a raw string literal into its final form. The
/// only operation this does is convert embedded CRLF into a single LF.
fn raw_str_lit(lit: &str) -> String {
- debug!("raw_str_lit: given {}", lit.escape_default());
+ debug!("raw_str_lit: {:?}", lit);
let mut res = String::with_capacity(lit.len());
let mut chars = lit.chars().peekable();
res
}
-// check if `s` looks like i32 or u1234 etc.
+// Checks if `s` looks like i32 or u1234 etc.
fn looks_like_width_suffix(first_chars: &[char], s: &str) -> bool {
- s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
+ s.len() > 1 && s.starts_with(first_chars) && s[1..].chars().all(|c| c.is_ascii_digit())
}
-fn filtered_float_lit(data: Symbol, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
- -> Option<LitKind> {
- debug!("filtered_float_lit: {}, {:?}", data, suffix);
- let suffix = match suffix {
- Some(suffix) => suffix,
- None => return Some(LitKind::FloatUnsuffixed(data)),
- };
-
- Some(match &*suffix.as_str() {
- "f32" => LitKind::Float(data, ast::FloatTy::F32),
- "f64" => LitKind::Float(data, ast::FloatTy::F64),
- suf => {
- err!(diag, |span, diag| {
- if suf.len() >= 2 && looks_like_width_suffix(&['f'], suf) {
- // if it looks like a width, lets try to be helpful.
- let msg = format!("invalid width `{}` for float literal", &suf[1..]);
- diag.struct_span_err(span, &msg).help("valid widths are 32 and 64").emit()
- } else {
- let msg = format!("invalid suffix `{}` for float literal", suf);
- diag.struct_span_err(span, &msg)
- .span_label(span, format!("invalid suffix `{}`", suf))
- .help("valid suffixes are `f32` and `f64`")
- .emit();
- }
- });
+fn strip_underscores(symbol: Symbol) -> Symbol {
+ // Do not allocate a new string unless necessary.
+ let s = symbol.as_str();
+ if s.contains('_') {
+ let mut s = s.to_string();
+ s.retain(|c| c != '_');
+ return Symbol::intern(&s);
+ }
+ symbol
+}
- LitKind::FloatUnsuffixed(data)
+fn filtered_float_lit(symbol: Symbol, suffix: Option<Symbol>, base: u32)
+ -> Result<LitKind, LitError> {
+ debug!("filtered_float_lit: {:?}, {:?}, {:?}", symbol, suffix, base);
+ if base != 10 {
+ return Err(LitError::NonDecimalFloat(base));
+ }
+ Ok(match suffix {
+ Some(suf) => match suf {
+ sym::f32 => LitKind::Float(symbol, ast::FloatTy::F32),
+ sym::f64 => LitKind::Float(symbol, ast::FloatTy::F64),
+ _ => return Err(LitError::InvalidFloatSuffix),
}
+ None => LitKind::FloatUnsuffixed(symbol)
})
}
-fn float_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
- -> Option<LitKind> {
- debug!("float_lit: {:?}, {:?}", s, suffix);
- // FIXME #2252: bounds checking float literals is deferred until trans
-
- // Strip underscores without allocating a new String unless necessary.
- let s2;
- let s = if s.chars().any(|c| c == '_') {
- s2 = s.chars().filter(|&c| c != '_').collect::<String>();
- &s2
- } else {
- s
- };
- filtered_float_lit(Symbol::intern(s), suffix, diag)
+fn float_lit(symbol: Symbol, suffix: Option<Symbol>) -> Result<LitKind, LitError> {
+ debug!("float_lit: {:?}, {:?}", symbol, suffix);
+ filtered_float_lit(strip_underscores(symbol), suffix, 10)
}
-fn integer_lit(s: &str, suffix: Option<Symbol>, diag: Option<(Span, &Handler)>)
- -> Option<LitKind> {
- // s can only be ascii, byte indexing is fine
-
- // Strip underscores without allocating a new String unless necessary.
- let s2;
- let mut s = if s.chars().any(|c| c == '_') {
- s2 = s.chars().filter(|&c| c != '_').collect::<String>();
- &s2
- } else {
- s
- };
-
- debug!("integer_lit: {}, {:?}", s, suffix);
+fn integer_lit(symbol: Symbol, suffix: Option<Symbol>) -> Result<LitKind, LitError> {
+ debug!("integer_lit: {:?}, {:?}", symbol, suffix);
+ let symbol = strip_underscores(symbol);
+ let s = symbol.as_str();
let mut base = 10;
- let orig = s;
- let mut ty = ast::LitIntType::Unsuffixed;
-
- if s.starts_with('0') && s.len() > 1 {
+ if s.len() > 1 && s.as_bytes()[0] == b'0' {
match s.as_bytes()[1] {
b'x' => base = 16,
b'o' => base = 8,
b'b' => base = 2,
- _ => { }
+ _ => {}
}
}
- // 1f64 and 2f32 etc. are valid float literals.
- if let Some(suf) = suffix {
- if looks_like_width_suffix(&['f'], &suf.as_str()) {
- let err = match base {
- 16 => Some("hexadecimal float literal is not supported"),
- 8 => Some("octal float literal is not supported"),
- 2 => Some("binary float literal is not supported"),
- _ => None,
- };
- if let Some(err) = err {
- err!(diag, |span, diag| {
- diag.struct_span_err(span, err)
- .span_label(span, "not supported")
- .emit();
- });
- }
- return filtered_float_lit(Symbol::intern(s), Some(suf), diag)
- }
- }
-
- if base != 10 {
- s = &s[2..];
- }
-
- if let Some(suf) = suffix {
- if suf.as_str().is_empty() {
- err!(diag, |span, diag| diag.span_bug(span, "found empty literal suffix in Some"));
- }
- ty = match &*suf.as_str() {
- "isize" => ast::LitIntType::Signed(ast::IntTy::Isize),
- "i8" => ast::LitIntType::Signed(ast::IntTy::I8),
- "i16" => ast::LitIntType::Signed(ast::IntTy::I16),
- "i32" => ast::LitIntType::Signed(ast::IntTy::I32),
- "i64" => ast::LitIntType::Signed(ast::IntTy::I64),
- "i128" => ast::LitIntType::Signed(ast::IntTy::I128),
- "usize" => ast::LitIntType::Unsigned(ast::UintTy::Usize),
- "u8" => ast::LitIntType::Unsigned(ast::UintTy::U8),
- "u16" => ast::LitIntType::Unsigned(ast::UintTy::U16),
- "u32" => ast::LitIntType::Unsigned(ast::UintTy::U32),
- "u64" => ast::LitIntType::Unsigned(ast::UintTy::U64),
- "u128" => ast::LitIntType::Unsigned(ast::UintTy::U128),
- suf => {
- // i<digits> and u<digits> look like widths, so lets
- // give an error message along those lines
- err!(diag, |span, diag| {
- if looks_like_width_suffix(&['i', 'u'], suf) {
- let msg = format!("invalid width `{}` for integer literal", &suf[1..]);
- diag.struct_span_err(span, &msg)
- .help("valid widths are 8, 16, 32, 64 and 128")
- .emit();
- } else {
- let msg = format!("invalid suffix `{}` for numeric literal", suf);
- diag.struct_span_err(span, &msg)
- .span_label(span, format!("invalid suffix `{}`", suf))
- .help("the suffix must be one of the integral types \
- (`u32`, `isize`, etc)")
- .emit();
- }
- });
-
- ty
- }
+ let ty = match suffix {
+ Some(suf) => match suf {
+ sym::isize => ast::LitIntType::Signed(ast::IntTy::Isize),
+ sym::i8 => ast::LitIntType::Signed(ast::IntTy::I8),
+ sym::i16 => ast::LitIntType::Signed(ast::IntTy::I16),
+ sym::i32 => ast::LitIntType::Signed(ast::IntTy::I32),
+ sym::i64 => ast::LitIntType::Signed(ast::IntTy::I64),
+ sym::i128 => ast::LitIntType::Signed(ast::IntTy::I128),
+ sym::usize => ast::LitIntType::Unsigned(ast::UintTy::Usize),
+ sym::u8 => ast::LitIntType::Unsigned(ast::UintTy::U8),
+ sym::u16 => ast::LitIntType::Unsigned(ast::UintTy::U16),
+ sym::u32 => ast::LitIntType::Unsigned(ast::UintTy::U32),
+ sym::u64 => ast::LitIntType::Unsigned(ast::UintTy::U64),
+ sym::u128 => ast::LitIntType::Unsigned(ast::UintTy::U128),
+ // `1f64` and `2f32` etc. are valid float literals, and
+ // `fxxx` looks more like an invalid float literal than an invalid integer literal.
+ _ if suf.as_str().starts_with('f') => return filtered_float_lit(symbol, suffix, base),
+ _ => return Err(LitError::InvalidIntSuffix),
}
- }
-
- debug!("integer_lit: the type is {:?}, base {:?}, the new string is {:?}, the original \
- string was {:?}, the original suffix was {:?}", ty, base, s, orig, suffix);
-
- Some(match u128::from_str_radix(s, base) {
- Ok(r) => LitKind::Int(r, ty),
- Err(_) => {
- // small bases are lexed as if they were base 10, e.g, the string
- // might be `0b10201`. This will cause the conversion above to fail,
- // but these cases have errors in the lexer: we don't want to emit
- // two errors, and we especially don't want to emit this error since
- // it isn't necessarily true.
- let already_errored = base < 10 &&
- s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
+ _ => ast::LitIntType::Unsuffixed
+ };
- if !already_errored {
- err!(diag, |span, diag| diag.span_err(span, "int literal is too large"));
- }
- LitKind::Int(0, ty)
- }
+ let s = &s[if base != 10 { 2 } else { 0 } ..];
+ u128::from_str_radix(s, base).map(|i| LitKind::Int(i, ty)).map_err(|_| {
+ // Small bases are lexed as if they were base 10, e.g., the string
+ // might be `0b10201`. This will cause the conversion above to fail,
+ // but these kinds of errors are already reported by the lexer.
+ let from_lexer =
+ base < 10 && s.chars().any(|c| c.to_digit(10).map_or(false, |d| d >= base));
+ if from_lexer { LitError::LexerError } else { LitError::IntTooLarge }
})
}
use crate::parse::PResult;
use crate::ThinVec;
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
-use crate::symbol::{keywords, sym, Symbol};
+use crate::symbol::{kw, sym, Symbol};
use errors::{Applicability, DiagnosticBuilder, DiagnosticId, FatalError};
use rustc_target::spec::abi::{self, Abi};
let body = TokenTree::Delimited(
delim_span,
token::Bracket,
- [TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
- TokenTree::Token(sp, token::Eq),
- TokenTree::Token(sp, token::Literal(
- token::StrRaw(Symbol::intern(&stripped), num_of_hashes), None))
+ [
+ TokenTree::Token(sp, token::Ident(ast::Ident::with_empty_ctxt(sym::doc), false)),
+ TokenTree::Token(sp, token::Eq),
+ TokenTree::Token(sp, token::Token::lit(
+ token::StrRaw(num_of_hashes), Symbol::intern(&stripped), None
+ )),
]
.iter().cloned().collect::<TokenStream>().into(),
);
#[derive(Clone, PartialEq)]
crate enum TokenType {
Token(token::Token),
- Keyword(keywords::Keyword),
+ Keyword(Symbol),
Operator,
Lifetime,
Ident,
crate fn to_string(&self) -> String {
match *self {
TokenType::Token(ref t) => format!("`{}`", pprust::token_to_string(t)),
- TokenType::Keyword(kw) => format!("`{}`", kw.name()),
+ TokenType::Keyword(kw) => format!("`{}`", kw),
TokenType::Operator => "an operator".to_string(),
TokenType::Lifetime => "lifetime".to_string(),
TokenType::Ident => "identifier".to_string(),
/// Creates a placeholder argument.
fn dummy_arg(span: Span) -> Arg {
- let ident = Ident::new(keywords::Invalid.name(), span);
+ let ident = Ident::new(kw::Invalid, span);
let pat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(BindingMode::ByValue(Mutability::Immutable), ident, None),
TokenType::Token(token::Semi) => true, // we expect a `;` here
_ => false,
}) && ( // a `;` would be expected before the current keyword
- self.token.is_keyword(keywords::Break) ||
- self.token.is_keyword(keywords::Continue) ||
- self.token.is_keyword(keywords::For) ||
- self.token.is_keyword(keywords::If) ||
- self.token.is_keyword(keywords::Let) ||
- self.token.is_keyword(keywords::Loop) ||
- self.token.is_keyword(keywords::Match) ||
- self.token.is_keyword(keywords::Return) ||
- self.token.is_keyword(keywords::While)
+ self.token.is_keyword(kw::Break) ||
+ self.token.is_keyword(kw::Continue) ||
+ self.token.is_keyword(kw::For) ||
+ self.token.is_keyword(kw::If) ||
+ self.token.is_keyword(kw::Let) ||
+ self.token.is_keyword(kw::Loop) ||
+ self.token.is_keyword(kw::Match) ||
+ self.token.is_keyword(kw::Return) ||
+ self.token.is_keyword(kw::While)
);
let cm = self.sess.source_map();
match (cm.lookup_line(self.span.lo()), cm.lookup_line(sp.lo())) {
is_present
}
- fn check_keyword(&mut self, kw: keywords::Keyword) -> bool {
+ fn check_keyword(&mut self, kw: Symbol) -> bool {
self.expected_tokens.push(TokenType::Keyword(kw));
self.token.is_keyword(kw)
}
/// If the next token is the given keyword, eats it and returns
/// `true`. Otherwise, returns `false`.
- pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
+ pub fn eat_keyword(&mut self, kw: Symbol) -> bool {
if self.check_keyword(kw) {
self.bump();
true
}
}
- fn eat_keyword_noexpect(&mut self, kw: keywords::Keyword) -> bool {
+ fn eat_keyword_noexpect(&mut self, kw: Symbol) -> bool {
if self.token.is_keyword(kw) {
self.bump();
true
/// If the given word is not a keyword, signals an error.
/// If the next token is not the given word, signals an error.
/// Otherwise, eats it.
- fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
+ fn expect_keyword(&mut self, kw: Symbol) -> PResult<'a, ()> {
if !self.eat_keyword(kw) {
self.unexpected()
} else {
}
fn expect_no_suffix(&self, sp: Span, kind: &str, suffix: Option<ast::Name>) {
- literal::expect_no_suffix(sp, &self.sess.span_diagnostic, kind, suffix)
+ literal::expect_no_suffix(&self.sess.span_diagnostic, sp, kind, suffix)
}
/// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
/// Is the current token one of the keywords that signals a bare function type?
fn token_is_bare_fn_keyword(&mut self) -> bool {
- self.check_keyword(keywords::Fn) ||
- self.check_keyword(keywords::Unsafe) ||
- self.check_keyword(keywords::Extern)
+ self.check_keyword(kw::Fn) ||
+ self.check_keyword(kw::Unsafe) ||
+ self.check_keyword(kw::Extern)
}
/// Parses a `TyKind::BareFn` type.
*/
let unsafety = self.parse_unsafety();
- let abi = if self.eat_keyword(keywords::Extern) {
+ let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let (inputs, c_variadic) = self.parse_fn_args(false, true)?;
let ret_ty = self.parse_ret_ty(false)?;
let decl = P(FnDecl {
/// Parses asyncness: `async` or nothing.
fn parse_asyncness(&mut self) -> IsAsync {
- if self.eat_keyword(keywords::Async) {
+ if self.eat_keyword(kw::Async) {
IsAsync::Async {
closure_id: ast::DUMMY_NODE_ID,
return_impl_trait_id: ast::DUMMY_NODE_ID,
/// Parses unsafety: `unsafe` or nothing.
fn parse_unsafety(&mut self) -> Unsafety {
- if self.eat_keyword(keywords::Unsafe) {
+ if self.eat_keyword(kw::Unsafe) {
Unsafety::Unsafe
} else {
Unsafety::Normal
mut attrs: Vec<Attribute>) -> PResult<'a, TraitItem> {
let lo = self.span;
self.eat_bad_pub();
- let (name, node, generics) = if self.eat_keyword(keywords::Type) {
+ let (name, node, generics) = if self.eat_keyword(kw::Type) {
self.parse_trait_item_assoc_ty()?
} else if self.is_const_item() {
- self.expect_keyword(keywords::Const)?;
+ self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
(ident, TraitItemKind::Const(ty, default), ast::Generics::default())
} else if let Some(mac) = self.parse_assoc_macro_invoc("trait", None, &mut false)? {
// trait item macro.
- (keywords::Invalid.ident(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
+ (Ident::invalid(), ast::TraitItemKind::Macro(mac), ast::Generics::default())
} else {
let (constness, unsafety, mut asyncness, abi) = self.parse_fn_front_matter()?;
// Reference
self.expect_and()?;
self.parse_borrowed_pointee()?
- } else if self.eat_keyword_noexpect(keywords::Typeof) {
+ } else if self.eat_keyword_noexpect(kw::Typeof) {
// `typeof(EXPR)`
// In order to not be ambiguous, the type must be surrounded by parens.
self.expect(&token::OpenDelim(token::Paren))?;
};
self.expect(&token::CloseDelim(token::Paren))?;
TyKind::Typeof(e)
- } else if self.eat_keyword(keywords::Underscore) {
+ } else if self.eat_keyword(kw::Underscore) {
// A type to be inferred `_`
TyKind::Infer
} else if self.token_is_bare_fn_keyword() {
// Function pointer type
self.parse_ty_bare_fn(Vec::new())?
- } else if self.check_keyword(keywords::For) {
+ } else if self.check_keyword(kw::For) {
// Function pointer type or bound list (trait object type) starting with a poly-trait.
// `for<'lt> [unsafe] [extern "ABI"] fn (&'lt S) -> T`
// `for<'lt> Trait1<'lt> + Trait2 + 'a`
let parse_plus = allow_plus && self.check_plus();
self.parse_remaining_bounds(lifetime_defs, path, lo, parse_plus)?
}
- } else if self.eat_keyword(keywords::Impl) {
+ } else if self.eat_keyword(kw::Impl) {
// Always parse bounds greedily for better error recovery.
let bounds = self.parse_generic_bounds(None)?;
impl_dyn_multi = bounds.len() > 1 || self.prev_token_kind == PrevTokenKind::Plus;
TyKind::ImplTrait(ast::DUMMY_NODE_ID, bounds)
- } else if self.check_keyword(keywords::Dyn) &&
+ } else if self.check_keyword(kw::Dyn) &&
(self.span.rust_2018() ||
self.look_ahead(1, |t| t.can_begin_bound() &&
!can_continue_type_after_non_fn_ident(t))) {
}
fn parse_ptr(&mut self) -> PResult<'a, MutTy> {
- let mutbl = if self.eat_keyword(keywords::Mut) {
+ let mutbl = if self.eat_keyword(kw::Mut) {
Mutability::Mutable
- } else if self.eat_keyword(keywords::Const) {
+ } else if self.eat_keyword(kw::Const) {
Mutability::Immutable
} else {
let span = self.prev_span;
_ => 0,
}
token::BinOp(token::And) | token::AndAnd => 1,
- _ if self.token.is_keyword(keywords::Mut) => 1,
+ _ if self.token.is_keyword(kw::Mut) => 1,
_ => 0,
};
}
match ty {
Ok(ty) => {
- let ident = Ident::new(keywords::Invalid.name(), self.prev_span);
+ let ident = Ident::new(kw::Invalid, self.prev_span);
let pat = P(Pat {
id: ast::DUMMY_NODE_ID,
node: PatKind::Ident(
fn parse_ident_or_underscore(&mut self) -> PResult<'a, ast::Ident> {
match self.token {
- token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
+ token::Ident(ident, false) if ident.name == kw::Underscore => {
let span = self.span;
self.bump();
Ok(Ident::new(ident.name, span))
// above). `path_span` has the span of that path, or an empty
// span in the case of something like `<T>::Bar`.
let (mut path, path_span);
- if self.eat_keyword(keywords::As) {
+ if self.eat_keyword(kw::As) {
let path_lo = self.span;
path = self.parse_path(PathStyle::Type)?;
path_span = path_lo.to(self.prev_span);
/// Parses mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
- if self.eat_keyword(keywords::Mut) {
+ if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else {
Mutability::Immutable
}
fn parse_field_name(&mut self) -> PResult<'a, Ident> {
- if let token::Literal(token::Integer(name), suffix) = self.token {
+ if let token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) = self.token {
self.expect_no_suffix(self.span, "a tuple index", suffix);
self.bump();
- Ok(Ident::new(name, self.prev_span))
+ Ok(Ident::new(symbol, self.prev_span))
} else {
self.parse_ident_common(false)
}
hi = path.span;
return Ok(self.mk_expr(lo.to(hi), ExprKind::Path(Some(qself), path), attrs));
}
- if self.span.rust_2018() && self.check_keyword(keywords::Async) {
+ if self.span.rust_2018() && self.check_keyword(kw::Async) {
return if self.is_async_block() { // check for `async {` and `async move {`
self.parse_async_block(attrs)
} else {
self.parse_lambda_expr(attrs)
};
}
- if self.check_keyword(keywords::Move) || self.check_keyword(keywords::Static) {
+ if self.check_keyword(kw::Move) || self.check_keyword(kw::Static) {
return self.parse_lambda_expr(attrs);
}
- if self.eat_keyword(keywords::If) {
+ if self.eat_keyword(kw::If) {
return self.parse_if_expr(attrs);
}
- if self.eat_keyword(keywords::For) {
+ if self.eat_keyword(kw::For) {
let lo = self.prev_span;
return self.parse_for_expr(None, lo, attrs);
}
- if self.eat_keyword(keywords::While) {
+ if self.eat_keyword(kw::While) {
let lo = self.prev_span;
return self.parse_while_expr(None, lo, attrs);
}
if let Some(label) = self.eat_label() {
let lo = label.ident.span;
self.expect(&token::Colon)?;
- if self.eat_keyword(keywords::While) {
+ if self.eat_keyword(kw::While) {
return self.parse_while_expr(Some(label), lo, attrs)
}
- if self.eat_keyword(keywords::For) {
+ if self.eat_keyword(kw::For) {
return self.parse_for_expr(Some(label), lo, attrs)
}
- if self.eat_keyword(keywords::Loop) {
+ if self.eat_keyword(kw::Loop) {
return self.parse_loop_expr(Some(label), lo, attrs)
}
if self.token == token::OpenDelim(token::Brace) {
err.span_label(self.span, msg);
return Err(err);
}
- if self.eat_keyword(keywords::Loop) {
+ if self.eat_keyword(kw::Loop) {
let lo = self.prev_span;
return self.parse_loop_expr(None, lo, attrs);
}
- if self.eat_keyword(keywords::Continue) {
+ if self.eat_keyword(kw::Continue) {
let label = self.eat_label();
let ex = ExprKind::Continue(label);
let hi = self.prev_span;
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
- if self.eat_keyword(keywords::Match) {
+ if self.eat_keyword(kw::Match) {
let match_sp = self.prev_span;
return self.parse_match_expr(attrs).map_err(|mut err| {
err.span_label(match_sp, "while parsing this match expression");
err
});
}
- if self.eat_keyword(keywords::Unsafe) {
+ if self.eat_keyword(kw::Unsafe) {
return self.parse_block_expr(
None,
lo,
}
if self.is_try_block() {
let lo = self.span;
- assert!(self.eat_keyword(keywords::Try));
+ assert!(self.eat_keyword(kw::Try));
return self.parse_try_block(lo, attrs);
}
- if self.eat_keyword(keywords::Return) {
+ if self.eat_keyword(kw::Return) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
hi = e.span;
} else {
ex = ExprKind::Ret(None);
}
- } else if self.eat_keyword(keywords::Break) {
+ } else if self.eat_keyword(kw::Break) {
let label = self.eat_label();
let e = if self.token.can_begin_expr()
&& !(self.token == token::OpenDelim(token::Brace)
};
ex = ExprKind::Break(label, e);
hi = self.prev_span;
- } else if self.eat_keyword(keywords::Yield) {
+ } else if self.eat_keyword(kw::Yield) {
if self.token.can_begin_expr() {
let e = self.parse_expr()?;
hi = e.span;
} else {
ex = ExprKind::Yield(None);
}
- } else if self.token.is_keyword(keywords::Let) {
+ } else if self.token.is_keyword(kw::Let) {
// Catch this syntax error here, instead of in `parse_ident`, so
// that we can explicitly mention that let is not to be used as an expression
let mut db = self.fatal("expected expression, found statement (`let`)");
db.span_label(self.span, "expected expression");
db.note("variable declaration using `let` is a statement");
return Err(db);
- } else if self.span.rust_2018() && self.eat_keyword(keywords::Await) {
+ } else if self.span.rust_2018() && self.eat_keyword(kw::Await) {
let (await_hi, e_kind) = self.parse_await_macro_or_alt(lo, self.prev_span)?;
hi = await_hi;
ex = e_kind;
// Assuming we have just parsed `.`, continue parsing into an expression.
fn parse_dot_suffix(&mut self, self_arg: P<Expr>, lo: Span) -> PResult<'a, P<Expr>> {
- if self.span.rust_2018() && self.eat_keyword(keywords::Await) {
+ if self.span.rust_2018() && self.eat_keyword(kw::Await) {
let span = lo.to(self.prev_span);
let await_expr = self.mk_expr(
span,
token::Ident(..) => {
e = self.parse_dot_suffix(e, lo)?;
}
- token::Literal(token::Integer(name), suffix) => {
+ token::Literal(token::Lit { kind: token::Integer, symbol, suffix }) => {
let span = self.span;
self.bump();
- let field = ExprKind::Field(e, Ident::new(name, span));
+ let field = ExprKind::Field(e, Ident::new(symbol, span));
e = self.mk_expr(lo.to(span), field, ThinVec::new());
self.expect_no_suffix(span, "a tuple index", suffix);
}
- token::Literal(token::Float(n), _suf) => {
+ token::Literal(token::Lit { kind: token::Float, symbol, .. }) => {
self.bump();
- let fstr = n.as_str();
- let mut err = self.diagnostic()
- .struct_span_err(self.prev_span, &format!("unexpected token: `{}`", n));
+ let fstr = symbol.as_str();
+ let msg = format!("unexpected token: `{}`", symbol);
+ let mut err = self.diagnostic().struct_span_err(self.prev_span, &msg);
err.span_label(self.prev_span, "unexpected token");
if fstr.chars().all(|x| "0123456789.".contains(x)) {
let float = match fstr.parse::<f64>().ok() {
let (span, e) = self.interpolated_or_expr_span(e)?;
(lo.to(span), ExprKind::AddrOf(m, e))
}
- token::Ident(..) if self.token.is_keyword(keywords::In) => {
+ token::Ident(..) if self.token.is_keyword(kw::In) => {
self.bump();
let place = self.parse_expr_res(
Restrictions::NO_STRUCT_LITERAL,
let blk_expr = self.mk_expr(span, ExprKind::Block(blk, None), ThinVec::new());
(lo.to(span), ExprKind::ObsoleteInPlace(place, blk_expr))
}
- token::Ident(..) if self.token.is_keyword(keywords::Box) => {
+ token::Ident(..) if self.token.is_keyword(kw::Box) => {
self.bump();
let e = self.parse_prefix_expr(None);
let (span, e) = self.interpolated_or_expr_span(e)?;
/// Parses an `if` or `if let` expression (`if` token already eaten).
fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
- if self.check_keyword(keywords::Let) {
+ if self.check_keyword(kw::Let) {
return self.parse_if_let_expr(attrs);
}
let lo = self.prev_span;
// verify that the last statement is either an implicit return (no `;`) or an explicit
// return. This won't catch blocks with an explicit `return`, but that would be caught by
// the dead code lint.
- if self.eat_keyword(keywords::Else) || !cond.returns() {
+ if self.eat_keyword(kw::Else) || !cond.returns() {
let sp = self.sess.source_map().next_point(lo);
let mut err = self.diagnostic()
.struct_span_err(sp, "missing condition for `if` statemement");
})?;
let mut els: Option<P<Expr>> = None;
let mut hi = thn.span;
- if self.eat_keyword(keywords::Else) {
+ if self.eat_keyword(kw::Else) {
let elexpr = self.parse_else_expr()?;
hi = elexpr.span;
els = Some(elexpr);
fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
let lo = self.prev_span;
- self.expect_keyword(keywords::Let)?;
+ self.expect_keyword(kw::Let)?;
let pats = self.parse_pats()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
let thn = self.parse_block()?;
- let (hi, els) = if self.eat_keyword(keywords::Else) {
+ let (hi, els) = if self.eat_keyword(kw::Else) {
let expr = self.parse_else_expr()?;
(expr.span, Some(expr))
} else {
-> PResult<'a, P<Expr>>
{
let lo = self.span;
- let movability = if self.eat_keyword(keywords::Static) {
+ let movability = if self.eat_keyword(kw::Static) {
Movability::Static
} else {
Movability::Movable
} else {
IsAsync::NotAsync
};
- let capture_clause = if self.eat_keyword(keywords::Move) {
+ let capture_clause = if self.eat_keyword(kw::Move) {
CaptureBy::Value
} else {
CaptureBy::Ref
// `else` token already eaten
fn parse_else_expr(&mut self) -> PResult<'a, P<Expr>> {
- if self.eat_keyword(keywords::If) {
+ if self.eat_keyword(kw::If) {
return self.parse_if_expr(ThinVec::new());
} else {
let blk = self.parse_block()?;
// Parse: `for <src_pat> in <src_expr> <src_loop_block>`
let pat = self.parse_top_level_pat()?;
- if !self.eat_keyword(keywords::In) {
+ if !self.eat_keyword(kw::In) {
let in_span = self.prev_span.between(self.span);
let mut err = self.sess.span_diagnostic
.struct_span_err(in_span, "missing `in` in `for` loop");
err.emit();
}
let in_span = self.prev_span;
- if self.eat_keyword(keywords::In) {
+ if self.eat_keyword(kw::In) {
// a common typo: `for _ in in bar {}`
let mut err = self.sess.span_diagnostic.struct_span_err(
self.prev_span,
fn parse_while_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
- if self.token.is_keyword(keywords::Let) {
+ if self.token.is_keyword(kw::Let) {
return self.parse_while_let_expr(opt_label, span_lo, attrs);
}
let cond = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
- self.expect_keyword(keywords::Let)?;
+ self.expect_keyword(kw::Let)?;
let pats = self.parse_pats()?;
self.expect(&token::Eq)?;
let expr = self.parse_expr_res(Restrictions::NO_STRUCT_LITERAL, None)?;
-> PResult<'a, P<Expr>>
{
let span_lo = self.span;
- self.expect_keyword(keywords::Async)?;
- let capture_clause = if self.eat_keyword(keywords::Move) {
+ self.expect_keyword(kw::Async)?;
+ let capture_clause = if self.eat_keyword(kw::Move) {
CaptureBy::Value
} else {
CaptureBy::Ref
{
let (iattrs, body) = self.parse_inner_attrs_and_block()?;
attrs.extend(iattrs);
- if self.eat_keyword(keywords::Catch) {
+ if self.eat_keyword(kw::Catch) {
let mut error = self.struct_span_err(self.prev_span,
"keyword `catch` cannot follow a `try` block");
error.help("try using `match` on the result of the `try` block instead");
crate fn parse_arm(&mut self) -> PResult<'a, Arm> {
let attrs = self.parse_outer_attributes()?;
+ let lo = self.span;
let pats = self.parse_pats()?;
- let guard = if self.eat_keyword(keywords::If) {
+ let guard = if self.eat_keyword(kw::If) {
Some(Guard::If(self.parse_expr()?))
} else {
None
let require_comma = classify::expr_requires_semi_to_be_stmt(&expr)
&& self.token != token::CloseDelim(token::Brace);
+ let hi = self.span;
+
if require_comma {
let cm = self.sess.source_map();
self.expect_one_of(&[token::Comma], &[token::CloseDelim(token::Brace)])
pats,
guard,
body: expr,
+ span: lo.to(hi),
})
}
(pat, fieldname, false)
} else {
// Parsing a pattern of the form "(box) (ref) (mut) fieldname"
- let is_box = self.eat_keyword(keywords::Box);
+ let is_box = self.eat_keyword(kw::Box);
let boxed_span = self.span;
- let is_ref = self.eat_keyword(keywords::Ref);
- let is_mut = self.eat_keyword(keywords::Mut);
+ let is_ref = self.eat_keyword(kw::Ref);
+ let is_mut = self.eat_keyword(kw::Mut);
let fieldname = self.parse_ident()?;
hi = self.prev_span;
pat = PatKind::Slice(before, slice, after);
}
// At this point, token != &, &&, (, [
- _ => if self.eat_keyword(keywords::Underscore) {
+ _ => if self.eat_keyword(kw::Underscore) {
// Parse _
pat = PatKind::Wild;
- } else if self.eat_keyword(keywords::Mut) {
+ } else if self.eat_keyword(kw::Mut) {
// Parse mut ident @ pat / mut ref ident @ pat
let mutref_span = self.prev_span.to(self.span);
- let binding_mode = if self.eat_keyword(keywords::Ref) {
+ let binding_mode = if self.eat_keyword(kw::Ref) {
self.diagnostic()
.struct_span_err(mutref_span, "the order of `mut` and `ref` is incorrect")
.span_suggestion(
BindingMode::ByValue(Mutability::Mutable)
};
pat = self.parse_pat_ident(binding_mode)?;
- } else if self.eat_keyword(keywords::Ref) {
+ } else if self.eat_keyword(kw::Ref) {
// Parse ref ident @ pat / ref mut ident @ pat
let mutbl = self.parse_mutability();
pat = self.parse_pat_ident(BindingMode::ByRef(mutbl))?;
- } else if self.eat_keyword(keywords::Box) {
+ } else if self.eat_keyword(kw::Box) {
// Parse box pat
let subpat = self.parse_pat_with_range_pat(false, None)?;
pat = PatKind::Box(subpat);
}
fn is_async_block(&self) -> bool {
- self.token.is_keyword(keywords::Async) &&
+ self.token.is_keyword(kw::Async) &&
(
( // `async move {`
- self.look_ahead(1, |t| t.is_keyword(keywords::Move)) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Move)) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace))
) || ( // `async {`
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace))
}
fn is_async_fn(&self) -> bool {
- self.token.is_keyword(keywords::Async) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
+ self.token.is_keyword(kw::Async) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Fn))
}
fn is_do_catch_block(&self) -> bool {
- self.token.is_keyword(keywords::Do) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Catch)) &&
+ self.token.is_keyword(kw::Do) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Catch)) &&
self.look_ahead(2, |t| *t == token::OpenDelim(token::Brace)) &&
!self.restrictions.contains(Restrictions::NO_STRUCT_LITERAL)
}
fn is_try_block(&self) -> bool {
- self.token.is_keyword(keywords::Try) &&
+ self.token.is_keyword(kw::Try) &&
self.look_ahead(1, |t| *t == token::OpenDelim(token::Brace)) &&
self.span.rust_2018() &&
// prevent `while try {} {}`, `if try {} {} else {}`, etc.
}
fn is_union_item(&self) -> bool {
- self.token.is_keyword(keywords::Union) &&
+ self.token.is_keyword(kw::Union) &&
self.look_ahead(1, |t| t.is_ident() && !t.is_reserved_ident())
}
fn is_crate_vis(&self) -> bool {
- self.token.is_keyword(keywords::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
+ self.token.is_keyword(kw::Crate) && self.look_ahead(1, |t| t != &token::ModSep)
}
fn is_existential_type_decl(&self) -> bool {
- self.token.is_keyword(keywords::Existential) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Type))
+ self.token.is_keyword(kw::Existential) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Type))
}
fn is_auto_trait_item(&self) -> bool {
// auto trait
- (self.token.is_keyword(keywords::Auto)
- && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
+ (self.token.is_keyword(kw::Auto)
+ && self.look_ahead(1, |t| t.is_keyword(kw::Trait)))
|| // unsafe auto trait
- (self.token.is_keyword(keywords::Unsafe) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Auto)) &&
- self.look_ahead(2, |t| t.is_keyword(keywords::Trait)))
+ (self.token.is_keyword(kw::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Auto)) &&
+ self.look_ahead(2, |t| t.is_keyword(kw::Trait)))
}
fn eat_macro_def(&mut self, attrs: &[Attribute], vis: &Visibility, lo: Span)
-> PResult<'a, Option<P<Item>>> {
let token_lo = self.span;
let (ident, def) = match self.token {
- token::Ident(ident, false) if ident.name == keywords::Macro.name() => {
+ token::Ident(ident, false) if ident.name == kw::Macro => {
self.bump();
let ident = self.parse_ident()?;
let tokens = if self.check(&token::OpenDelim(token::Brace)) {
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
- Ok(Some(if self.eat_keyword(keywords::Let) {
+ Ok(Some(if self.eat_keyword(kw::Let) {
Stmt {
id: ast::DUMMY_NODE_ID,
node: StmtKind::Local(self.parse_local(attrs.into())?),
// it's a macro invocation
let id = match self.token {
- token::OpenDelim(_) => keywords::Invalid.ident(), // no special identifier
+ token::OpenDelim(_) => Ident::invalid(), // no special identifier
_ => self.parse_ident()?,
};
_ => {
// we only expect an ident if we didn't parse one
// above.
- let ident_str = if id.name == keywords::Invalid.name() {
+ let ident_str = if id.name == kw::Invalid {
"identifier, "
} else {
""
MacStmtStyle::NoBraces
};
- if id.name == keywords::Invalid.name() {
+ if id.name == kw::Invalid {
let mac = respan(lo.to(hi), Mac_ { path: pth, tts, delim });
let node = if delim == MacDelimiter::Brace ||
self.token == token::Semi || self.token == token::Eof {
let tok = self.this_token_descr();
let mut e = self.span_fatal(sp, &format!("expected `{{`, found {}", tok));
let do_not_suggest_help =
- self.token.is_keyword(keywords::In) || self.token == token::Colon;
+ self.token.is_keyword(kw::In) || self.token == token::Colon;
if self.token.is_ident_named("and") {
e.span_suggestion_short(
let is_bound_start = self.check_path() || self.check_lifetime() ||
self.check(&token::Not) || // used for error reporting only
self.check(&token::Question) ||
- self.check_keyword(keywords::For) ||
+ self.check_keyword(kw::For) ||
self.check(&token::OpenDelim(token::Paren));
if is_bound_start {
let lo = self.span;
}
fn parse_const_param(&mut self, preceding_attrs: Vec<Attribute>) -> PResult<'a, GenericParam> {
- self.expect_keyword(keywords::Const)?;
+ self.expect_keyword(kw::Const)?;
let ident = self.parse_ident()?;
self.expect(&token::Colon)?;
let ty = self.parse_ty()?;
bounds,
kind: ast::GenericParamKind::Lifetime,
});
- } else if self.check_keyword(keywords::Const) {
+ } else if self.check_keyword(kw::Const) {
// Parse const parameter.
params.push(self.parse_const_param(attrs)?);
} else if self.check_ident() {
span: syntax_pos::DUMMY_SP,
};
- if !self.eat_keyword(keywords::Where) {
+ if !self.eat_keyword(kw::Where) {
return Ok(where_clause);
}
let lo = self.prev_span;
_ => unreachable!()
};
let isolated_self = |this: &mut Self, n| {
- this.look_ahead(n, |t| t.is_keyword(keywords::SelfLower)) &&
+ this.look_ahead(n, |t| t.is_keyword(kw::SelfLower)) &&
this.look_ahead(n + 1, |t| t != &token::ModSep)
};
(if isolated_self(self, 1) {
self.bump();
SelfKind::Region(None, Mutability::Immutable)
- } else if self.look_ahead(1, |t| t.is_keyword(keywords::Mut)) &&
+ } else if self.look_ahead(1, |t| t.is_keyword(kw::Mut)) &&
isolated_self(self, 2) {
self.bump();
self.bump();
let lt = self.expect_lifetime();
SelfKind::Region(Some(lt), Mutability::Immutable)
} else if self.look_ahead(1, |t| t.is_lifetime()) &&
- self.look_ahead(2, |t| t.is_keyword(keywords::Mut)) &&
+ self.look_ahead(2, |t| t.is_keyword(kw::Mut)) &&
isolated_self(self, 3) {
self.bump();
let lt = self.expect_lifetime();
} else {
SelfKind::Value(Mutability::Immutable)
}, eself_ident, eself_hi)
- } else if self.token.is_keyword(keywords::Mut) &&
+ } else if self.token.is_keyword(kw::Mut) &&
isolated_self(self, 1) {
// mut self
// mut self: TYPE
/// Returns `true` if we are looking at `const ID`
/// (returns `false` for things like `const fn`, etc.).
fn is_const_item(&self) -> bool {
- self.token.is_keyword(keywords::Const) &&
- !self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
- !self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
+ self.token.is_keyword(kw::Const) &&
+ !self.look_ahead(1, |t| t.is_keyword(kw::Fn)) &&
+ !self.look_ahead(1, |t| t.is_keyword(kw::Unsafe))
}
/// Parses all the "front matter" for a `fn` declaration, up to
Abi
)>
{
- let is_const_fn = self.eat_keyword(keywords::Const);
+ let is_const_fn = self.eat_keyword(kw::Const);
let const_span = self.prev_span;
let unsafety = self.parse_unsafety();
let asyncness = self.parse_asyncness();
let (constness, unsafety, abi) = if is_const_fn {
(respan(const_span, Constness::Const), unsafety, Abi::Rust)
} else {
- let abi = if self.eat_keyword(keywords::Extern) {
+ let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
(respan(self.prev_span, Constness::NotConst), unsafety, abi)
};
- if !self.eat_keyword(keywords::Fn) {
+ if !self.eat_keyword(kw::Fn) {
// It is possible for `expect_one_of` to recover given the contents of
// `self.expected_tokens`, therefore, do not use `self.unexpected()` which doesn't
// account for this.
} else if self.is_const_item() {
// This parses the grammar:
// ImplItemConst = "const" Ident ":" Ty "=" Expr ";"
- self.expect_keyword(keywords::Const)?;
+ self.expect_keyword(kw::Const)?;
let name = self.parse_ident()?;
self.expect(&token::Colon)?;
let typ = self.parse_ty()?;
// code copied from parse_macro_use_or_failure... abstraction!
if let Some(mac) = self.parse_assoc_macro_invoc("impl", Some(vis), at_end)? {
// method macro
- Ok((keywords::Invalid.ident(), vec![], ast::Generics::default(),
+ Ok((Ident::invalid(), vec![], ast::Generics::default(),
ast::ImplItemKind::Macro(mac)))
} else {
let (constness, unsafety, mut asyncness, abi) = self.parse_fn_front_matter()?;
self.look_ahead(1, |t| t.is_lifetime() || t.is_ident()) &&
self.look_ahead(2, |t| t == &token::Gt || t == &token::Comma ||
t == &token::Colon || t == &token::Eq) ||
- self.look_ahead(1, |t| t.is_keyword(keywords::Const)))
+ self.look_ahead(1, |t| t.is_keyword(kw::Const)))
}
fn parse_impl_body(&mut self) -> PResult<'a, (Vec<ImplItem>, Vec<Attribute>)> {
};
// Parse both types and traits as a type, then reinterpret if necessary.
- let err_path = |span| ast::Path::from_ident(Ident::new(keywords::Invalid.name(), span));
- let ty_first = if self.token.is_keyword(keywords::For) &&
+ let err_path = |span| ast::Path::from_ident(Ident::new(kw::Invalid, span));
+ let ty_first = if self.token.is_keyword(kw::For) &&
self.look_ahead(1, |t| t != &token::Lt) {
let span = self.prev_span.between(self.span);
self.struct_span_err(span, "missing trait in a trait impl").emit();
};
// If `for` is missing we try to recover.
- let has_for = self.eat_keyword(keywords::For);
+ let has_for = self.eat_keyword(kw::For);
let missing_for_span = self.prev_span.between(self.span);
let ty_second = if self.token == token::DotDot {
}
};
- Ok((keywords::Invalid.ident(), item_kind, Some(attrs)))
+ Ok((Ident::invalid(), item_kind, Some(attrs)))
}
fn parse_late_bound_lifetime_defs(&mut self) -> PResult<'a, Vec<GenericParam>> {
- if self.eat_keyword(keywords::For) {
+ if self.eat_keyword(kw::For) {
self.expect_lt()?;
let params = self.parse_generic_params()?;
self.expect_gt()?;
// Otherwise if we look ahead and see a paren we parse a tuple-style
// struct.
- let vdata = if self.token.is_keyword(keywords::Where) {
+ let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
if self.eat(&token::Semi) {
// If we see a: `struct Foo<T> where T: Copy;` style decl.
let mut generics = self.parse_generics()?;
- let vdata = if self.token.is_keyword(keywords::Where) {
+ let vdata = if self.token.is_keyword(kw::Where) {
generics.where_clause = self.parse_where_clause()?;
let (fields, recovered) = self.parse_record_struct_body()?;
VariantData::Struct(fields, recovered)
pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
maybe_whole!(self, NtVis, |x| x);
- self.expected_tokens.push(TokenType::Keyword(keywords::Crate));
+ self.expected_tokens.push(TokenType::Keyword(kw::Crate));
if self.is_crate_vis() {
self.bump(); // `crate`
return Ok(respan(self.prev_span, VisibilityKind::Crate(CrateSugar::JustCrate)));
}
- if !self.eat_keyword(keywords::Pub) {
+ if !self.eat_keyword(kw::Pub) {
// We need a span for our `Spanned<VisibilityKind>`, but there's inherently no
// keyword to grab a span from for inherited visibility; an empty span at the
// beginning of the current token would seem to be the "Schelling span".
// `()` or a tuple might be allowed. For example, `struct Struct(pub (), pub (usize));`.
// Because of this, we only `bump` the `(` if we're assured it is appropriate to do so
// by the following tokens.
- if self.look_ahead(1, |t| t.is_keyword(keywords::Crate)) &&
+ if self.look_ahead(1, |t| t.is_keyword(kw::Crate)) &&
self.look_ahead(2, |t| t != &token::ModSep) // account for `pub(crate::foo)`
{
// `pub(crate)`
VisibilityKind::Crate(CrateSugar::PubCrate),
);
return Ok(vis)
- } else if self.look_ahead(1, |t| t.is_keyword(keywords::In)) {
+ } else if self.look_ahead(1, |t| t.is_keyword(kw::In)) {
// `pub(in path)`
self.bump(); // `(`
self.bump(); // `in`
});
return Ok(vis)
} else if self.look_ahead(2, |t| t == &token::CloseDelim(token::Paren)) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Super) ||
- t.is_keyword(keywords::SelfLower))
+ self.look_ahead(1, |t| t.is_keyword(kw::Super) ||
+ t.is_keyword(kw::SelfLower))
{
// `pub(self)` or `pub(super)`
self.bump(); // `(`
/// Parses defaultness (i.e., `default` or nothing).
fn parse_defaultness(&mut self) -> Defaultness {
// `pub` is included for better error messages
- if self.check_keyword(keywords::Default) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Impl) ||
- t.is_keyword(keywords::Const) ||
- t.is_keyword(keywords::Fn) ||
- t.is_keyword(keywords::Unsafe) ||
- t.is_keyword(keywords::Extern) ||
- t.is_keyword(keywords::Type) ||
- t.is_keyword(keywords::Pub)) {
+ if self.check_keyword(kw::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Impl) ||
+ t.is_keyword(kw::Const) ||
+ t.is_keyword(kw::Fn) ||
+ t.is_keyword(kw::Unsafe) ||
+ t.is_keyword(kw::Extern) ||
+ t.is_keyword(kw::Type) ||
+ t.is_keyword(kw::Pub)) {
self.bump(); // `default`
Defaultness::Default
} else {
/// Parses a function declaration from a foreign module.
fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let (ident, mut generics) = self.parse_fn_header()?;
let decl = self.parse_fn_decl(true)?;
/// Parses a type from a foreign module.
fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
- self.expect_keyword(keywords::Type)?;
+ self.expect_keyword(kw::Type)?;
let ident = self.parse_ident()?;
let hi = self.span;
let error_msg = "crate name using dashes are not valid in `extern crate` statements";
let suggestion_msg = "if the original crate name uses dashes you need to use underscores \
in the code";
- let mut ident = if self.token.is_keyword(keywords::SelfLower) {
+ let mut ident = if self.token.is_keyword(kw::SelfLower) {
self.parse_path_segment_ident()
} else {
self.parse_ident()
abi,
items: foreign_items
};
- let invalid = keywords::Invalid.ident();
+ let invalid = Ident::invalid();
Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
}
fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
// This parses the grammar:
// Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
- if self.check_keyword(keywords::Type) ||
- self.check_keyword(keywords::Existential) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Type)) {
- let existential = self.eat_keyword(keywords::Existential);
- assert!(self.eat_keyword(keywords::Type));
+ if self.check_keyword(kw::Type) ||
+ self.check_keyword(kw::Existential) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Type)) {
+ let existential = self.eat_keyword(kw::Existential);
+ assert!(self.eat_keyword(kw::Type));
Some(self.parse_existential_or_alias(existential))
} else {
None
/// the `extern` keyword, if one is found.
fn parse_opt_abi(&mut self) -> PResult<'a, Option<Abi>> {
match self.token {
- token::Literal(token::Str_(s), suf) | token::Literal(token::StrRaw(s, _), suf) => {
+ token::Literal(token::Lit { kind: token::Str, symbol, suffix }) |
+ token::Literal(token::Lit { kind: token::StrRaw(..), symbol, suffix }) => {
let sp = self.span;
- self.expect_no_suffix(sp, "an ABI spec", suf);
+ self.expect_no_suffix(sp, "an ABI spec", suffix);
self.bump();
- match abi::lookup(&s.as_str()) {
+ match abi::lookup(&symbol.as_str()) {
Some(abi) => Ok(Some(abi)),
None => {
let prev_span = self.prev_span;
prev_span,
E0703,
"invalid ABI: found `{}`",
- s);
+ symbol);
err.span_label(prev_span, "invalid ABI");
err.help(&format!("valid ABIs: {}", abi::all_names().join(", ")));
err.emit();
}
fn is_static_global(&mut self) -> bool {
- if self.check_keyword(keywords::Static) {
+ if self.check_keyword(kw::Static) {
// Check if this could be a closure
!self.look_ahead(1, |token| {
- if token.is_keyword(keywords::Move) {
+ if token.is_keyword(kw::Move) {
return true;
}
match *token {
let visibility = self.parse_visibility(false)?;
- if self.eat_keyword(keywords::Use) {
+ if self.eat_keyword(kw::Use) {
// USE ITEM
let item_ = ItemKind::Use(P(self.parse_use_tree()?));
self.expect(&token::Semi)?;
let span = lo.to(self.prev_span);
- let item = self.mk_item(span, keywords::Invalid.ident(), item_, visibility, attrs);
+ let item =
+ self.mk_item(span, Ident::invalid(), item_, visibility, attrs);
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Extern) {
- if self.eat_keyword(keywords::Crate) {
+ if self.eat_keyword(kw::Extern) {
+ if self.eat_keyword(kw::Crate) {
return Ok(Some(self.parse_item_extern_crate(lo, visibility, attrs)?));
}
let opt_abi = self.parse_opt_abi()?;
- if self.eat_keyword(keywords::Fn) {
+ if self.eat_keyword(kw::Fn) {
// EXTERN FUNCTION ITEM
let fn_span = self.prev_span;
let abi = opt_abi.unwrap_or(Abi::C);
if self.is_static_global() {
self.bump();
// STATIC ITEM
- let m = if self.eat_keyword(keywords::Mut) {
+ let m = if self.eat_keyword(kw::Mut) {
Mutability::Mutable
} else {
Mutability::Immutable
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Const) {
+ if self.eat_keyword(kw::Const) {
let const_span = self.prev_span;
- if self.check_keyword(keywords::Fn)
- || (self.check_keyword(keywords::Unsafe)
- && self.look_ahead(1, |t| t.is_keyword(keywords::Fn))) {
+ if self.check_keyword(kw::Fn)
+ || (self.check_keyword(kw::Unsafe)
+ && self.look_ahead(1, |t| t.is_keyword(kw::Fn))) {
// CONST FUNCTION ITEM
let unsafety = self.parse_unsafety();
self.bump();
}
// CONST ITEM
- if self.eat_keyword(keywords::Mut) {
+ if self.eat_keyword(kw::Mut) {
let prev_span = self.prev_span;
let mut err = self.diagnostic()
.struct_span_err(prev_span, "const globals cannot be mutable");
// `unsafe async fn` or `async fn`
if (
- self.check_keyword(keywords::Unsafe) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Async))
+ self.check_keyword(kw::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Async))
) || (
- self.check_keyword(keywords::Async) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Fn))
+ self.check_keyword(kw::Async) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Fn))
)
{
// ASYNC FUNCTION ITEM
let unsafety = self.parse_unsafety();
- self.expect_keyword(keywords::Async)?;
+ self.expect_keyword(kw::Async)?;
let async_span = self.prev_span;
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(unsafety,
}
return Ok(Some(item));
}
- if self.check_keyword(keywords::Unsafe) &&
- (self.look_ahead(1, |t| t.is_keyword(keywords::Trait)) ||
- self.look_ahead(1, |t| t.is_keyword(keywords::Auto)))
+ if self.check_keyword(kw::Unsafe) &&
+ (self.look_ahead(1, |t| t.is_keyword(kw::Trait)) ||
+ self.look_ahead(1, |t| t.is_keyword(kw::Auto)))
{
// UNSAFE TRAIT ITEM
self.bump(); // `unsafe`
- let is_auto = if self.eat_keyword(keywords::Trait) {
+ let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
} else {
- self.expect_keyword(keywords::Auto)?;
- self.expect_keyword(keywords::Trait)?;
+ self.expect_keyword(kw::Auto)?;
+ self.expect_keyword(kw::Trait)?;
IsAuto::Yes
};
let (ident, item_, extra_attrs) =
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.check_keyword(keywords::Impl) ||
- self.check_keyword(keywords::Unsafe) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) ||
- self.check_keyword(keywords::Default) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Impl)) ||
- self.check_keyword(keywords::Default) &&
- self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe)) {
+ if self.check_keyword(kw::Impl) ||
+ self.check_keyword(kw::Unsafe) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Impl)) ||
+ self.check_keyword(kw::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Impl)) ||
+ self.check_keyword(kw::Default) &&
+ self.look_ahead(1, |t| t.is_keyword(kw::Unsafe)) {
// IMPL ITEM
let defaultness = self.parse_defaultness();
let unsafety = self.parse_unsafety();
- self.expect_keyword(keywords::Impl)?;
+ self.expect_keyword(kw::Impl)?;
let (ident, item, extra_attrs) = self.parse_item_impl(unsafety, defaultness)?;
let span = lo.to(self.prev_span);
return Ok(Some(self.mk_item(span, ident, item, visibility,
maybe_append(attrs, extra_attrs))));
}
- if self.check_keyword(keywords::Fn) {
+ if self.check_keyword(kw::Fn) {
// FUNCTION ITEM
self.bump();
let fn_span = self.prev_span;
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.check_keyword(keywords::Unsafe)
+ if self.check_keyword(kw::Unsafe)
&& self.look_ahead(1, |t| *t != token::OpenDelim(token::Brace)) {
// UNSAFE FUNCTION ITEM
self.bump(); // `unsafe`
// `{` is also expected after `unsafe`, in case of error, include it in the diagnostic
self.check(&token::OpenDelim(token::Brace));
- let abi = if self.eat_keyword(keywords::Extern) {
+ let abi = if self.eat_keyword(kw::Extern) {
self.parse_opt_abi()?.unwrap_or(Abi::C)
} else {
Abi::Rust
};
- self.expect_keyword(keywords::Fn)?;
+ self.expect_keyword(kw::Fn)?;
let fn_span = self.prev_span;
let (ident, item_, extra_attrs) =
self.parse_item_fn(Unsafety::Unsafe,
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Mod) {
+ if self.eat_keyword(kw::Mod) {
// MODULE ITEM
let (ident, item_, extra_attrs) =
self.parse_item_mod(&attrs[..])?;
attrs);
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Enum) {
+ if self.eat_keyword(kw::Enum) {
// ENUM ITEM
let (ident, item_, extra_attrs) = self.parse_item_enum()?;
let prev_span = self.prev_span;
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.check_keyword(keywords::Trait)
- || (self.check_keyword(keywords::Auto)
- && self.look_ahead(1, |t| t.is_keyword(keywords::Trait)))
+ if self.check_keyword(kw::Trait)
+ || (self.check_keyword(kw::Auto)
+ && self.look_ahead(1, |t| t.is_keyword(kw::Trait)))
{
- let is_auto = if self.eat_keyword(keywords::Trait) {
+ let is_auto = if self.eat_keyword(kw::Trait) {
IsAuto::No
} else {
- self.expect_keyword(keywords::Auto)?;
- self.expect_keyword(keywords::Trait)?;
+ self.expect_keyword(kw::Auto)?;
+ self.expect_keyword(kw::Trait)?;
IsAuto::Yes
};
// TRAIT ITEM
maybe_append(attrs, extra_attrs));
return Ok(Some(item));
}
- if self.eat_keyword(keywords::Struct) {
+ if self.eat_keyword(kw::Struct) {
// STRUCT ITEM
let (ident, item_, extra_attrs) = self.parse_item_struct()?;
let prev_span = self.prev_span;
// FOREIGN STATIC ITEM
// Treat `const` as `static` for error recovery, but don't add it to expected tokens.
- if self.check_keyword(keywords::Static) || self.token.is_keyword(keywords::Const) {
- if self.token.is_keyword(keywords::Const) {
+ if self.check_keyword(kw::Static) || self.token.is_keyword(kw::Const) {
+ if self.token.is_keyword(kw::Const) {
self.diagnostic()
.struct_span_err(self.span, "extern items cannot be `const`")
.span_suggestion(
return Ok(self.parse_item_foreign_static(visibility, lo, attrs)?);
}
// FOREIGN FUNCTION ITEM
- if self.check_keyword(keywords::Fn) {
+ if self.check_keyword(kw::Fn) {
return Ok(self.parse_item_foreign_fn(visibility, lo, attrs)?);
}
// FOREIGN TYPE ITEM
- if self.check_keyword(keywords::Type) {
+ if self.check_keyword(kw::Type) {
return Ok(self.parse_item_foreign_type(visibility, lo, attrs)?);
}
Some(mac) => {
Ok(
ForeignItem {
- ident: keywords::Invalid.ident(),
+ ident: Ident::invalid(),
span: lo.to(self.prev_span),
id: ast::DUMMY_NODE_ID,
attrs,
let id = if self.token.is_ident() {
self.parse_ident()?
} else {
- keywords::Invalid.ident() // no special identifier
+ Ident::invalid() // no special identifier
};
// eat a matched-delimiter token tree:
let (delim, tts) = self.expect_delimited_token_tree()?;
}
fn parse_rename(&mut self) -> PResult<'a, Option<Ident>> {
- if self.eat_keyword(keywords::As) {
+ if self.eat_keyword(kw::As) {
self.parse_ident_or_underscore().map(Some)
} else {
Ok(None)
pub fn parse_optional_str(&mut self) -> Option<(Symbol, ast::StrStyle, Option<ast::Name>)> {
let ret = match self.token {
- token::Literal(token::Str_(s), suf) => (s, ast::StrStyle::Cooked, suf),
- token::Literal(token::StrRaw(s, n), suf) => (s, ast::StrStyle::Raw(n), suf),
+ token::Literal(token::Lit { kind: token::Str, symbol, suffix }) =>
+ (symbol, ast::StrStyle::Cooked, suffix),
+ token::Literal(token::Lit { kind: token::StrRaw(n), symbol, suffix }) =>
+ (symbol, ast::StrStyle::Raw(n), suffix),
_ => return None
};
self.bump();
pub use BinOpToken::*;
pub use Nonterminal::*;
pub use DelimToken::*;
-pub use Lit::*;
+pub use LitKind::*;
pub use Token::*;
use crate::ast::{self};
use crate::parse::ParseSess;
use crate::print::pprust;
use crate::ptr::P;
-use crate::symbol::keywords;
+use crate::symbol::kw;
use crate::syntax::parse::parse_stream_from_source_str;
use crate::tokenstream::{self, DelimSpan, TokenStream, TokenTree};
}
}
-#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
-pub enum Lit {
- Bool(ast::Name), // AST only, must never appear in a `Token`
- Byte(ast::Name),
- Char(ast::Name),
- Err(ast::Name),
- Integer(ast::Name),
- Float(ast::Name),
- Str_(ast::Name),
- StrRaw(ast::Name, u16), /* raw str delimited by n hash symbols */
- ByteStr(ast::Name),
- ByteStrRaw(ast::Name, u16), /* raw byte str delimited by n hash symbols */
+#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub enum LitKind {
+ Bool, // AST only, must never appear in a `Token`
+ Byte,
+ Char,
+ Integer,
+ Float,
+ Str,
+ StrRaw(u16), // raw string delimited by `n` hash symbols
+ ByteStr,
+ ByteStrRaw(u16), // raw byte string delimited by `n` hash symbols
+ Err,
}
-#[cfg(target_arch = "x86_64")]
-static_assert_size!(Lit, 8);
+/// A literal token.
+#[derive(Clone, Copy, PartialEq, RustcEncodable, RustcDecodable, Debug)]
+pub struct Lit {
+ pub kind: LitKind,
+ pub symbol: Symbol,
+ pub suffix: Option<Symbol>,
+}
-impl Lit {
- crate fn literal_name(&self) -> &'static str {
- match *self {
- Bool(_) => panic!("literal token contains `Lit::Bool`"),
- Byte(_) => "byte literal",
- Char(_) => "char literal",
- Err(_) => "invalid literal",
- Integer(_) => "integer literal",
- Float(_) => "float literal",
- Str_(_) | StrRaw(..) => "string literal",
- ByteStr(_) | ByteStrRaw(..) => "byte string literal"
+impl LitKind {
+ /// An English article for the literal token kind.
+ crate fn article(self) -> &'static str {
+ match self {
+ Integer | Err => "an",
+ _ => "a",
}
}
- crate fn may_have_suffix(&self) -> bool {
- match *self {
- Integer(..) | Float(..) => true,
+ crate fn descr(self) -> &'static str {
+ match self {
+ Bool => panic!("literal token contains `Lit::Bool`"),
+ Byte => "byte",
+ Char => "char",
+ Integer => "integer",
+ Float => "float",
+ Str | StrRaw(..) => "string",
+ ByteStr | ByteStrRaw(..) => "byte string",
+ Err => "error",
+ }
+ }
+
+ crate fn may_have_suffix(self) -> bool {
+ match self {
+ Integer | Float | Err => true,
_ => false,
}
}
+}
- // See comments in `Nonterminal::to_tokenstream` for why we care about
- // *probably* equal here rather than actual equality
- fn probably_equal_for_proc_macro(&self, other: &Lit) -> bool {
- mem::discriminant(self) == mem::discriminant(other)
+impl Lit {
+ pub fn new(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Lit {
+ Lit { kind, symbol, suffix }
}
}
!ident_token.is_reserved_ident() ||
ident_token.is_path_segment_keyword() ||
[
- keywords::Async.name(),
+ kw::Async,
// FIXME: remove when `await!(..)` syntax is removed
// https://github.com/rust-lang/rust/issues/60610
- keywords::Await.name(),
-
- keywords::Do.name(),
- keywords::Box.name(),
- keywords::Break.name(),
- keywords::Continue.name(),
- keywords::False.name(),
- keywords::For.name(),
- keywords::If.name(),
- keywords::Loop.name(),
- keywords::Match.name(),
- keywords::Move.name(),
- keywords::Return.name(),
- keywords::True.name(),
- keywords::Unsafe.name(),
- keywords::While.name(),
- keywords::Yield.name(),
- keywords::Static.name(),
+ kw::Await,
+
+ kw::Do,
+ kw::Box,
+ kw::Break,
+ kw::Continue,
+ kw::False,
+ kw::For,
+ kw::If,
+ kw::Loop,
+ kw::Match,
+ kw::Move,
+ kw::Return,
+ kw::True,
+ kw::Unsafe,
+ kw::While,
+ kw::Yield,
+ kw::Static,
].contains(&ident.name)
}
!ident_token.is_reserved_ident() ||
ident_token.is_path_segment_keyword() ||
[
- keywords::Underscore.name(),
- keywords::For.name(),
- keywords::Impl.name(),
- keywords::Fn.name(),
- keywords::Unsafe.name(),
- keywords::Extern.name(),
- keywords::Typeof.name(),
- keywords::Dyn.name(),
+ kw::Underscore,
+ kw::For,
+ kw::Impl,
+ kw::Fn,
+ kw::Unsafe,
+ kw::Extern,
+ kw::Typeof,
+ kw::Dyn,
].contains(&ident.name)
}
CloseDelim(DelimToken),
/* Literals */
- Literal(Lit, Option<ast::Name>),
+ Literal(Lit),
/* Name components */
Ident(ast::Ident, /* is_raw */ bool),
/// Returns `true` if the token can appear at the start of a generic bound.
crate fn can_begin_bound(&self) -> bool {
- self.is_path_start() || self.is_lifetime() || self.is_keyword(keywords::For) ||
+ self.is_path_start() || self.is_lifetime() || self.is_keyword(kw::For) ||
self == &Question || self == &OpenDelim(Paren)
}
+ pub fn lit(kind: LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Token {
+ Literal(Lit::new(kind, symbol, suffix))
+ }
+
/// Returns `true` if the token is any literal
crate fn is_lit(&self) -> bool {
match *self {
}
}
+ crate fn expect_lit(&self) -> Lit {
+ match *self {
+ Literal(lit) => lit,
+ _=> panic!("`expect_lit` called on non-literal"),
+ }
+ }
+
/// Returns `true` if the token is any literal, a minus (which can prefix a literal,
/// for example a '-42', or one of the boolean idents).
crate fn can_begin_literal_or_bool(&self) -> bool {
match *self {
Literal(..) => true,
BinOp(Minus) => true,
- Ident(ident, false) if ident.name == keywords::True.name() => true,
- Ident(ident, false) if ident.name == keywords::False.name() => true,
+ Ident(ident, false) if ident.name == kw::True => true,
+ Ident(ident, false) if ident.name == kw::False => true,
Interpolated(ref nt) => match **nt {
NtLiteral(..) => true,
_ => false,
/// Returns `true` if the token is either the `mut` or `const` keyword.
crate fn is_mutability(&self) -> bool {
- self.is_keyword(keywords::Mut) ||
- self.is_keyword(keywords::Const)
+ self.is_keyword(kw::Mut) ||
+ self.is_keyword(kw::Const)
}
crate fn is_qpath_start(&self) -> bool {
}
/// Returns `true` if the token is a given keyword, `kw`.
- pub fn is_keyword(&self, kw: keywords::Keyword) -> bool {
- self.ident().map(|(ident, is_raw)| ident.name == kw.name() && !is_raw).unwrap_or(false)
+ pub fn is_keyword(&self, kw: Symbol) -> bool {
+ self.ident().map(|(ident, is_raw)| ident.name == kw && !is_raw).unwrap_or(false)
}
pub fn is_path_segment_keyword(&self) -> bool {
(&DocComment(a), &DocComment(b)) |
(&Shebang(a), &Shebang(b)) => a == b,
+ (&Literal(a), &Literal(b)) => a == b,
+
(&Lifetime(a), &Lifetime(b)) => a.name == b.name,
(&Ident(a, b), &Ident(c, d)) => b == d && (a.name == c.name ||
- a.name == keywords::DollarCrate.name() ||
- c.name == keywords::DollarCrate.name()),
-
- (&Literal(ref a, b), &Literal(ref c, d)) => {
- b == d && a.probably_equal_for_proc_macro(c)
- }
+ a.name == kw::DollarCrate ||
+ c.name == kw::DollarCrate),
(&Interpolated(_), &Interpolated(_)) => false,
use crate::print::pp::Breaks::{Consistent, Inconsistent};
use crate::ptr::P;
use crate::std_inject;
-use crate::symbol::{keywords, sym};
+use crate::symbol::{kw, sym};
use crate::tokenstream::{self, TokenStream, TokenTree};
use rustc_target::spec::abi::{self, Abi};
}
}
-pub fn literal_to_string(lit: token::Lit, suffix: Option<ast::Name>) -> String {
- let mut out = match lit {
- token::Byte(b) => format!("b'{}'", b),
- token::Char(c) => format!("'{}'", c),
- token::Err(c) => format!("'{}'", c),
- token::Bool(c) |
- token::Float(c) |
- token::Integer(c) => c.to_string(),
- token::Str_(s) => format!("\"{}\"", s),
- token::StrRaw(s, n) => format!("r{delim}\"{string}\"{delim}",
- delim="#".repeat(n as usize),
- string=s),
- token::ByteStr(v) => format!("b\"{}\"", v),
- token::ByteStrRaw(s, n) => format!("br{delim}\"{string}\"{delim}",
- delim="#".repeat(n as usize),
- string=s),
+pub fn literal_to_string(lit: token::Lit) -> String {
+ let token::Lit { kind, symbol, suffix } = lit;
+ let mut out = match kind {
+ token::Byte => format!("b'{}'", symbol),
+ token::Char => format!("'{}'", symbol),
+ token::Bool |
+ token::Float |
+ token::Integer => symbol.to_string(),
+ token::Str => format!("\"{}\"", symbol),
+ token::StrRaw(n) => format!("r{delim}\"{string}\"{delim}",
+ delim="#".repeat(n as usize),
+ string=symbol),
+ token::ByteStr => format!("b\"{}\"", symbol),
+ token::ByteStrRaw(n) => format!("br{delim}\"{string}\"{delim}",
+ delim="#".repeat(n as usize),
+ string=symbol),
+ token::Err => format!("'{}'", symbol),
};
if let Some(suffix) = suffix {
token::SingleQuote => "'".to_string(),
/* Literals */
- token::Literal(lit, suf) => literal_to_string(lit, suf),
+ token::Literal(lit) => literal_to_string(lit),
/* Name components */
token::Ident(s, false) => s.to_string(),
fn print_literal(&mut self, lit: &ast::Lit) -> io::Result<()> {
self.maybe_print_comment(lit.span.lo())?;
- self.writer().word(literal_to_string(lit.token, lit.suffix))
+ self.writer().word(literal_to_string(lit.token))
}
fn print_string(&mut self, st: &str,
if i > 0 {
self.writer().word("::")?
}
- if segment.ident.name != keywords::PathRoot.name() {
- if segment.ident.name == keywords::DollarCrate.name() {
+ if segment.ident.name != kw::PathRoot {
+ if segment.ident.name == kw::DollarCrate {
self.print_dollar_crate(segment.ident)?;
} else {
self.writer().word(segment.ident.as_str().to_string())?;
self.s.word(";")?;
}
ast::ItemKind::Mac(ref mac) => {
- if item.ident.name == keywords::Invalid.name() {
+ if item.ident.name == kw::Invalid {
self.print_mac(mac)?;
match mac.node.delim {
MacDelimiter::Brace => {}
colons_before_params: bool)
-> io::Result<()>
{
- if segment.ident.name != keywords::PathRoot.name() {
- if segment.ident.name == keywords::DollarCrate.name() {
+ if segment.ident.name != kw::PathRoot {
+ if segment.ident.name == kw::DollarCrate {
self.print_dollar_crate(segment.ident)?;
} else {
self.print_ident(segment.ident)?;
self.print_explicit_self(&eself)?;
} else {
let invalid = if let PatKind::Ident(_, ident, _) = input.pat.node {
- ident.name == keywords::Invalid.name()
+ ident.name == kw::Invalid
} else {
false
};
use crate::attr;
use crate::edition::Edition;
use crate::ext::hygiene::{Mark, SyntaxContext};
-use crate::symbol::{Ident, Symbol, keywords, sym};
+use crate::symbol::{Ident, Symbol, kw, sym};
use crate::source_map::{ExpnInfo, MacroAttribute, dummy_spanned, respan};
use crate::ptr::P;
use crate::tokenstream::TokenStream;
vis: respan(span.shrink_to_lo(), ast::VisibilityKind::Inherited),
node: ast::ItemKind::Use(P(ast::UseTree {
prefix: ast::Path {
- segments: iter::once(keywords::PathRoot.ident())
+ segments: iter::once(ast::Ident::with_empty_ctxt(kw::PathRoot))
.chain(
[name, "prelude", "v1"].iter().cloned()
.map(ast::Ident::from_str)
span,
})),
id: ast::DUMMY_NODE_ID,
- ident: keywords::Invalid.ident(),
+ ident: ast::Ident::invalid(),
span,
tokens: None,
}));
use crate::print::pprust;
use crate::ast::{self, Ident};
use crate::ptr::P;
-use crate::symbol::{self, Symbol, keywords, sym};
+use crate::symbol::{self, Symbol, kw, sym};
use crate::ThinVec;
struct Test {
fn flat_map_item(&mut self, i: P<ast::Item>) -> SmallVec<[P<ast::Item>; 1]> {
let ident = i.ident;
- if ident.name != keywords::Invalid.name() {
+ if ident.name != kw::Invalid {
self.cx.path.push(ident);
}
debug!("current path: {}", path_name_i(&self.cx.path));
}
item.node = ast::ItemKind::Mod(module);
}
- if ident.name != keywords::Invalid.name() {
+ if ident.name != kw::Invalid {
self.cx.path.pop();
}
smallvec![P(item)]
tests: Vec<Ident>,
tested_submods: Vec<(Ident, Ident)>)
-> (P<ast::Item>, Ident) {
- let super_ = Ident::with_empty_ctxt(keywords::Super.name());
+ let super_ = Ident::with_empty_ctxt(kw::Super);
let items = tests.into_iter().map(|r| {
cx.ext_cx.item_use_simple(DUMMY_SP, dummy_spanned(ast::VisibilityKind::Public),
use crate::parse::token::{Token, BinOpToken};
-use crate::symbol::keywords;
+use crate::symbol::kw;
use crate::ast::{self, BinOpKind};
/// Associative operator with precedence.
// DotDotDot is no longer supported, but we need some way to display the error
Token::DotDotDot => Some(DotDotEq),
Token::Colon => Some(Colon),
- _ if t.is_keyword(keywords::As) => Some(As),
+ _ if t.is_keyword(kw::As) => Some(As),
_ => None
}
}
use syntax::source_map::Spanned;
use syntax::ext::base::*;
use syntax::ext::build::AstBuilder;
-use syntax::parse::token;
+use syntax::parse::token::{self, Token};
use syntax::parse::parser::Parser;
use syntax::print::pprust;
use syntax::ptr::P;
tts: custom_message.unwrap_or_else(|| {
TokenStream::from(TokenTree::Token(
DUMMY_SP,
- token::Literal(
- token::Lit::Str_(Name::intern(&format!(
- "assertion failed: {}",
- pprust::expr_to_string(&cond_expr).escape_debug()
- ))),
- None,
- ),
+ Token::lit(token::Str, Symbol::intern(&format!(
+ "assertion failed: {}",
+ pprust::expr_to_string(&cond_expr).escape_debug()
+ )), None),
))
}).into(),
delim: MacDelimiter::Parenthesis,
//
// Parse this as an actual message, and suggest inserting a comma. Eventually, this should be
// turned into an error.
- let custom_message = if let token::Literal(token::Lit::Str_(_), _) = parser.token {
+ let custom_message = if let token::Literal(token::Lit { kind: token::Str, .. }) = parser.token {
let mut err = cx.struct_span_warn(parser.span, "unexpected string literal");
let comma_span = cx.source_map().next_point(parser.prev_span);
err.span_suggestion_short(
use syntax::ext::base::{Annotatable, ExtCtxt};
use syntax::ext::build::AstBuilder;
use syntax::ptr::P;
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax_pos::Span;
pub fn expand_deriving_clone(cx: &mut ExtCtxt<'_>,
let mut stmts = Vec::new();
if is_union {
// let _: AssertParamIsCopy<Self>;
- let self_ty = cx.ty_path(cx.path_ident(trait_span, keywords::SelfUpper.ident()));
+ let self_ty =
+ cx.ty_path(cx.path_ident(trait_span, ast::Ident::with_empty_ctxt(kw::SelfUpper)));
assert_ty_bounds(cx, &mut stmts, self_ty, trait_span, "AssertParamIsCopy");
} else {
match *substr.fields {
use syntax::source_map::{self, respan};
use syntax::util::map_in_place::MapInPlace;
use syntax::ptr::P;
-use syntax::symbol::{Symbol, keywords, sym};
+use syntax::symbol::{Symbol, kw, sym};
use syntax::parse::ParseSess;
use syntax_pos::{DUMMY_SP, Span};
};
cx.item(self.span,
- keywords::Invalid.ident(),
+ Ident::invalid(),
a,
ast::ItemKind::Impl(unsafety,
ast::ImplPolarity::Positive,
let args = {
let self_args = explicit_self.map(|explicit_self| {
- ast::Arg::from_self(explicit_self,
- keywords::SelfLower.ident().with_span_pos(trait_.span))
+ let ident = Ident::with_empty_ctxt(kw::SelfLower).with_span_pos(trait_.span);
+ ast::Arg::from_self(explicit_self, ident)
});
let nonself_args = arg_types.into_iter()
.map(|(name, ty)| cx.arg(trait_.span, name, ty));
use syntax::source_map::{respan, DUMMY_SP};
use syntax::ptr::P;
use syntax_pos::Span;
-use syntax_pos::symbol::keywords;
+use syntax_pos::symbol::kw;
/// The types of pointers
#[derive(Clone)]
PathKind::Local => cx.path_all(span, false, idents, params, Vec::new()),
PathKind::Std => {
let def_site = DUMMY_SP.apply_mark(cx.current_expansion.mark);
- idents.insert(0, Ident::new(keywords::DollarCrate.name(), def_site));
+ idents.insert(0, Ident::new(kw::DollarCrate, def_site));
cx.path_all(span, false, idents, params, Vec::new())
}
}
use syntax::ast::{self, Ident, GenericArg};
use syntax::ext::base::{self, *};
use syntax::ext::build::AstBuilder;
-use syntax::symbol::{keywords, Symbol, sym};
+use syntax::symbol::{kw, sym, Symbol};
use syntax_pos::Span;
use syntax::tokenstream;
let sp = sp.apply_mark(cx.current_expansion.mark);
let e = match env::var(&*var.as_str()) {
Err(..) => {
- let lt = cx.lifetime(sp, keywords::StaticLifetime.ident());
+ let lt = cx.lifetime(sp, Ident::with_empty_ctxt(kw::StaticLifetime));
cx.expr_path(cx.path_all(sp,
true,
cx.std_path(&["option", "Option", "None"]),
match parse_global_asm(cx, sp, tts) {
Ok(Some(global_asm)) => {
MacEager::items(smallvec![P(ast::Item {
- ident: ast::Ident::with_empty_ctxt(Symbol::intern("")),
+ ident: ast::Ident::invalid(),
attrs: Vec::new(),
id: ast::DUMMY_NODE_ID,
node: ast::ItemKind::GlobalAsm(P(global_asm)),
use syntax::parse::ParseSess;
use syntax::ptr::P;
use syntax::symbol::Symbol;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax::visit::{self, Visitor};
use syntax_pos::{Span, DUMMY_SP};
let custom_derive = Ident::from_str("custom_derive");
let attr = Ident::from_str("attr");
let bang = Ident::from_str("bang");
- let crate_kw = Ident::with_empty_ctxt(keywords::Crate.name());
+ let crate_kw = Ident::with_empty_ctxt(kw::Crate);
let decls = {
let local_path = |sp: Span, name| {
use syntax::parse::{self, token, ParseSess};
use syntax::tokenstream::{self, DelimSpan, IsJoint::*, TokenStream, TreeAndJoint};
use syntax_pos::hygiene::{SyntaxContext, Transparency};
-use syntax_pos::symbol::{keywords, Symbol};
+use syntax_pos::symbol::{kw, Symbol};
use syntax_pos::{BytePos, FileName, MultiSpan, Pos, SourceFile, Span};
trait FromInternal<T> {
Question => op!('?'),
SingleQuote => op!('\''),
- Ident(ident, false) if ident.name == keywords::DollarCrate.name() =>
+ Ident(ident, false) if ident.name == kw::DollarCrate =>
tt!(Ident::dollar_crate()),
Ident(ident, is_raw) => tt!(Ident::new(ident.name, is_raw)),
Lifetime(ident) => {
stack.push(tt!(Ident::new(ident.name, false)));
tt!(Punct::new('\'', true))
}
- Literal(lit, suffix) => tt!(Literal { lit, suffix }),
+ Literal(lit) => tt!(Literal { lit }),
DocComment(c) => {
let style = comments::doc_comment_style(&c.as_str());
let stripped = comments::strip_doc_comment_decoration(&c.as_str());
let stream = vec![
Ident(ast::Ident::new(Symbol::intern("doc"), span), false),
Eq,
- Literal(Lit::Str_(Symbol::intern(&escaped)), None),
+ Token::lit(token::Str, Symbol::intern(&escaped), None),
]
.into_iter()
.map(|token| tokenstream::TokenTree::Token(span, token))
return tokenstream::TokenTree::Token(span, token).into();
}
TokenTree::Literal(self::Literal {
- lit: Lit::Integer(ref a),
- suffix,
+ lit: token::Lit { kind: token::Integer, symbol, suffix },
span,
- }) if a.as_str().starts_with("-") => {
+ }) if symbol.as_str().starts_with("-") => {
let minus = BinOp(BinOpToken::Minus);
- let integer = Symbol::intern(&a.as_str()[1..]);
- let integer = Literal(Lit::Integer(integer), suffix);
+ let symbol = Symbol::intern(&symbol.as_str()[1..]);
+ let integer = Token::lit(token::Integer, symbol, suffix);
let a = tokenstream::TokenTree::Token(span, minus);
let b = tokenstream::TokenTree::Token(span, integer);
return vec![a, b].into_iter().collect();
}
TokenTree::Literal(self::Literal {
- lit: Lit::Float(ref a),
- suffix,
+ lit: token::Lit { kind: token::Float, symbol, suffix },
span,
- }) if a.as_str().starts_with("-") => {
+ }) if symbol.as_str().starts_with("-") => {
let minus = BinOp(BinOpToken::Minus);
- let float = Symbol::intern(&a.as_str()[1..]);
- let float = Literal(Lit::Float(float), suffix);
+ let symbol = Symbol::intern(&symbol.as_str()[1..]);
+ let float = Token::lit(token::Float, symbol, suffix);
let a = tokenstream::TokenTree::Token(span, minus);
let b = tokenstream::TokenTree::Token(span, float);
return vec![a, b].into_iter().collect();
}
- TokenTree::Literal(self::Literal { lit, suffix, span }) => {
- return tokenstream::TokenTree::Token(span, Literal(lit, suffix)).into()
+ TokenTree::Literal(self::Literal { lit, span }) => {
+ return tokenstream::TokenTree::Token(span, Literal(lit)).into()
}
};
}
fn dollar_crate(span: Span) -> Ident {
// `$crate` is accepted as an ident only if it comes from the compiler.
- Ident { sym: keywords::DollarCrate.name(), is_raw: false, span }
+ Ident { sym: kw::DollarCrate, is_raw: false, span }
}
}
#[derive(Clone, Debug)]
pub struct Literal {
lit: token::Lit,
- suffix: Option<Symbol>,
span: Span,
}
call_site: to_span(Transparency::Transparent),
}
}
+
+ fn lit(&mut self, kind: token::LitKind, symbol: Symbol, suffix: Option<Symbol>) -> Literal {
+ Literal {
+ lit: token::Lit::new(kind, symbol, suffix),
+ span: server::Span::call_site(self),
+ }
+ }
}
impl server::Types for Rustc<'_> {
format!("{:?}", literal)
}
fn integer(&mut self, n: &str) -> Self::Literal {
- Literal {
- lit: token::Lit::Integer(Symbol::intern(n)),
- suffix: None,
- span: server::Span::call_site(self),
- }
+ self.lit(token::Integer, Symbol::intern(n), None)
}
fn typed_integer(&mut self, n: &str, kind: &str) -> Self::Literal {
- Literal {
- lit: token::Lit::Integer(Symbol::intern(n)),
- suffix: Some(Symbol::intern(kind)),
- span: server::Span::call_site(self),
- }
+ self.lit(token::Integer, Symbol::intern(n), Some(Symbol::intern(kind)))
}
fn float(&mut self, n: &str) -> Self::Literal {
- Literal {
- lit: token::Lit::Float(Symbol::intern(n)),
- suffix: None,
- span: server::Span::call_site(self),
- }
+ self.lit(token::Float, Symbol::intern(n), None)
}
fn f32(&mut self, n: &str) -> Self::Literal {
- Literal {
- lit: token::Lit::Float(Symbol::intern(n)),
- suffix: Some(Symbol::intern("f32")),
- span: server::Span::call_site(self),
- }
+ self.lit(token::Float, Symbol::intern(n), Some(Symbol::intern("f32")))
}
fn f64(&mut self, n: &str) -> Self::Literal {
- Literal {
- lit: token::Lit::Float(Symbol::intern(n)),
- suffix: Some(Symbol::intern("f64")),
- span: server::Span::call_site(self),
- }
+ self.lit(token::Float, Symbol::intern(n), Some(Symbol::intern("f64")))
}
fn string(&mut self, string: &str) -> Self::Literal {
let mut escaped = String::new();
for ch in string.chars() {
escaped.extend(ch.escape_debug());
}
- Literal {
- lit: token::Lit::Str_(Symbol::intern(&escaped)),
- suffix: None,
- span: server::Span::call_site(self),
- }
+ self.lit(token::Str, Symbol::intern(&escaped), None)
}
fn character(&mut self, ch: char) -> Self::Literal {
let mut escaped = String::new();
escaped.extend(ch.escape_unicode());
- Literal {
- lit: token::Lit::Char(Symbol::intern(&escaped)),
- suffix: None,
- span: server::Span::call_site(self),
- }
+ self.lit(token::Char, Symbol::intern(&escaped), None)
}
fn byte_string(&mut self, bytes: &[u8]) -> Self::Literal {
let string = bytes
.flat_map(ascii::escape_default)
.map(Into::<char>::into)
.collect::<String>();
- Literal {
- lit: token::Lit::ByteStr(Symbol::intern(&string)),
- suffix: None,
- span: server::Span::call_site(self),
- }
+ self.lit(token::ByteStr, Symbol::intern(&string), None)
}
fn span(&mut self, literal: &Self::Literal) -> Self::Span {
literal.span
use syntax::ext::base::{self, ExtCtxt};
use syntax::feature_gate;
-use syntax::symbol::{keywords, sym};
+use syntax::symbol::{kw, sym};
use syntax_pos::Span;
use syntax::tokenstream::TokenTree;
}
match (tt.len(), tt.first()) {
- (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::True) => {
+ (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::True) => {
cx.set_trace_macros(true);
}
- (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(keywords::False) => {
+ (1, Some(&TokenTree::Token(_, ref tok))) if tok.is_keyword(kw::False) => {
cx.set_trace_macros(false);
}
_ => cx.span_err(sp, "trace_macros! accepts only `true` or `false`"),
use crate::GLOBALS;
use crate::Span;
use crate::edition::Edition;
-use crate::symbol::{keywords, Symbol};
+use crate::symbol::{kw, Symbol};
use serialize::{Encodable, Decodable, Encoder, Decoder};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
prev_ctxt: SyntaxContext(0),
opaque: SyntaxContext(0),
opaque_and_semitransparent: SyntaxContext(0),
- dollar_crate_name: keywords::DollarCrate.name(),
+ dollar_crate_name: kw::DollarCrate,
}],
markings: FxHashMap::default(),
}
prev_ctxt: SyntaxContext::empty(),
opaque: SyntaxContext::empty(),
opaque_and_semitransparent: SyntaxContext::empty(),
- dollar_crate_name: keywords::DollarCrate.name(),
+ dollar_crate_name: kw::DollarCrate,
});
SyntaxContext(data.syntax_contexts.len() as u32 - 1)
})
prev_ctxt,
opaque: new_opaque,
opaque_and_semitransparent: new_opaque,
- dollar_crate_name: keywords::DollarCrate.name(),
+ dollar_crate_name: kw::DollarCrate,
});
new_opaque
});
prev_ctxt,
opaque,
opaque_and_semitransparent: new_opaque_and_semitransparent,
- dollar_crate_name: keywords::DollarCrate.name(),
+ dollar_crate_name: kw::DollarCrate,
});
new_opaque_and_semitransparent
});
prev_ctxt,
opaque,
opaque_and_semitransparent,
- dollar_crate_name: keywords::DollarCrate.name(),
+ dollar_crate_name: kw::DollarCrate,
});
new_opaque_and_semitransparent_and_transparent
})
&mut data.syntax_contexts[self.0 as usize].dollar_crate_name, dollar_crate_name
);
assert!(dollar_crate_name == prev_dollar_crate_name ||
- prev_dollar_crate_name == keywords::DollarCrate.name(),
+ prev_dollar_crate_name == kw::DollarCrate,
"$crate name is reset for a syntax context");
})
}
extern_prelude,
extern_types,
f16c_target_feature,
+ f32,
+ f64,
feature,
ffi_returns_twice,
field_init_shorthand,
rustc_layout_scalar_valid_range_end,
rustc_layout_scalar_valid_range_start,
rustc_mir,
+ rustc_nonnull_optimization_guaranteed,
rustc_object_lifetime_default,
rustc_on_unimplemented,
rustc_outlives,
Ident::new(name, DUMMY_SP)
}
+ #[inline]
+ pub fn invalid() -> Ident {
+ Ident::with_empty_ctxt(kw::Invalid)
+ }
+
/// Maps an interned string to an identifier with an empty syntax context.
pub fn from_interned_str(string: InternedString) -> Ident {
Ident::with_empty_ctxt(string.as_symbol())
/// Transforms an underscore identifier into one with the same name, but
/// gensymed. Leaves non-underscore identifiers unchanged.
pub fn gensym_if_underscore(self) -> Ident {
- if self.name == keywords::Underscore.name() { self.gensym() } else { self }
+ if self.name == kw::Underscore { self.gensym() } else { self }
}
// WARNING: this function is deprecated and will be removed in the future.
this.strings.reserve(init.len());
// We can't allocate empty strings in the arena, so handle this here.
- assert!(keywords::Invalid.name().as_u32() == 0 && init[0].is_empty());
- this.names.insert("", keywords::Invalid.name());
+ assert!(kw::Invalid.as_u32() == 0 && init[0].is_empty());
+ this.names.insert("", kw::Invalid);
this.strings.push("");
for string in &init[1..] {
}
}
-pub mod keywords {
- use super::{Symbol, Ident};
-
- #[derive(Clone, Copy, PartialEq, Eq)]
- pub struct Keyword {
- ident: Ident,
- }
-
- impl Keyword {
- #[inline]
- pub fn ident(self) -> Ident {
- self.ident
- }
-
- #[inline]
- pub fn name(self) -> Symbol {
- self.ident.name
- }
- }
-
+// This module has a very short name because it's used a lot.
+pub mod kw {
+ use super::Symbol;
keywords!();
}
impl Symbol {
fn is_used_keyword_2018(self) -> bool {
- self == keywords::Dyn.name()
+ self == kw::Dyn
}
fn is_unused_keyword_2018(self) -> bool {
- self >= keywords::Async.name() && self <= keywords::Try.name()
+ self >= kw::Async && self <= kw::Try
+ }
+
+ /// Used for sanity checking rustdoc keyword sections.
+ pub fn is_doc_keyword(self) -> bool {
+ self <= kw::Union
}
}
// Returns `true` for reserved identifiers used internally for elided lifetimes,
// unnamed method parameters, crate root module, error recovery etc.
pub fn is_special(self) -> bool {
- self.name <= keywords::Underscore.name()
+ self.name <= kw::Underscore
}
/// Returns `true` if the token is a keyword used in the language.
pub fn is_used_keyword(self) -> bool {
// Note: `span.edition()` is relatively expensive, don't call it unless necessary.
- self.name >= keywords::As.name() && self.name <= keywords::While.name() ||
+ self.name >= kw::As && self.name <= kw::While ||
self.name.is_used_keyword_2018() && self.span.rust_2018()
}
/// Returns `true` if the token is a keyword reserved for possible future use.
pub fn is_unused_keyword(self) -> bool {
// Note: `span.edition()` is relatively expensive, don't call it unless necessary.
- self.name >= keywords::Abstract.name() && self.name <= keywords::Yield.name() ||
+ self.name >= kw::Abstract && self.name <= kw::Yield ||
self.name.is_unused_keyword_2018() && self.span.rust_2018()
}
/// A keyword or reserved identifier that can be used as a path segment.
pub fn is_path_segment_keyword(self) -> bool {
- self.name == keywords::Super.name() ||
- self.name == keywords::SelfLower.name() ||
- self.name == keywords::SelfUpper.name() ||
- self.name == keywords::Crate.name() ||
- self.name == keywords::PathRoot.name() ||
- self.name == keywords::DollarCrate.name()
+ self.name == kw::Super ||
+ self.name == kw::SelfLower ||
+ self.name == kw::SelfUpper ||
+ self.name == kw::Crate ||
+ self.name == kw::PathRoot ||
+ self.name == kw::DollarCrate
}
/// This identifier can be a raw identifier.
pub fn can_be_raw(self) -> bool {
- self.name != keywords::Invalid.name() && self.name != keywords::Underscore.name() &&
+ self.name != kw::Invalid && self.name != kw::Underscore &&
!self.is_path_segment_keyword()
}
fn without_first_quote_test() {
GLOBALS.set(&Globals::new(edition::DEFAULT_EDITION), || {
let i = Ident::from_str("'break");
- assert_eq!(i.without_first_quote().name, keywords::Break.name());
+ assert_eq!(i.without_first_quote().name, kw::Break);
});
}
}
-Subproject commit 2c5656ae593851d0b2336a727cc14b77a06b8ac0
+Subproject commit 4efebe31651d5520bcba968878dbb8a4971d2045
#[cfg(cfail2)]
pub trait T2: T1 { }
//[cfail2]~^ ERROR cycle detected when computing the supertraits of `T2`
-//[cfail2]~| ERROR cycle detected when computing the supertraits of `T2`
pub trait T1: T2 { }
// END RUST SOURCE
// START rustc.main.ElaborateDrops.before.mir
// let mut _0: ();
+// let _1: std::boxed::Box<S>;
// let mut _2: std::boxed::Box<S>;
// let mut _3: ();
// let mut _4: std::boxed::Box<S>;
// scope 1 {
-// let _1: std::boxed::Box<S>;
-// }
-// scope 2 {
// }
// bb0: {
// StorageLive(_1);
// END RUST SOURCE
// START rustc.main.ElaborateDrops.after.mir
// let mut _0: ();
+// let _1: ();
// let mut _2: S;
// let mut _3: S;
// let mut _4: S;
// let mut _5: bool;
// scope 1 {
-// let _1: ();
-// }
-// scope 2 {
// }
// ...
// bb0: {
// END rustc.main.ElaborateDrops.after.mir
// START rustc.test.ElaborateDrops.after.mir
// let mut _0: ();
+// let _1: S;
// let mut _3: ();
// let mut _4: S;
// let mut _5: S;
// let mut _6: bool;
// ...
-// let _1: S;
-// ...
// let mut _2: S;
// ...
// bb0: {
// fn main() -> (){
// let mut _0: ();
// let mut _1: ();
+// let _2: i32;
// let mut _3: bool;
// let mut _4: !;
// let mut _5: ();
// let mut _6: &i32;
// scope 1 {
-// let _2: i32;
-// }
-// scope 2 {
// }
// bb0: {
// goto -> bb1;
// unreachable;
// }
// bb17: {
-// StorageDead(_4);
// goto -> bb18;
// }
// bb18: {
--- /dev/null
+// Test that StorageDead and Drops are generated properly for bindings in
+// matches:
+// * The MIR should only contain a single drop of `s` and `t`: at the end
+// of their respective arms.
+// * StorageDead and StorageLive statements are correctly matched up on
+// non-unwind paths.
+// * The visibility scopes of the match arms should be disjoint, and contain
+// all of the bindings for that scope.
+// * No drop flags are used.
+
+#![feature(nll, bind_by_move_pattern_guards)]
+
+fn complicated_match(cond: bool, items: (bool, bool, String)) -> i32 {
+ match items {
+ (false, a, s) | (a, false, s) if if cond { return 3 } else { a } => 1,
+ (true, b, t) | (false, b, t) => 2,
+ }
+}
+
+const CASES: &[(bool, bool, bool, i32)] = &[
+ (false, false, false, 2),
+ (false, false, true, 1),
+ (false, true, false, 1),
+ (false, true, true, 2),
+ (true, false, false, 3),
+ (true, false, true, 3),
+ (true, true, false, 3),
+ (true, true, true, 2),
+];
+
+fn main() {
+ for &(cond, items_1, items_2, result) in CASES {
+ assert_eq!(
+ complicated_match(cond, (items_1, items_2, String::new())),
+ result,
+ );
+ }
+}
+
+// END RUST SOURCE
+// START rustc.complicated_match.SimplifyCfg-initial.after.mir
+// let mut _0: i32;
+// let mut _3: &bool; // Temp for fake borrow of `items.0`
+// let mut _4: &bool; // Temp for fake borrow of `items.1`
+// let _5: bool; // `a` in arm
+// let _6: &bool; // `a` in guard
+// let _7: std::string::String; // `s` in arm
+// let _8: &std::string::String; // `s` in guard
+// let mut _9: bool; // `if cond { return 3 } else { a }`
+// let mut _10: bool; // `cond`
+// let mut _11: !; // `return 3`
+// let mut _12: bool; // `if cond { return 3 } else { a }`
+// let mut _13: bool; // `cond`
+// let mut _14: !; // `return 3`
+// let _15: bool; // `b`
+// let _16: std::string::String; // `t`
+// scope 1 {
+// }
+// scope 2 {
+// }
+// bb0: {
+// FakeRead(ForMatchedPlace, _2);
+// switchInt((_2.0: bool)) -> [false: bb2, otherwise: bb7];
+// }
+// bb1 (cleanup): {
+// resume;
+// }
+// bb2: {
+// falseEdges -> [real: bb10, imaginary: bb3];
+// }
+// bb3: {
+// falseEdges -> [real: bb21, imaginary: bb4];
+// }
+// bb4: {
+// falseEdges -> [real: bb31, imaginary: bb5];
+// }
+// bb5: {
+// falseEdges -> [real: bb32, imaginary: bb6];
+// }
+// bb6: {
+// unreachable;
+// }
+// bb7: {
+// switchInt((_2.1: bool)) -> [false: bb3, otherwise: bb8];
+// }
+// bb8: {
+// switchInt((_2.0: bool)) -> [false: bb5, otherwise: bb4];
+// }
+// bb9: { // arm 1
+// _0 = const 1i32;
+// drop(_7) -> [return: bb29, unwind: bb16];
+// }
+// bb10: { // guard - first time
+// StorageLive(_6);
+// _6 = &(_2.1: bool);
+// StorageLive(_8);
+// _8 = &(_2.2: std::string::String);
+// _3 = &shallow (_2.0: bool);
+// _4 = &shallow (_2.1: bool);
+// StorageLive(_9);
+// StorageLive(_10);
+// _10 = _1;
+// FakeRead(ForMatchedPlace, _10);
+// switchInt(_10) -> [false: bb12, otherwise: bb11];
+// }
+// bb11: {
+// falseEdges -> [real: bb14, imaginary: bb12];
+// }
+// bb12: {
+// falseEdges -> [real: bb18, imaginary: bb13];
+// }
+// bb13: {
+// unreachable;
+// }
+// bb14: { // `return 3` - first time
+// _0 = const 3i32;
+// StorageDead(_10);
+// StorageDead(_9);
+// StorageDead(_8);
+// StorageDead(_6);
+// goto -> bb17;
+// }
+// bb15: {
+// return;
+// }
+// bb16 (cleanup): {
+// drop(_2) -> bb1;
+// }
+// bb17: {
+// drop(_2) -> [return: bb15, unwind: bb1];
+// }
+// bb18: { // `else` block - first time
+// _9 = (*_6);
+// StorageDead(_10);
+// FakeRead(ForMatchGuard, _3);
+// FakeRead(ForMatchGuard, _4);
+// FakeRead(ForGuardBinding, _6);
+// FakeRead(ForGuardBinding, _8);
+// switchInt(move _9) -> [false: bb20, otherwise: bb19];
+// }
+// bb19: {
+// StorageDead(_9);
+// StorageLive(_5);
+// _5 = (_2.1: bool);
+// StorageLive(_7);
+// _7 = move (_2.2: std::string::String);
+// goto -> bb9;
+// }
+// bb20: { // guard otherwise case - first time
+// StorageDead(_9);
+// StorageDead(_8);
+// StorageDead(_6);
+// falseEdges -> [real: bb7, imaginary: bb3];
+// }
+// bb21: { // guard - second time
+// StorageLive(_6);
+// _6 = &(_2.0: bool);
+// StorageLive(_8);
+// _8 = &(_2.2: std::string::String);
+// _3 = &shallow (_2.0: bool);
+// _4 = &shallow (_2.1: bool);
+// StorageLive(_12);
+// StorageLive(_13);
+// _13 = _1;
+// FakeRead(ForMatchedPlace, _13);
+// switchInt(_13) -> [false: bb23, otherwise: bb22];
+// }
+// bb22: {
+// falseEdges -> [real: bb25, imaginary: bb23];
+// }
+// bb23: {
+// falseEdges -> [real: bb26, imaginary: bb24];
+// }
+// bb24: {
+// unreachable;
+// }
+// bb25: { // `return 3` - second time
+// _0 = const 3i32;
+// StorageDead(_13);
+// StorageDead(_12);
+// StorageDead(_8);
+// StorageDead(_6);
+// goto -> bb17;
+// }
+// bb26: { // `else` block - second time
+// _12 = (*_6);
+// StorageDead(_13);
+// FakeRead(ForMatchGuard, _3);
+// FakeRead(ForMatchGuard, _4);
+// FakeRead(ForGuardBinding, _6);
+// FakeRead(ForGuardBinding, _8);
+// switchInt(move _12) -> [false: bb28, otherwise: bb27];
+// }
+// bb27: { // guard otherwise case - second time
+// StorageDead(_12);
+// StorageLive(_5);
+// _5 = (_2.0: bool);
+// StorageLive(_7);
+// _7 = move (_2.2: std::string::String);
+// goto -> bb9;
+// }
+// bb28: { // rest of arm 1
+// StorageDead(_12);
+// StorageDead(_8);
+// StorageDead(_6);
+// falseEdges -> [real: bb8, imaginary: bb4];
+// }
+// bb29: {
+// StorageDead(_7);
+// StorageDead(_5);
+// StorageDead(_8);
+// StorageDead(_6);
+// goto -> bb34;
+// }
+// bb30: { // arm 2
+// _0 = const 2i32;
+// drop(_16) -> [return: bb33, unwind: bb16];
+// }
+// bb31: { // bindings for arm 2 - first pattern
+// StorageLive(_15);
+// _15 = (_2.1: bool);
+// StorageLive(_16);
+// _16 = move (_2.2: std::string::String);
+// goto -> bb30;
+// }
+// bb32: { // bindings for arm 2 - second pattern
+// StorageLive(_15);
+// _15 = (_2.1: bool);
+// StorageLive(_16);
+// _16 = move (_2.2: std::string::String);
+// goto -> bb30;
+// }
+// bb33: { // rest of arm 2
+// StorageDead(_16);
+// StorageDead(_15);
+// goto -> bb34;
+// }
+// bb34: { // end of match
+// drop(_2) -> [return: bb15, unwind: bb1];
+// }
+// END rustc.complicated_match.SimplifyCfg-initial.after.mir
+// START rustc.complicated_match.ElaborateDrops.after.mir
+// let _16: std::string::String; // No drop flags, which would come after this.
+// scope 1 {
+// END rustc.complicated_match.ElaborateDrops.after.mir
// _2 = std::option::Option::<i32>::Some(const 42i32,);
// FakeRead(ForMatchedPlace, _2);
// _3 = discriminant(_2);
-// switchInt(move _3) -> [0isize: bb4, 1isize: bb2, otherwise: bb7];
+// switchInt(move _3) -> [0isize: bb4, 1isize: bb2, otherwise: bb6];
// }
// bb1 (cleanup): {
// resume;
// }
// bb2: {
-// falseEdges -> [real: bb8, imaginary: bb3]; //pre_binding1
+// falseEdges -> [real: bb7, imaginary: bb3]; //pre_binding1
// }
// bb3: {
// falseEdges -> [real: bb11, imaginary: bb4]; //pre_binding2
// bb5: {
// unreachable;
// }
-// bb6: { // to pre_binding2
-// falseEdges -> [real: bb3, imaginary: bb3];
-// }
-// bb7: {
+// bb6: {
// unreachable;
// }
-// bb8: { // binding1 and guard
+// bb7: { // binding1 and guard
// StorageLive(_6);
// _6 = &(((promoted[0]: std::option::Option<i32>) as Some).0: i32);
// _4 = &shallow _2;
// StorageLive(_7);
-// _7 = const guard() -> [return: bb9, unwind: bb1];
+// _7 = const guard() -> [return: bb8, unwind: bb1];
// }
-// bb9: {
+// bb8: { // end of guard
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
-// switchInt(move _7) -> [false: bb6, otherwise: bb10];
+// switchInt(move _7) -> [false: bb10, otherwise: bb9];
// }
-// bb10: {
+// bb9: { // arm1
+// StorageDead(_7);
// StorageLive(_5);
// _5 = ((_2 as Some).0: i32);
// StorageLive(_8);
// _8 = _5;
// _1 = (const 1i32, move _8);
// StorageDead(_8);
+// StorageDead(_5);
+// StorageDead(_6);
// goto -> bb13;
// }
-// bb11: {
+// bb10: { // to pre_binding2
+// StorageDead(_7);
+// StorageDead(_6);
+// falseEdges -> [real: bb3, imaginary: bb3];
+// }
+// bb11: { // arm2
// StorageLive(_9);
// _9 = ((_2 as Some).0: i32);
// StorageLive(_10);
// _10 = _9;
// _1 = (const 2i32, move _10);
// StorageDead(_10);
+// StorageDead(_9);
// goto -> bb13;
// }
-// bb12: {
+// bb12: { // arm3
// _1 = (const 3i32, const 3i32);
// goto -> bb13;
// }
// bb13: {
-// ...
+// StorageDead(_2);
+// StorageDead(_1);
+// _0 = ();
// return;
// }
// END rustc.full_tested_match.QualifyAndPromoteConstants.after.mir
// _2 = std::option::Option::<i32>::Some(const 42i32,);
// FakeRead(ForMatchedPlace, _2);
// _3 = discriminant(_2);
-// switchInt(move _3) -> [0isize: bb3, 1isize: bb2, otherwise: bb7];
+// switchInt(move _3) -> [0isize: bb3, 1isize: bb2, otherwise: bb6];
// }
// bb1 (cleanup): {
// resume;
// }
// bb2: {
-// falseEdges -> [real: bb8, imaginary: bb3];
+// falseEdges -> [real: bb7, imaginary: bb3];
// }
// bb3: {
// falseEdges -> [real: bb11, imaginary: bb4];
// bb5: {
// unreachable;
// }
-// bb6: { // to pre_binding3 (can skip 2 since this is `Some`)
-// falseEdges -> [real: bb4, imaginary: bb3];
-// }
-// bb7: {
+// bb6: {
// unreachable;
// }
-// bb8: { // binding1 and guard
+// bb7: { // binding1 and guard
// StorageLive(_6);
// _6 = &((_2 as Some).0: i32);
// _4 = &shallow _2;
// StorageLive(_7);
-// _7 = const guard() -> [return: bb9, unwind: bb1];
+// _7 = const guard() -> [return: bb8, unwind: bb1];
// }
-// bb9: { // end of guard
+// bb8: { // end of guard
// FakeRead(ForMatchGuard, _4);
// FakeRead(ForGuardBinding, _6);
-// switchInt(move _7) -> [false: bb6, otherwise: bb10];
+// switchInt(move _7) -> [false: bb10, otherwise: bb9];
// }
-// bb10: { // arm1
+// bb9: { // arm1
+// StorageDead(_7);
// StorageLive(_5);
// _5 = ((_2 as Some).0: i32);
// StorageLive(_8);
// _8 = _5;
// _1 = (const 1i32, move _8);
// StorageDead(_8);
+// StorageDead(_5);
+// StorageDead(_6);
// goto -> bb13;
// }
+// bb10: { // to pre_binding3 (can skip 2 since this is `Some`)
+// StorageDead(_7);
+// StorageDead(_6);
+// falseEdges -> [real: bb4, imaginary: bb3];
+// }
// bb11: { // arm2
// _1 = (const 3i32, const 3i32);
// goto -> bb13;
// _10 = _9;
// _1 = (const 2i32, move _10);
// StorageDead(_10);
+// StorageDead(_9);
// goto -> bb13;
// }
// bb13: {
-// ...
+// StorageDead(_2);
+// StorageDead(_1);
+// _0 = ();
// return;
// }
// END rustc.full_tested_match2.QualifyAndPromoteConstants.before.mir
//
// START rustc.main.QualifyAndPromoteConstants.before.mir
-// bb0: {
+// bb0: {
// ...
// _2 = std::option::Option::<i32>::Some(const 1i32,);
// FakeRead(ForMatchedPlace, _2);
// resume;
// }
// bb2: {
-// falseEdges -> [real: bb9, imaginary: bb3];
+// falseEdges -> [real: bb7, imaginary: bb3];
// }
// bb3: {
-// falseEdges -> [real: bb12, imaginary: bb4];
+// falseEdges -> [real: bb11, imaginary: bb4];
// }
// bb4: {
-// falseEdges -> [real: bb13, imaginary: bb5];
+// falseEdges -> [real: bb12, imaginary: bb5];
// }
// bb5: {
// falseEdges -> [real: bb16, imaginary: bb6];
// bb6: {
// unreachable;
// }
-// bb7: {
-// falseEdges -> [real: bb3, imaginary: bb3];
-// }
-// bb8: {
-// falseEdges -> [real: bb5, imaginary: bb5];
-// }
-// bb9: { // binding1: Some(w) if guard()
+// bb7: { // binding1: Some(w) if guard()
// StorageLive(_7);
// _7 = &((_2 as Some).0: i32);
// _5 = &shallow _2;
// StorageLive(_8);
-// _8 = const guard() -> [return: bb10, unwind: bb1];
+// _8 = const guard() -> [return: bb8, unwind: bb1];
// }
-// bb10: { //end of guard
+// bb8: { //end of guard1
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForGuardBinding, _7);
-// switchInt(move _8) -> [false: bb7, otherwise: bb11];
+// switchInt(move _8) -> [false: bb10, otherwise: bb9];
// }
-// bb11: { // set up bindings for arm1
+// bb9: {
+// StorageDead(_8);
// StorageLive(_6);
// _6 = ((_2 as Some).0: i32);
// _1 = const 1i32;
+// StorageDead(_6);
+// StorageDead(_7);
// goto -> bb17;
// }
-// bb12: { // binding2 & arm2
+// bb10: {
+// StorageDead(_8);
+// StorageDead(_7);
+// falseEdges -> [real: bb3, imaginary: bb3];
+// }
+// bb11: { // binding2 & arm2
// StorageLive(_9);
// _9 = _2;
// _1 = const 2i32;
+// StorageDead(_9);
// goto -> bb17;
// }
-// bb13: { // binding3: Some(y) if guard2(y)
+// bb12: { // binding3: Some(y) if guard2(y)
// StorageLive(_11);
// _11 = &((_2 as Some).0: i32);
// _5 = &shallow _2;
// StorageLive(_12);
// StorageLive(_13);
// _13 = (*_11);
-// _12 = const guard2(move _13) -> [return: bb14, unwind: bb1];
+// _12 = const guard2(move _13) -> [return: bb13, unwind: bb1];
// }
-// bb14: { // end of guard2
+// bb13: { // end of guard2
// StorageDead(_13);
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForGuardBinding, _11);
-// switchInt(move _12) -> [false: bb8, otherwise: bb15];
+// switchInt(move _12) -> [false: bb15, otherwise: bb14];
// }
-// bb15: { // binding4 & arm4
+// bb14: { // binding4 & arm4
+// StorageDead(_12);
// StorageLive(_10);
// _10 = ((_2 as Some).0: i32);
// _1 = const 3i32;
+// StorageDead(_10);
+// StorageDead(_11);
// goto -> bb17;
// }
+// bb15: {
+// StorageDead(_12);
+// StorageDead(_11);
+// falseEdges -> [real: bb5, imaginary: bb5];
+// }
// bb16: {
// StorageLive(_14);
// _14 = _2;
// _1 = const 4i32;
+// StorageDead(_14);
// goto -> bb17;
// }
// bb17: {
-// ...
+// StorageDead(_2);
+// StorageDead(_1);
+// _0 = ();
// return;
// }
// END rustc.main.QualifyAndPromoteConstants.before.mir
// START rustc.main.SimplifyCfg-initial.after.mir
// bb0: {
// ...
-// switchInt(move _4) -> [false: bb7, otherwise: bb8];
+// switchInt(move _4) -> [false: bb6, otherwise: bb7];
// }
// bb1: {
-// falseEdges -> [real: bb12, imaginary: bb2];
+// falseEdges -> [real: bb10, imaginary: bb2];
// }
// bb2: {
// falseEdges -> [real: bb13, imaginary: bb3];
// unreachable;
// }
// bb6: {
-// falseEdges -> [real: bb4, imaginary: bb2];
+// _6 = Le(const 10i32, _1);
+// switchInt(move _6) -> [false: bb8, otherwise: bb9];
// }
// bb7: {
-// _6 = Le(const 10i32, _1);
-// switchInt(move _6) -> [false: bb9, otherwise: bb10];
+// _5 = Lt(_1, const 10i32);
+// switchInt(move _5) -> [false: bb6, otherwise: bb1];
// }
// bb8: {
-// _5 = Lt(_1, const 10i32);
-// switchInt(move _5) -> [false: bb7, otherwise: bb1];
+// switchInt(_1) -> [-1i32: bb3, otherwise: bb4];
// }
// bb9: {
-// switchInt(_1) -> [-1i32: bb3, otherwise: bb4];
+// _7 = Le(_1, const 20i32);
+// switchInt(move _7) -> [false: bb8, otherwise: bb2];
// }
// bb10: {
-// _7 = Le(_1, const 20i32);
-// switchInt(move _7) -> [false: bb9, otherwise: bb2];
+// _8 = &shallow _1;
+// StorageLive(_9);
+// _9 = _2;
+// FakeRead(ForMatchGuard, _8);
+// switchInt(move _9) -> [false: bb12, otherwise: bb11];
// }
// bb11: {
+// StorageDead(_9);
// _3 = const 0i32;
// goto -> bb16;
// }
// bb12: {
-// _8 = &shallow _1;
-// StorageLive(_9);
-// _9 = _2;
-// FakeRead(ForMatchGuard, _8);
-// switchInt(move _9) -> [false: bb6, otherwise: bb11];
+// StorageDead(_9);
+// falseEdges -> [real: bb4, imaginary: bb2];
// }
// bb13: {
// _3 = const 1i32;
// goto -> bb16;
// }
// bb16: {
-// StorageDead(_9);
// _0 = ();
// StorageDead(_2);
// StorageDead(_1);
// START rustc.main.EraseRegions.before.mir
// fn main() -> () {
// let mut _0: ();
+// let mut _1: Packed;
// let mut _2: Aligned;
// let mut _3: Droppy;
// let mut _4: Aligned;
// let mut _5: Droppy;
// let mut _6: Aligned;
// scope 1 {
-// let mut _1: Packed;
-// }
-// scope 2 {
// }
//
// bb0: {
// bb0: {
// FakeRead(ForMatchedPlace, _1);
// _3 = discriminant(_1);
-// switchInt(move _3) -> [1isize: bb5, otherwise: bb2];
+// switchInt(move _3) -> [1isize: bb4, otherwise: bb2];
// }
// bb1: {
-// goto -> bb7;
+// goto -> bb5;
// }
// bb2: {
// goto -> bb8;
// unreachable;
// }
// bb4: {
-// goto -> bb2;
-// }
-// bb5: {
// switchInt((*(*((_1 as Some).0: &'<empty> &'<empty> i32)))) -> [0i32: bb1, otherwise: bb2];
// }
-// bb6: {
-// _0 = const 0i32;
-// goto -> bb9;
-// }
-// bb7: {
+// bb5: {
// _4 = &shallow _1;
// _5 = &shallow ((_1 as Some).0: &'<empty> &'<empty> i32);
// _6 = &shallow (*((_1 as Some).0: &'<empty> &'<empty> i32));
// FakeRead(ForMatchGuard, _5);
// FakeRead(ForMatchGuard, _6);
// FakeRead(ForMatchGuard, _7);
-// switchInt(move _8) -> [false: bb4, otherwise: bb6];
+// switchInt(move _8) -> [false: bb7, otherwise: bb6];
+// }
+// bb6: {
+// StorageDead(_8);
+// _0 = const 0i32;
+// goto -> bb9;
+// }
+// bb7: {
+// StorageDead(_8);
+// goto -> bb2;
// }
// bb8: {
// _0 = const 1i32;
// goto -> bb9;
// }
// bb9: {
-// StorageDead(_8);
// return;
// }
// bb10 (cleanup): {
// bb0: {
// nop;
// _3 = discriminant(_1);
-// switchInt(move _3) -> [1isize: bb5, otherwise: bb2];
+// switchInt(move _3) -> [1isize: bb4, otherwise: bb2];
// }
// bb1: {
-// goto -> bb7;
+// goto -> bb5;
// }
// bb2: {
// goto -> bb8;
// unreachable;
// }
// bb4: {
-// goto -> bb2;
-// }
-// bb5: {
// switchInt((*(*((_1 as Some).0: &'<empty> &'<empty> i32)))) -> [0i32: bb1, otherwise: bb2];
// }
-// bb6: {
-// _0 = const 0i32;
-// goto -> bb9;
-// }
-// bb7: {
+// bb5: {
// nop;
// nop;
// nop;
// nop;
// nop;
// nop;
-// switchInt(move _8) -> [false: bb4, otherwise: bb6];
+// switchInt(move _8) -> [false: bb7, otherwise: bb6];
+// }
+// bb6: {
+// StorageDead(_8);
+// _0 = const 0i32;
+// goto -> bb9;
+// }
+// bb7: {
+// StorageDead(_8);
+// goto -> bb2;
// }
// bb8: {
// _0 = const 1i32;
// goto -> bb9;
// }
// bb9: {
-// StorageDead(_8);
// return;
// }
// bb10 (cleanup): {
// _2 = Foo { tup: const "hi", data: move _3 };
// _1 = &_2;
// _0 = &(*_1);
-// StorageDead(_1);
// StorageDead(_5);
+// StorageDead(_1);
// return;
// }
//}
fn provide(&self, providers: &mut Providers) {
rustc_codegen_utils::symbol_names::provide(providers);
- providers.target_features_whitelist = |_tcx, _cnum| {
- Default::default() // Just a dummy
+ providers.target_features_whitelist = |tcx, _cnum| {
+ tcx.arena.alloc(Default::default()) // Just a dummy
};
providers.is_reachable_non_generic = |_tcx, _defid| true;
providers.exported_symbols = |_tcx, _crate| Arc::new(Vec::new());
--- /dev/null
+use std::marker::PhantomData;
+
+pub struct True;
+pub struct False;
+
+pub trait InterfaceType{
+ type Send;
+}
+
+
+pub struct FooInterface<T>(PhantomData<fn()->T>);
+
+impl<T> InterfaceType for FooInterface<T> {
+ type Send=False;
+}
+
+
+pub struct DynTrait<I>{
+ _interface:PhantomData<fn()->I>,
+ _unsync_unsend:PhantomData<::std::rc::Rc<()>>,
+}
+
+unsafe impl<I> Send for DynTrait<I>
+where
+ I:InterfaceType<Send=True>
+{}
+
+// @has issue_60726/struct.IntoIter.html
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]//code' "impl<T> !Send for \
+// IntoIter<T>"
+// @has - '//*[@id="synthetic-implementations-list"]/*[@class="impl"]//code' "impl<T> !Sync for \
+// IntoIter<T>"
+pub struct IntoIter<T>{
+ hello:DynTrait<FooInterface<T>>,
+}
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error[E0503]: cannot use `y` because it was mutably borrowed
--> $DIR/borrowck-anon-fields-variant.rs:37:7
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
warning[E0502]: cannot borrow `*block.current` as immutable because it is also borrowed as mutable
--> $DIR/borrowck-describe-lvalue.rs:227:33
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error[E0382]: use of moved value: `x`
--> $DIR/borrowck-describe-lvalue.rs:282:22
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
warning[E0510]: cannot mutably borrow `x` in match guard
--> $DIR/borrowck-mutate-in-guard.rs:15:33
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to 3 previous errors
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
warning[E0381]: use of possibly uninitialized variable: `b`
--> $DIR/const_let_refutable.rs:4:9
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to 2 previous errors
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error[E0723]: trait bounds other than `Sized` on const fn parameters are unstable
--> $DIR/min_const_fn.rs:144:41
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to 2 previous errors
where T : Trait,
T : Add<T::Item>
//~^ ERROR cycle detected
- //~| ERROR associated type `Item` not found for `T`
{
data: T
}
LL | T : Add<T::Item>
| ^^^^^^^
-error[E0220]: associated type `Item` not found for `T`
- --> $DIR/cycle-projection-based-on-where-clause.rs:17:19
- |
-LL | T : Add<T::Item>
- | ^^^^^^^ associated type `Item` not found
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
-Some errors have detailed explanations: E0220, E0391.
-For more information about an error, try `rustc --explain E0220`.
+For more information about this error, try `rustc --explain E0391`.
-{"artifact":"$TEST_BUILD_DIR/emit-artifact-notifications.nll/libemit_artifact_notifications.rmeta"}
+{"artifact":"$TEST_BUILD_DIR/emit-artifact-notifications.nll/libemit_artifact_notifications.rmeta","emit":"metadata"}
-{"artifact":"$TEST_BUILD_DIR/emit-artifact-notifications/libemit_artifact_notifications.rmeta"}
+{"artifact":"$TEST_BUILD_DIR/emit-artifact-notifications/libemit_artifact_notifications.rmeta","emit":"metadata"}
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to previous error
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: compilation successful
--> $DIR/feature-gate-nll.rs:10:1
#[rustc_variance] //~ ERROR the `#[rustc_variance]` attribute is just used for rustc unit tests and will never be stable
#[rustc_error] //~ ERROR the `#[rustc_error]` attribute is just used for rustc unit tests and will never be stable
+#[rustc_nonnull_optimization_guaranteed] //~ ERROR the `#[rustc_nonnull_optimization_guaranteed]` attribute is just used to enable niche optimizations in libcore and will never be stable
fn main() {}
= note: for more information, see https://github.com/rust-lang/rust/issues/29642
= help: add #![feature(rustc_attrs)] to the crate attributes to enable
-error: aborting due to 2 previous errors
+error[E0658]: the `#[rustc_nonnull_optimization_guaranteed]` attribute is just used to enable niche optimizations in libcore and will never be stable
+ --> $DIR/feature-gate-rustc-attrs-1.rs:7:1
+ |
+LL | #[rustc_nonnull_optimization_guaranteed]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = note: for more information, see https://github.com/rust-lang/rust/issues/29642
+ = help: add #![feature(rustc_attrs)] to the crate attributes to enable
+
+error: aborting due to 3 previous errors
For more information about this error, try `rustc --explain E0658`.
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to previous error
trait T : Iterator<Item=Self::Item>
//~^ ERROR cycle detected
-//~| ERROR associated type `Item` not found for `Self`
{}
fn main() {}
|
LL | / trait T : Iterator<Item=Self::Item>
LL | |
-LL | |
LL | | {}
| |__^
|
|
LL | / trait T : Iterator<Item=Self::Item>
LL | |
-LL | |
LL | | {}
| |__^
-error[E0220]: associated type `Item` not found for `Self`
- --> $DIR/issue-20772.rs:1:25
- |
-LL | trait T : Iterator<Item=Self::Item>
- | ^^^^^^^^^^ associated type `Item` not found
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
-Some errors have detailed explanations: E0220, E0391.
-For more information about an error, try `rustc --explain E0220`.
+For more information about this error, try `rustc --explain E0391`.
fn foo<T: Trait<A = T::B>>() { }
//~^ ERROR cycle detected
-//~| ERROR associated type `B` not found for `T`
fn main() { }
LL | fn foo<T: Trait<A = T::B>>() { }
| ^^^^
-error[E0220]: associated type `B` not found for `T`
- --> $DIR/issue-21177.rs:6:21
- |
-LL | fn foo<T: Trait<A = T::B>>() { }
- | ^^^^ associated type `B` not found
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
-Some errors have detailed explanations: E0220, E0391.
-For more information about an error, try `rustc --explain E0220`.
+For more information about this error, try `rustc --explain E0391`.
const A: i32 = B; //~ ERROR cycle detected
-//~^ ERROR cycle detected
const B: i32 = A;
LL | const A: i32 = B;
| ^
note: ...which requires const checking if rvalue is promotable to static `B`...
- --> $DIR/issue-23302-3.rs:4:1
+ --> $DIR/issue-23302-3.rs:3:1
|
LL | const B: i32 = A;
| ^^^^^^^^^^^^^^^^^
note: ...which requires checking which parts of `B` are promotable to static...
- --> $DIR/issue-23302-3.rs:4:16
+ --> $DIR/issue-23302-3.rs:3:16
|
LL | const B: i32 = A;
| ^
= note: ...which again requires const checking if rvalue is promotable to static `A`, completing the cycle
= note: cycle used when running analysis passes on this crate
-error[E0391]: cycle detected when processing `A`
- --> $DIR/issue-23302-3.rs:1:16
- |
-LL | const A: i32 = B;
- | ^
- |
-note: ...which requires processing `B`...
- --> $DIR/issue-23302-3.rs:4:16
- |
-LL | const B: i32 = A;
- | ^
- = note: ...which again requires processing `A`, completing the cycle
-note: cycle used when processing `A`
- --> $DIR/issue-23302-3.rs:1:1
- |
-LL | const A: i32 = B;
- | ^^^^^^^^^^^^^^^^^
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
For more information about this error, try `rustc --explain E0391`.
= note: ...therefore, they cannot allow references to captured variables to escape
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: compilation successful
--> $DIR/issue-40510-1.rs:20:1
= note: ...therefore, they cannot allow references to captured variables to escape
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: compilation successful
--> $DIR/issue-40510-3.rs:22:1
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
warning[E0713]: borrow may still be in use when destructor runs
--> $DIR/issue-45696-scribble-on-boxed-borrow.rs:62:5
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
warning[E0713]: borrow may still be in use when destructor runs
--> $DIR/issue-45696-scribble-on-boxed-borrow.rs:73:5
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: compilation successful
--> $DIR/issue-45696-scribble-on-boxed-borrow.rs:80:1
= note: ...therefore, they cannot allow references to captured variables to escape
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: compilation successful
--> $DIR/issue-49824.rs:6:1
#![deny(improper_ctypes)]
#![allow(dead_code)]
+use std::num;
+
enum Z { }
enum U { A }
enum B { C, D }
#[repr(isize)]
enum Isize { A, B, C }
+#[repr(transparent)]
+struct Transparent<T>(T, std::marker::PhantomData<Z>);
+
+struct Rust<T>(T);
+
extern {
fn zf(x: Z);
fn uf(x: U); //~ ERROR enum has no representation hint
fn bf(x: B); //~ ERROR enum has no representation hint
fn tf(x: T); //~ ERROR enum has no representation hint
- fn reprc(x: ReprC);
- fn u8(x: U8);
- fn isize(x: Isize);
+ fn repr_c(x: ReprC);
+ fn repr_u8(x: U8);
+ fn repr_isize(x: Isize);
+ fn option_ref(x: Option<&'static u8>);
+ fn option_fn(x: Option<extern "C" fn()>);
+ fn nonnull(x: Option<std::ptr::NonNull<u8>>);
+ fn nonzero_u8(x: Option<num::NonZeroU8>);
+ fn nonzero_u16(x: Option<num::NonZeroU16>);
+ fn nonzero_u32(x: Option<num::NonZeroU32>);
+ fn nonzero_u64(x: Option<num::NonZeroU64>);
+ fn nonzero_u128(x: Option<num::NonZeroU128>);
+ //~^ ERROR 128-bit integers don't currently have a known stable ABI
+ fn nonzero_usize(x: Option<num::NonZeroUsize>);
+ fn nonzero_i8(x: Option<num::NonZeroI8>);
+ fn nonzero_i16(x: Option<num::NonZeroI16>);
+ fn nonzero_i32(x: Option<num::NonZeroI32>);
+ fn nonzero_i64(x: Option<num::NonZeroI64>);
+ fn nonzero_i128(x: Option<num::NonZeroI128>);
+ //~^ ERROR 128-bit integers don't currently have a known stable ABI
+ fn nonzero_isize(x: Option<num::NonZeroIsize>);
+ fn repr_transparent(x: Option<Transparent<num::NonZeroU8>>);
+ fn repr_rust(x: Option<Rust<num::NonZeroU8>>); //~ ERROR enum has no representation hint
+ fn no_result(x: Result<(), num::NonZeroI32>); //~ ERROR enum has no representation hint
}
pub fn main() { }
error: `extern` block uses type `U` which is not FFI-safe: enum has no representation hint
- --> $DIR/lint-ctypes-enum.rs:20:13
+ --> $DIR/lint-ctypes-enum.rs:27:13
|
LL | fn uf(x: U);
| ^
| ^^^^^^^^^^^^^^^
= help: consider adding a #[repr(...)] attribute to this enum
note: type defined here
- --> $DIR/lint-ctypes-enum.rs:5:1
+ --> $DIR/lint-ctypes-enum.rs:7:1
|
LL | enum U { A }
| ^^^^^^^^^^^^
error: `extern` block uses type `B` which is not FFI-safe: enum has no representation hint
- --> $DIR/lint-ctypes-enum.rs:21:13
+ --> $DIR/lint-ctypes-enum.rs:28:13
|
LL | fn bf(x: B);
| ^
|
= help: consider adding a #[repr(...)] attribute to this enum
note: type defined here
- --> $DIR/lint-ctypes-enum.rs:6:1
+ --> $DIR/lint-ctypes-enum.rs:8:1
|
LL | enum B { C, D }
| ^^^^^^^^^^^^^^^
error: `extern` block uses type `T` which is not FFI-safe: enum has no representation hint
- --> $DIR/lint-ctypes-enum.rs:22:13
+ --> $DIR/lint-ctypes-enum.rs:29:13
|
LL | fn tf(x: T);
| ^
|
= help: consider adding a #[repr(...)] attribute to this enum
note: type defined here
- --> $DIR/lint-ctypes-enum.rs:7:1
+ --> $DIR/lint-ctypes-enum.rs:9:1
|
LL | enum T { E, F, G }
| ^^^^^^^^^^^^^^^^^^
-error: aborting due to 3 previous errors
+error: `extern` block uses type `u128` which is not FFI-safe: 128-bit integers don't currently have a known stable ABI
+ --> $DIR/lint-ctypes-enum.rs:40:23
+ |
+LL | fn nonzero_u128(x: Option<num::NonZeroU128>);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `extern` block uses type `i128` which is not FFI-safe: 128-bit integers don't currently have a known stable ABI
+ --> $DIR/lint-ctypes-enum.rs:47:23
+ |
+LL | fn nonzero_i128(x: Option<num::NonZeroI128>);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: `extern` block uses type `std::option::Option<Rust<std::num::NonZeroU8>>` which is not FFI-safe: enum has no representation hint
+ --> $DIR/lint-ctypes-enum.rs:51:20
+ |
+LL | fn repr_rust(x: Option<Rust<num::NonZeroU8>>);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a #[repr(...)] attribute to this enum
+
+error: `extern` block uses type `std::result::Result<(), std::num::NonZeroI32>` which is not FFI-safe: enum has no representation hint
+ --> $DIR/lint-ctypes-enum.rs:52:20
+ |
+LL | fn no_result(x: Result<(), num::NonZeroI32>);
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+ = help: consider adding a #[repr(...)] attribute to this enum
+
+error: aborting due to 7 previous errors
--- /dev/null
+fn deny_on_arm() {
+ match 0 {
+ #[deny(unused_variables)]
+ //~^ NOTE lint level defined here
+ y => (),
+ //~^ ERROR unused variable
+ }
+}
+
+#[deny(unused_variables)]
+fn allow_on_arm() {
+ match 0 {
+ #[allow(unused_variables)]
+ y => (), // OK
+ }
+}
+
+fn main() {}
--- /dev/null
+error: unused variable: `y`
+ --> $DIR/lint-match-arms.rs:5:9
+ |
+LL | y => (),
+ | ^ help: consider prefixing with an underscore: `_y`
+ |
+note: lint level defined here
+ --> $DIR/lint-match-arms.rs:3:16
+ |
+LL | #[deny(unused_variables)]
+ | ^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
_ => {}
}
+ // Attribute should be respected on match arms
+ match 0 {
+ #[allow(unused_mut)]
+ mut x => {
+ let mut y = 1;
+ },
+ }
+
let x = |mut y: isize| y = 32;
fn nothing(mut foo: isize) { foo = 37; }
| help: remove this `mut`
error: variable does not need to be mutable
- --> $DIR/lint-unused-mut-variables.rs:130:9
+ --> $DIR/lint-unused-mut-variables.rs:138:9
|
LL | let mut b = vec![2];
| ----^
| help: remove this `mut`
|
note: lint level defined here
- --> $DIR/lint-unused-mut-variables.rs:126:8
+ --> $DIR/lint-unused-mut-variables.rs:134:8
|
LL | #[deny(unused_mut)]
| ^^^^^^^^^^
--- /dev/null
+// Regression test for issue #61033.
+
+macro_rules! test1 {
+ ($x:ident, $($tt:tt)*) => { $($tt)+ } //~ERROR this must repeat at least once
+}
+
+fn main() {
+ test1!(x,);
+}
--- /dev/null
+error: this must repeat at least once
+ --> $DIR/issue-61033-1.rs:4:34
+ |
+LL | ($x:ident, $($tt:tt)*) => { $($tt)+ }
+ | ^^^^^
+
+error: aborting due to previous error
+
--- /dev/null
+// Regression test for issue #61033.
+
+macro_rules! test2 {
+ (
+ $(* $id1:ident)*
+ $(+ $id2:ident)*
+ ) => {
+ $( //~ERROR meta-variable `id1` repeats 2 times
+ $id1 + $id2 // $id1 and $id2 may repeat different numbers of times
+ )*
+ }
+}
+
+fn main() {
+ test2! {
+ * a * b
+ + a + b + c
+ }
+}
--- /dev/null
+error: meta-variable `id1` repeats 2 times, but `id2` repeats 3 times
+ --> $DIR/issue-61033-2.rs:8:10
+ |
+LL | $(
+ | __________^
+LL | | $id1 + $id2 // $id1 and $id2 may repeat different numbers of times
+LL | | )*
+ | |_________^
+
+error: aborting due to previous error
+
-error: invalid suffix `is` for numeric literal
+error: invalid suffix `is` for integer literal
--> $DIR/old-suffixes-are-really-forbidden.rs:2:13
|
LL | let a = 1_is;
|
= help: the suffix must be one of the integral types (`u32`, `isize`, etc)
-error: invalid suffix `us` for numeric literal
+error: invalid suffix `us` for integer literal
--> $DIR/old-suffixes-are-really-forbidden.rs:3:13
|
LL | let b = 2_us;
1234f1024; //~ ERROR invalid width `1024` for float literal
1234.5f1024; //~ ERROR invalid width `1024` for float literal
- 1234suffix; //~ ERROR invalid suffix `suffix` for numeric literal
- 0b101suffix; //~ ERROR invalid suffix `suffix` for numeric literal
+ 1234suffix; //~ ERROR invalid suffix `suffix` for integer literal
+ 0b101suffix; //~ ERROR invalid suffix `suffix` for integer literal
1.0suffix; //~ ERROR invalid suffix `suffix` for float literal
1.0e10suffix; //~ ERROR invalid suffix `suffix` for float literal
}
|
= help: valid widths are 32 and 64
-error: invalid suffix `suffix` for numeric literal
+error: invalid suffix `suffix` for integer literal
--> $DIR/bad-lit-suffixes.rs:25:5
|
LL | 1234suffix;
|
= help: the suffix must be one of the integral types (`u32`, `isize`, etc)
-error: invalid suffix `suffix` for numeric literal
+error: invalid suffix `suffix` for integer literal
--> $DIR/bad-lit-suffixes.rs:26:5
|
LL | 0b101suffix;
fn main() {
9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999
- //~^ ERROR int literal is too large
+ //~^ ERROR integer literal is too large
; // the span shouldn't point to this.
}
-error: int literal is too large
+error: integer literal is too large
--> $DIR/int-literal-too-large-span.rs:4:5
|
LL | 9999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999999
fn main() {
let __isize = 340282366920938463463374607431768211456; // 2^128
- //~^ ERROR int literal is too large
+ //~^ ERROR integer literal is too large
}
-error: int literal is too large
+error: integer literal is too large
--> $DIR/issue-5544-a.rs:2:19
|
LL | let __isize = 340282366920938463463374607431768211456; // 2^128
fn main() {
let __isize = 0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ff;
- //~^ ERROR int literal is too large
+ //~^ ERROR integer literal is too large
}
-error: int literal is too large
+error: integer literal is too large
--> $DIR/issue-5544-b.rs:2:19
|
LL | let __isize = 0xffff_ffff_ffff_ffff_ffff_ffff_ffff_ffff_ff;
0o; //~ ERROR: no valid digits
1e+; //~ ERROR: expected at least one digit in exponent
0x539.0; //~ ERROR: hexadecimal float literal is not supported
- 9900000000000000000000000000999999999999999999999999999999; //~ ERROR: int literal is too large
- 9900000000000000000000000000999999999999999999999999999999; //~ ERROR: int literal is too large
+ 9900000000000000000000000000999999999999999999999999999999;
+ //~^ ERROR: integer literal is too large
+ 9900000000000000000000000000999999999999999999999999999999;
+ //~^ ERROR: integer literal is too large
0x; //~ ERROR: no valid digits
0xu32; //~ ERROR: no valid digits
0ou32; //~ ERROR: no valid digits
| ^^^^^^^
error: no valid digits found for number
- --> $DIR/lex-bad-numeric-literals.rs:18:5
+ --> $DIR/lex-bad-numeric-literals.rs:20:5
|
LL | 0x;
| ^^
error: no valid digits found for number
- --> $DIR/lex-bad-numeric-literals.rs:19:5
+ --> $DIR/lex-bad-numeric-literals.rs:21:5
|
LL | 0xu32;
| ^^
error: no valid digits found for number
- --> $DIR/lex-bad-numeric-literals.rs:20:5
+ --> $DIR/lex-bad-numeric-literals.rs:22:5
|
LL | 0ou32;
| ^^
error: no valid digits found for number
- --> $DIR/lex-bad-numeric-literals.rs:21:5
+ --> $DIR/lex-bad-numeric-literals.rs:23:5
|
LL | 0bu32;
| ^^
error: no valid digits found for number
- --> $DIR/lex-bad-numeric-literals.rs:22:5
+ --> $DIR/lex-bad-numeric-literals.rs:24:5
|
LL | 0b;
| ^^
error: octal float literal is not supported
- --> $DIR/lex-bad-numeric-literals.rs:24:5
+ --> $DIR/lex-bad-numeric-literals.rs:26:5
|
LL | 0o123.456;
| ^^^^^^^^^
error: binary float literal is not supported
- --> $DIR/lex-bad-numeric-literals.rs:26:5
+ --> $DIR/lex-bad-numeric-literals.rs:28:5
|
LL | 0b111.101;
| ^^^^^^^^^
LL | 0o2f32;
| ^^^^^^ not supported
-error: int literal is too large
+error: integer literal is too large
--> $DIR/lex-bad-numeric-literals.rs:16:5
|
LL | 9900000000000000000000000000999999999999999999999999999999;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: int literal is too large
- --> $DIR/lex-bad-numeric-literals.rs:17:5
+error: integer literal is too large
+ --> $DIR/lex-bad-numeric-literals.rs:18:5
|
LL | 9900000000000000000000000000999999999999999999999999999999;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: octal float literal is not supported
- --> $DIR/lex-bad-numeric-literals.rs:23:5
+ --> $DIR/lex-bad-numeric-literals.rs:25:5
|
LL | 0o123f64;
| ^^^^^^^^ not supported
error: binary float literal is not supported
- --> $DIR/lex-bad-numeric-literals.rs:25:5
+ --> $DIR/lex-bad-numeric-literals.rs:27:5
|
LL | 0b101f64;
| ^^^^^^^^ not supported
0b101.010;
//~^ ERROR binary float literal is not supported
0b101p4f64;
- //~^ ERROR invalid suffix `p4f64` for numeric literal
+ //~^ ERROR invalid suffix `p4f64` for integer literal
}
LL | 0b101010f64;
| ^^^^^^^^^^^ not supported
-error: invalid suffix `p4f64` for numeric literal
+error: invalid suffix `p4f64` for integer literal
--> $DIR/no-binary-float-literal.rs:6:5
|
LL | 0b101p4f64;
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to previous error
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error: aborting due to previous error
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error[E0625]: thread-local statics cannot be accessed at compile-time
--> $DIR/thread-local-in-ctfe.rs:15:16
|
= warning: this error has been downgraded to a warning for backwards compatibility with previous releases
= warning: this represents potential undefined behavior in your code and this warning will become a hard error in the future
+ = note: for more information, try `rustc --explain E0729`
error[E0625]: thread-local statics cannot be accessed at compile-time
--> $DIR/thread-local-in-ctfe.rs:25:5
-Subproject commit 60a609acaed3bf2b3ec6ab995bccf0f03bc26060
+Subproject commit a8eeb7cdb135da1cd582c6093c1739732727a4a2