[[package]]
name = "minifier"
-version = "0.0.11"
+version = "0.0.14"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"regex 0.2.10 (registry+https://github.com/rust-lang/crates.io-index)",
name = "rustc_allocator"
version = "0.0.0"
dependencies = [
+ "log 0.4.1 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc 0.0.0",
"rustc_errors 0.0.0",
"rustc_target 0.0.0",
name = "rustdoc"
version = "0.0.0"
dependencies = [
- "minifier 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)",
+ "minifier 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)",
"pulldown-cmark 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"tempfile 3.0.2 (registry+https://github.com/rust-lang/crates.io-index)",
]
version = "0.0.0"
dependencies = [
"arena 0.0.0",
+ "cfg-if 0.1.2 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc_data_structures 0.0.0",
"scoped-tls 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"serialize 0.0.0",
"checksum mdbook 0.1.7 (registry+https://github.com/rust-lang/crates.io-index)" = "90b5a8d7e341ceee5db3882a06078d42661ddcfa2b3687319cc5da76ec4e782f"
"checksum memchr 2.0.1 (registry+https://github.com/rust-lang/crates.io-index)" = "796fba70e76612589ed2ce7f45282f5af869e0fdd7cc6199fa1aa1f1d591ba9d"
"checksum memoffset 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)" = "0f9dc261e2b62d7a622bf416ea3c5245cdd5d9a7fcc428c0d06804dfce1775b3"
-"checksum minifier 0.0.11 (registry+https://github.com/rust-lang/crates.io-index)" = "26f3e36a4db1981b16567e4abfd6ddc3641bc9b950bdc868701f656bf9b74bdd"
+"checksum minifier 0.0.14 (registry+https://github.com/rust-lang/crates.io-index)" = "78cb57f9a385530d60f2d67f6e108050b478b7a0ffd0bb9c350803e1356535dd"
"checksum miniz-sys 0.1.10 (registry+https://github.com/rust-lang/crates.io-index)" = "609ce024854aeb19a0ef7567d348aaa5a746b32fb72e336df7fcc16869d7e2b4"
"checksum miow 0.3.1 (registry+https://github.com/rust-lang/crates.io-index)" = "9224c91f82b3c47cf53dcf78dfaa20d6888fbcc5d272d5f2fcdf8a697f3c987d"
"checksum nibble_vec 0.0.4 (registry+https://github.com/rust-lang/crates.io-index)" = "c8d77f3db4bce033f4d04db08079b2ef1c3d02b44e86f25d08886fafa7756ffa"
} else if component == "rustfmt" {
format!("{}-{}", component, builder.rustfmt_package_vers())
} else if component == "llvm-tools" {
- format!("{}-{}", component, builder.llvm_tools_vers())
+ format!("{}-{}", component, builder.llvm_tools_package_vers())
} else {
assert!(component.starts_with("rust"));
format!("{}-{}", component, builder.rust_package_vers())
let cargo_installer = builder.ensure(Cargo { stage, target });
let rustfmt_installer = builder.ensure(Rustfmt { stage, target });
let rls_installer = builder.ensure(Rls { stage, target });
+ let llvm_tools_installer = builder.ensure(LlvmTools { stage, target });
let mingw_installer = builder.ensure(Mingw { host: target });
let analysis_installer = builder.ensure(Analysis {
compiler: builder.compiler(stage, self.host),
tarballs.push(cargo_installer);
tarballs.extend(rls_installer.clone());
tarballs.extend(rustfmt_installer.clone());
+ tarballs.extend(llvm_tools_installer.clone());
tarballs.push(analysis_installer);
tarballs.push(std_installer);
if builder.config.docs {
cmd.arg(builder.package_vers(&builder.release_num("cargo")));
cmd.arg(builder.package_vers(&builder.release_num("rls")));
cmd.arg(builder.package_vers(&builder.release_num("rustfmt")));
- cmd.arg(builder.llvm_tools_vers());
+ cmd.arg(builder.llvm_tools_package_vers());
cmd.arg(addr);
builder.create_dir(&distdir(builder));
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct LlvmTools {
pub stage: u32,
- pub compiler: Compiler,
pub target: Interned<String>,
}
fn make_run(run: RunConfig) {
run.builder.ensure(LlvmTools {
stage: run.builder.top_stage,
- compiler: run.builder.compiler(run.builder.top_stage, run.target),
target: run.target,
});
}
fn run(self, builder: &Builder) -> Option<PathBuf> {
- let compiler = self.compiler;
- let host = compiler.host;
-
let stage = self.stage;
+ let target = self.target;
assert!(builder.config.extended);
- builder.info(&format!("Dist LlvmTools stage{} ({})", stage, host));
+ builder.info(&format!("Dist LlvmTools stage{} ({})", stage, target));
let src = builder.src.join("src/llvm");
let name = pkgname(builder, "llvm-tools");
// Prepare the image directory
for tool in LLVM_TOOLS {
let exe = builder
- .llvm_out(host)
+ .llvm_out(target)
.join("bin")
- .join(exe(tool, &compiler.host));
+ .join(exe(tool, &target));
builder.install(&exe, &image.join("bin"), 0o755);
}
builder.create_dir(&overlay);
builder.install(&src.join("README.txt"), &overlay, 0o644);
builder.install(&src.join("LICENSE.TXT"), &overlay, 0o644);
+ builder.create(&overlay.join("version"), &builder.llvm_tools_vers());
// Generate the installer tarball
let mut cmd = rust_installer(builder);
.arg("--work-dir").arg(&tmpdir(builder))
.arg("--output-dir").arg(&distdir(builder))
.arg("--non-installed-overlay").arg(&overlay)
- .arg(format!("--package-name={}-{}", name, host))
+ .arg(format!("--package-name={}-{}", name, target))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=llvm-tools");
builder.run(&mut cmd);
- Some(distdir(builder).join(format!("{}-{}.tar.gz", name, host)))
+ Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
}
}
self.package_vers(&self.release_num("rustfmt"))
}
- fn llvm_tools_vers(&self) -> String {
- // japaric: should we use LLVM version here?
- // let stdout = build_helper::output(
- // Command::new(self.llvm_out(self.config.build).join("build/bin/llvm-size"))
- // .arg("--version"),
- // );
-
- // for line in stdout.lines() {
- // if line.contains("LLVM version") {
- // if let Some(vers) = line.split_whitespace().nth(2) {
- // return vers.to_string();
- // }
- // }
- // }
-
- // panic!("The output of $LLVM_TOOL has changed; \
- // please fix `bootstrap::Build.llvm_tools_vers`");
+ fn llvm_tools_package_vers(&self) -> String {
+ self.package_vers(&self.rust_version())
+ }
+ fn llvm_tools_vers(&self) -> String {
self.rust_version()
}
/// Whether this tool requires LLVM to run
pub fn uses_llvm_tools(&self) -> bool {
match self {
- $(Tool::$name => true $(&& $llvm)*,)+
+ $(Tool::$name => false $(|| $llvm)*,)+
}
}
}
}
}
-// FIXME(#51459): We have only checked that RustInstaller does not require
-// the LLVM binaries when running. We should go through all tools to determine
-// if they really need LLVM binaries, and make `llvm_tools` a required argument.
tool!(
Rustbook, "src/tools/rustbook", "rustbook", Mode::ToolRustc;
ErrorIndex, "src/tools/error_index_generator", "error_index_generator", Mode::ToolRustc;
Tidy, "src/tools/tidy", "tidy", Mode::ToolStd;
Linkchecker, "src/tools/linkchecker", "linkchecker", Mode::ToolStd;
CargoTest, "src/tools/cargotest", "cargotest", Mode::ToolStd;
- Compiletest, "src/tools/compiletest", "compiletest", Mode::ToolTest;
+ Compiletest, "src/tools/compiletest", "compiletest", Mode::ToolTest, llvm_tools = true;
BuildManifest, "src/tools/build-manifest", "build-manifest", Mode::ToolStd;
RemoteTestClient, "src/tools/remote-test-client", "remote-test-client", Mode::ToolStd;
- RustInstaller, "src/tools/rust-installer", "fabricate", Mode::ToolStd, llvm_tools = false;
+ RustInstaller, "src/tools/rust-installer", "fabricate", Mode::ToolStd;
RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes", Mode::ToolStd;
);
--- /dev/null
+# `irrefutable_let_patterns`
+
+The tracking issue for this feature is: [#44495]
+
+[#44495]: https://github.com/rust-lang/rust/issues/44495
+
+------------------------
+
+This feature changes the way that "irrefutable patterns" are handled
+in the `if let` and `while let` forms. An *irrefutable pattern* is one
+that cannot fail to match -- for example, the `_` pattern matches any
+value, and hence it is "irrefutable". Without this feature, using an
+irrefutable pattern in an `if let` gives a hard error (since often
+this indicates programmer error). But when the feature is enabled, the
+error becomes a lint (since in some cases irrefutable patterns are
+expected). This means you can use `#[allow]` to silence the lint:
+
+```rust
+#![feature(irrefutable_let_patterns)]
+
+#[allow(irrefutable_let_patterns)]
+fn main() {
+ // These two examples used to be errors, but now they
+ // trigger a lint (that is allowed):
+ if let _ = 5 {}
+ while let _ = 5 { break; }
+}
+```
use core::mem::{self, PinMut};
use core::ops::{CoerceUnsized, Deref, DerefMut, Generator, GeneratorState};
use core::ptr::{self, NonNull, Unique};
-use core::task::{Context, Poll, UnsafeTask, TaskObj};
+use core::task::{Context, Poll, UnsafeTask, TaskObj, LocalTaskObj};
use core::convert::From;
use raw_vec::RawVec;
}
#[unstable(feature = "futures_api", issue = "50547")]
-unsafe impl<F: Future<Output = ()> + Send + 'static> UnsafeTask for PinBox<F> {
+unsafe impl<F: Future<Output = ()> + 'static> UnsafeTask for PinBox<F> {
fn into_raw(self) -> *mut () {
PinBox::into_raw(self) as *mut ()
}
TaskObj::new(PinBox::from(boxed))
}
}
+
+#[unstable(feature = "futures_api", issue = "50547")]
+impl<F: Future<Output = ()> + 'static> From<PinBox<F>> for LocalTaskObj {
+ fn from(boxed: PinBox<F>) -> Self {
+ LocalTaskObj::new(boxed)
+ }
+}
+
+#[unstable(feature = "futures_api", issue = "50547")]
+impl<F: Future<Output = ()> + 'static> From<Box<F>> for LocalTaskObj {
+ fn from(boxed: Box<F>) -> Self {
+ LocalTaskObj::new(PinBox::from(boxed))
+ }
+}
#![cfg_attr(stage0, feature(repr_transparent))]
#![feature(rustc_attrs)]
#![feature(specialization)]
+#![feature(split_ascii_whitespace)]
#![feature(staged_api)]
#![feature(str_internals)]
#![feature(trusted_len)]
pub use core::str::pattern;
#[stable(feature = "encode_utf16", since = "1.8.0")]
pub use core::str::EncodeUtf16;
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+pub use core::str::SplitAsciiWhitespace;
#[unstable(feature = "slice_concat_ext",
reason = "trait should not have to exist",
/// or returns `LayoutErr` if either of the following conditions
/// are not met:
///
+ /// * `align` must not be zero,
+ ///
/// * `align` must be a power of two,
///
/// * `size`, when rounded up to the nearest multiple of `align`,
b'\\' => ([b'\\', b'\\', 0, 0], 2),
b'\'' => ([b'\\', b'\'', 0, 0], 2),
b'"' => ([b'\\', b'"', 0, 0], 2),
- b'\x20' ... b'\x7e' => ([c, 0, 0, 0], 1),
+ b'\x20' ..= b'\x7e' => ([c, 0, 0, 0], 1),
_ => ([b'\\', b'x', hexify(c >> 4), hexify(c & 0xf)], 4),
};
fn hexify(b: u8) -> u8 {
match b {
- 0 ... 9 => b'0' + b,
+ 0 ..= 9 => b'0' + b,
_ => b'a' + b - 10,
}
}
}
}
-// Values [1, MIN_WRITING-1] represent the number of `Ref` active. Values in
-// [MIN_WRITING, MAX-1] represent the number of `RefMut` active. Multiple
-// `RefMut`s can only be active at a time if they refer to distinct,
-// nonoverlapping components of a `RefCell` (e.g., different ranges of a slice).
+// Positive values represent the number of `Ref` active. Negative values
+// represent the number of `RefMut` active. Multiple `RefMut`s can only be
+// active at a time if they refer to distinct, nonoverlapping components of a
+// `RefCell` (e.g., different ranges of a slice).
//
// `Ref` and `RefMut` are both two words in size, and so there will likely never
// be enough `Ref`s or `RefMut`s in existence to overflow half of the `usize`
-// range. Thus, a `BorrowFlag` will probably never overflow. However, this is
-// not a guarantee, as a pathological program could repeatedly create and then
-// mem::forget `Ref`s or `RefMut`s. Thus, all code must explicitly check for
-// overflow in order to avoid unsafety.
-type BorrowFlag = usize;
+// range. Thus, a `BorrowFlag` will probably never overflow or underflow.
+// However, this is not a guarantee, as a pathological program could repeatedly
+// create and then mem::forget `Ref`s or `RefMut`s. Thus, all code must
+// explicitly check for overflow and underflow in order to avoid unsafety, or at
+// least behave correctly in the event that overflow or underflow happens (e.g.,
+// see BorrowRef::new).
+type BorrowFlag = isize;
const UNUSED: BorrowFlag = 0;
-const MIN_WRITING: BorrowFlag = (!0)/2 + 1; // 0b1000...
+
+#[inline(always)]
+fn is_writing(x: BorrowFlag) -> bool {
+ x < UNUSED
+}
+
+#[inline(always)]
+fn is_reading(x: BorrowFlag) -> bool {
+ x > UNUSED
+}
impl<T> RefCell<T> {
/// Creates a new `RefCell` containing `value`.
#[inline]
fn new(borrow: &'b Cell<BorrowFlag>) -> Option<BorrowRef<'b>> {
let b = borrow.get();
- if b >= MIN_WRITING {
+ if is_writing(b) || b == isize::max_value() {
+ // If there's currently a writing borrow, or if incrementing the
+ // refcount would overflow into a writing borrow.
None
} else {
- // Prevent the borrow counter from overflowing into
- // a writing borrow.
- assert!(b < MIN_WRITING - 1);
borrow.set(b + 1);
Some(BorrowRef { borrow })
}
#[inline]
fn drop(&mut self) {
let borrow = self.borrow.get();
- debug_assert!(borrow < MIN_WRITING && borrow != UNUSED);
+ debug_assert!(is_reading(borrow));
self.borrow.set(borrow - 1);
}
}
#[inline]
fn clone(&self) -> BorrowRef<'b> {
// Since this Ref exists, we know the borrow flag
- // is not set to WRITING.
+ // is a reading borrow.
let borrow = self.borrow.get();
- debug_assert!(borrow != UNUSED);
+ debug_assert!(is_reading(borrow));
// Prevent the borrow counter from overflowing into
// a writing borrow.
- assert!(borrow < MIN_WRITING - 1);
+ assert!(borrow != isize::max_value());
self.borrow.set(borrow + 1);
BorrowRef { borrow: self.borrow }
}
#[inline]
fn drop(&mut self) {
let borrow = self.borrow.get();
- debug_assert!(borrow >= MIN_WRITING);
- self.borrow.set(if borrow == MIN_WRITING {
- UNUSED
- } else {
- borrow - 1
- });
+ debug_assert!(is_writing(borrow));
+ self.borrow.set(borrow + 1);
}
}
// NOTE: Unlike BorrowRefMut::clone, new is called to create the initial
// mutable reference, and so there must currently be no existing
// references. Thus, while clone increments the mutable refcount, here
- // we simply go directly from UNUSED to MIN_WRITING.
+ // we explicitly only allow going from UNUSED to UNUSED - 1.
match borrow.get() {
UNUSED => {
- borrow.set(MIN_WRITING);
+ borrow.set(UNUSED - 1);
Some(BorrowRefMut { borrow: borrow })
},
_ => None,
#[inline]
fn clone(&self) -> BorrowRefMut<'b> {
let borrow = self.borrow.get();
- debug_assert!(borrow >= MIN_WRITING);
- // Prevent the borrow counter from overflowing.
- assert!(borrow != !0);
- self.borrow.set(borrow + 1);
+ debug_assert!(is_writing(borrow));
+ // Prevent the borrow counter from underflowing.
+ assert!(borrow != isize::min_value());
+ self.borrow.set(borrow - 1);
BorrowRefMut { borrow: self.borrow }
}
}
}
}
macro_rules! continuation_byte {
- () => { continuation_byte!(0x80...0xBF) };
+ () => { continuation_byte!(0x80..=0xBF) };
($range: pat) => {
match self.0.peek() {
Some(&byte @ $range) => {
}
match first_byte {
- 0x00...0x7F => {
+ 0x00..=0x7F => {
first_byte!(0b1111_1111);
}
- 0xC2...0xDF => {
+ 0xC2..=0xDF => {
first_byte!(0b0001_1111);
continuation_byte!();
}
0xE0 => {
first_byte!(0b0000_1111);
- continuation_byte!(0xA0...0xBF); // 0x80...0x9F here are overlong
+ continuation_byte!(0xA0..=0xBF); // 0x80..=0x9F here are overlong
continuation_byte!();
}
- 0xE1...0xEC | 0xEE...0xEF => {
+ 0xE1..=0xEC | 0xEE..=0xEF => {
first_byte!(0b0000_1111);
continuation_byte!();
continuation_byte!();
}
0xED => {
first_byte!(0b0000_1111);
- continuation_byte!(0x80...0x9F); // 0xA0..0xBF here are surrogates
+ continuation_byte!(0x80..=0x9F); // 0xA0..=0xBF here are surrogates
continuation_byte!();
}
0xF0 => {
first_byte!(0b0000_0111);
- continuation_byte!(0x90...0xBF); // 0x80..0x8F here are overlong
+ continuation_byte!(0x90..=0xBF); // 0x80..=0x8F here are overlong
continuation_byte!();
continuation_byte!();
}
- 0xF1...0xF3 => {
+ 0xF1..=0xF3 => {
first_byte!(0b0000_0111);
continuation_byte!();
continuation_byte!();
}
0xF4 => {
first_byte!(0b0000_0111);
- continuation_byte!(0x80...0x8F); // 0x90..0xBF here are beyond char::MAX
+ continuation_byte!(0x80..=0x8F); // 0x90..=0xBF here are beyond char::MAX
continuation_byte!();
continuation_byte!();
}
panic!("to_digit: radix is too high (maximum 36)");
}
let val = match self {
- '0' ... '9' => self as u32 - '0' as u32,
- 'a' ... 'z' => self as u32 - 'a' as u32 + 10,
- 'A' ... 'Z' => self as u32 - 'A' as u32 + 10,
+ '0' ..= '9' => self as u32 - '0' as u32,
+ 'a' ..= 'z' => self as u32 - 'a' as u32 + 10,
+ 'A' ..= 'Z' => self as u32 - 'A' as u32 + 10,
_ => return None,
};
if val < radix { Some(val) }
'\r' => EscapeDefaultState::Backslash('r'),
'\n' => EscapeDefaultState::Backslash('n'),
'\\' | '\'' | '"' => EscapeDefaultState::Backslash(self),
- '\x20' ... '\x7e' => EscapeDefaultState::Char(self),
+ '\x20' ..= '\x7e' => EscapeDefaultState::Char(self),
_ => EscapeDefaultState::Unicode(self.escape_unicode())
};
EscapeDefault { state: init_state }
#[inline]
pub fn is_alphabetic(self) -> bool {
match self {
- 'a'...'z' | 'A'...'Z' => true,
+ 'a'..='z' | 'A'..='Z' => true,
c if c > '\x7f' => derived_property::Alphabetic(c),
_ => false,
}
#[inline]
pub fn is_lowercase(self) -> bool {
match self {
- 'a'...'z' => true,
+ 'a'..='z' => true,
c if c > '\x7f' => derived_property::Lowercase(c),
_ => false,
}
#[inline]
pub fn is_uppercase(self) -> bool {
match self {
- 'A'...'Z' => true,
+ 'A'..='Z' => true,
c if c > '\x7f' => derived_property::Uppercase(c),
_ => false,
}
#[inline]
pub fn is_whitespace(self) -> bool {
match self {
- ' ' | '\x09'...'\x0d' => true,
+ ' ' | '\x09'..='\x0d' => true,
c if c > '\x7f' => property::White_Space(c),
_ => false,
}
#[inline]
pub fn is_numeric(self) -> bool {
match self {
- '0'...'9' => true,
+ '0'..='9' => true,
c if c > '\x7f' => general_category::N(c),
_ => false,
}
fn digit(x: u8) -> u8 {
match x {
$($x => $conv,)+
- x => panic!("number not in the range 0..{}: {}", Self::BASE - 1, x),
+ x => panic!("number not in the range 0..={}: {}", Self::BASE - 1, x),
}
}
}
}
}
-radix! { Binary, 2, "0b", x @ 0 ... 1 => b'0' + x }
-radix! { Octal, 8, "0o", x @ 0 ... 7 => b'0' + x }
-radix! { LowerHex, 16, "0x", x @ 0 ... 9 => b'0' + x,
- x @ 10 ... 15 => b'a' + (x - 10) }
-radix! { UpperHex, 16, "0x", x @ 0 ... 9 => b'0' + x,
- x @ 10 ... 15 => b'A' + (x - 10) }
+radix! { Binary, 2, "0b", x @ 0 ..= 1 => b'0' + x }
+radix! { Octal, 8, "0o", x @ 0 ..= 7 => b'0' + x }
+radix! { LowerHex, 16, "0x", x @ 0 ..= 9 => b'0' + x,
+ x @ 10 ..= 15 => b'a' + (x - 10) }
+radix! { UpperHex, 16, "0x", x @ 0 ..= 9 => b'0' + x,
+ x @ 10 ..= 15 => b'A' + (x - 10) }
macro_rules! int_base {
($Trait:ident for $T:ident as $U:ident -> $Radix:ident) => {
pub fn new(reference: &'a mut T) -> PinMut<'a, T> {
PinMut { inner: reference }
}
+
+ /// Get a mutable reference to the data inside of this `PinMut`.
+ #[unstable(feature = "pin", issue = "49150")]
+ pub fn get_mut(this: PinMut<'a, T>) -> &'a mut T {
+ this.inner
+ }
}
/// the data out of the mutable reference you receive when you call this
/// function.
#[unstable(feature = "pin", issue = "49150")]
- pub unsafe fn get_mut(this: PinMut<'a, T>) -> &'a mut T {
+ pub unsafe fn get_mut_unchecked(this: PinMut<'a, T>) -> &'a mut T {
this.inner
}
/// Construct a new pin by mapping the interior value.
///
- /// For example, if you wanted to get a `PinMut` of a field of something, you
- /// could use this to get access to that field in one line of code.
+ /// For example, if you wanted to get a `PinMut` of a field of something,
+ /// you could use this to get access to that field in one line of code.
///
/// This function is unsafe. You must guarantee that the data you return
/// will not move so long as the argument value does not move (for example,
/// because it is one of the fields of that value), and also that you do
/// not move out of the argument you receive to the interior function.
#[unstable(feature = "pin", issue = "49150")]
- pub unsafe fn map<U, F>(this: PinMut<'a, T>, f: F) -> PinMut<'a, U> where
+ pub unsafe fn map_unchecked<U, F>(this: PinMut<'a, T>, f: F) -> PinMut<'a, U> where
F: FnOnce(&mut T) -> &mut U
{
PinMut { inner: f(this.inner) }
/// # Examples
///
/// ```
- /// #![feature(int_to_from_bytes)]
- ///
/// let bytes = i32::min_value().to_be().to_bytes();
/// assert_eq!(bytes, [0x80, 0, 0, 0]);
/// ```
- #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+ #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
#[inline]
pub fn to_bytes(self) -> [u8; mem::size_of::<Self>()] {
unsafe { mem::transmute(self) }
/// # Examples
///
/// ```
- /// #![feature(int_to_from_bytes)]
- ///
/// let int = i32::from_be(i32::from_bytes([0x80, 0, 0, 0]));
/// assert_eq!(int, i32::min_value());
/// ```
- #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+ #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
#[inline]
pub fn from_bytes(bytes: [u8; mem::size_of::<Self>()]) -> Self {
unsafe { mem::transmute(bytes) }
/// # Examples
///
/// ```
- /// #![feature(int_to_from_bytes)]
- ///
/// let bytes = 0x1234_5678_u32.to_be().to_bytes();
/// assert_eq!(bytes, [0x12, 0x34, 0x56, 0x78]);
/// ```
- #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+ #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
#[inline]
pub fn to_bytes(self) -> [u8; mem::size_of::<Self>()] {
unsafe { mem::transmute(self) }
/// # Examples
///
/// ```
- /// #![feature(int_to_from_bytes)]
- ///
/// let int = u32::from_be(u32::from_bytes([0x12, 0x34, 0x56, 0x78]));
/// assert_eq!(int, 0x1234_5678_u32);
/// ```
- #[unstable(feature = "int_to_from_bytes", issue = "49792")]
+ #[stable(feature = "int_to_from_bytes", since = "1.29.0")]
#[inline]
pub fn from_bytes(bytes: [u8; mem::size_of::<Self>()]) -> Self {
unsafe { mem::transmute(bytes) }
#[unstable(feature = "pin", issue = "49150")]
pub fn as_pin_mut<'a>(self: PinMut<'a, Self>) -> Option<PinMut<'a, T>> {
unsafe {
- PinMut::get_mut(self).as_mut().map(|x| PinMut::new_unchecked(x))
+ PinMut::get_mut_unchecked(self).as_mut().map(|x| PinMut::new_unchecked(x))
}
}
/// assert_eq!(s.binary_search(&4), Err(7));
/// assert_eq!(s.binary_search(&100), Err(13));
/// let r = s.binary_search(&1);
- /// assert!(match r { Ok(1...4) => true, _ => false, });
+ /// assert!(match r { Ok(1..=4) => true, _ => false, });
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn binary_search(&self, x: &T) -> Result<usize, usize>
/// assert_eq!(s.binary_search_by(|probe| probe.cmp(&seek)), Err(13));
/// let seek = 1;
/// let r = s.binary_search_by(|probe| probe.cmp(&seek));
- /// assert!(match r { Ok(1...4) => true, _ => false, });
+ /// assert!(match r { Ok(1..=4) => true, _ => false, });
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
/// assert_eq!(s.binary_search_by_key(&4, |&(a,b)| b), Err(7));
/// assert_eq!(s.binary_search_by_key(&100, |&(a,b)| b), Err(13));
/// let r = s.binary_search_by_key(&1, |&(a,b)| b);
- /// assert!(match r { Ok(1...4) => true, _ => false, });
+ /// assert!(match r { Ok(1..=4) => true, _ => false, });
/// ```
#[stable(feature = "slice_binary_search_by_key", since = "1.10.0")]
#[inline]
}
3 => {
match (byte, safe_get(self.source, i)) {
- (0xE0, 0xA0 ... 0xBF) => (),
- (0xE1 ... 0xEC, 0x80 ... 0xBF) => (),
- (0xED, 0x80 ... 0x9F) => (),
- (0xEE ... 0xEF, 0x80 ... 0xBF) => (),
+ (0xE0, 0xA0 ..= 0xBF) => (),
+ (0xE1 ..= 0xEC, 0x80 ..= 0xBF) => (),
+ (0xED, 0x80 ..= 0x9F) => (),
+ (0xEE ..= 0xEF, 0x80 ..= 0xBF) => (),
_ => {
error!();
}
}
4 => {
match (byte, safe_get(self.source, i)) {
- (0xF0, 0x90 ... 0xBF) => (),
- (0xF1 ... 0xF3, 0x80 ... 0xBF) => (),
- (0xF4, 0x80 ... 0x8F) => (),
+ (0xF0, 0x90 ..= 0xBF) => (),
+ (0xF1 ..= 0xF3, 0x80 ..= 0xBF) => (),
+ (0xF4, 0x80 ..= 0x8F) => (),
_ => {
error!();
}
use fmt;
use iter::{Map, Cloned, FusedIterator, TrustedLen, Filter};
use iter_private::TrustedRandomAccess;
-use slice::{self, SliceIndex};
+use slice::{self, SliceIndex, Split as SliceSplit};
use mem;
pub mod pattern;
},
3 => {
match (first, next!()) {
- (0xE0 , 0xA0 ... 0xBF) |
- (0xE1 ... 0xEC, 0x80 ... 0xBF) |
- (0xED , 0x80 ... 0x9F) |
- (0xEE ... 0xEF, 0x80 ... 0xBF) => {}
+ (0xE0 , 0xA0 ..= 0xBF) |
+ (0xE1 ..= 0xEC, 0x80 ..= 0xBF) |
+ (0xED , 0x80 ..= 0x9F) |
+ (0xEE ..= 0xEF, 0x80 ..= 0xBF) => {}
_ => err!(Some(1))
}
if next!() & !CONT_MASK != TAG_CONT_U8 {
}
4 => {
match (first, next!()) {
- (0xF0 , 0x90 ... 0xBF) |
- (0xF1 ... 0xF3, 0x80 ... 0xBF) |
- (0xF4 , 0x80 ... 0x8F) => {}
+ (0xF0 , 0x90 ..= 0xBF) |
+ (0xF1 ..= 0xF3, 0x80 ..= 0xBF) |
+ (0xF4 , 0x80 ..= 0x8F) => {}
_ => err!(Some(1))
}
if next!() & !CONT_MASK != TAG_CONT_U8 {
/// the original string slice, separated by any amount of whitespace.
///
/// 'Whitespace' is defined according to the terms of the Unicode Derived
- /// Core Property `White_Space`.
+ /// Core Property `White_Space`. If you only want to split on ASCII whitespace
+ /// instead, use [`split_ascii_whitespace`].
+ ///
+ /// [`split_ascii_whitespace`]: #method.split_ascii_whitespace
///
/// # Examples
///
SplitWhitespace { inner: self.split(IsWhitespace).filter(IsNotEmpty) }
}
+ /// Split a string slice by ASCII whitespace.
+ ///
+ /// The iterator returned will return string slices that are sub-slices of
+ /// the original string slice, separated by any amount of ASCII whitespace.
+ ///
+ /// To split by Unicode `Whitespace` instead, use [`split_whitespace`].
+ ///
+ /// [`split_whitespace`]: #method.split_whitespace
+ ///
+ /// # Examples
+ ///
+ /// Basic usage:
+ ///
+ /// ```
+ /// #![feature(split_ascii_whitespace)]
+ /// let mut iter = "A few words".split_ascii_whitespace();
+ ///
+ /// assert_eq!(Some("A"), iter.next());
+ /// assert_eq!(Some("few"), iter.next());
+ /// assert_eq!(Some("words"), iter.next());
+ ///
+ /// assert_eq!(None, iter.next());
+ /// ```
+ ///
+ /// All kinds of ASCII whitespace are considered:
+ ///
+ /// ```
+ /// #![feature(split_ascii_whitespace)]
+ /// let mut iter = " Mary   had\ta little  \n\t lamb".split_ascii_whitespace();
+ /// assert_eq!(Some("Mary"), iter.next());
+ /// assert_eq!(Some("had"), iter.next());
+ /// assert_eq!(Some("a"), iter.next());
+ /// assert_eq!(Some("little"), iter.next());
+ /// assert_eq!(Some("lamb"), iter.next());
+ ///
+ /// assert_eq!(None, iter.next());
+ /// ```
+ #[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+ #[inline]
+ pub fn split_ascii_whitespace(&self) -> SplitAsciiWhitespace {
+ let inner = self
+ .as_bytes()
+ .split(IsAsciiWhitespace)
+ .filter(IsNotEmpty)
+ .map(UnsafeBytesToStr);
+ SplitAsciiWhitespace { inner }
+ }
+
/// An iterator over the lines of a string, as string slices.
///
/// Lines are ended with either a newline (`\n`) or a carriage return with
inner: Filter<Split<'a, IsWhitespace>, IsNotEmpty>,
}
+/// An iterator over the non-ASCII-whitespace substrings of a string,
+/// separated by any amount of ASCII whitespace.
+///
+/// This struct is created by the [`split_ascii_whitespace`] method on [`str`].
+/// See its documentation for more.
+///
+/// [`split_ascii_whitespace`]: ../../std/primitive.str.html#method.split_ascii_whitespace
+/// [`str`]: ../../std/primitive.str.html
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+#[derive(Clone, Debug)]
+pub struct SplitAsciiWhitespace<'a> {
+ inner: Map<Filter<SliceSplit<'a, u8, IsAsciiWhitespace>, IsNotEmpty>, UnsafeBytesToStr>,
+}
+
#[derive(Clone)]
struct IsWhitespace;
}
}
+#[derive(Clone)]
+struct IsAsciiWhitespace;
+
+impl<'a> FnOnce<(&'a u8, )> for IsAsciiWhitespace {
+ type Output = bool;
+
+ #[inline]
+ extern "rust-call" fn call_once(mut self, arg: (&u8, )) -> bool {
+ self.call_mut(arg)
+ }
+}
+
+impl<'a> FnMut<(&'a u8, )> for IsAsciiWhitespace {
+ #[inline]
+ extern "rust-call" fn call_mut(&mut self, arg: (&u8, )) -> bool {
+ arg.0.is_ascii_whitespace()
+ }
+}
+
#[derive(Clone)]
struct IsNotEmpty;
type Output = bool;
#[inline]
- extern "rust-call" fn call_once(mut self, arg: (&&str, )) -> bool {
+ extern "rust-call" fn call_once(mut self, arg: (&'a &'b str, )) -> bool {
self.call_mut(arg)
}
}
impl<'a, 'b> FnMut<(&'a &'b str, )> for IsNotEmpty {
#[inline]
- extern "rust-call" fn call_mut(&mut self, arg: (&&str, )) -> bool {
+ extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b str, )) -> bool {
+ !arg.0.is_empty()
+ }
+}
+
+impl<'a, 'b> FnOnce<(&'a &'b [u8], )> for IsNotEmpty {
+ type Output = bool;
+
+ #[inline]
+ extern "rust-call" fn call_once(mut self, arg: (&'a &'b [u8], )) -> bool {
+ self.call_mut(arg)
+ }
+}
+
+impl<'a, 'b> FnMut<(&'a &'b [u8], )> for IsNotEmpty {
+ #[inline]
+ extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b [u8], )) -> bool {
!arg.0.is_empty()
}
}
+#[derive(Clone)]
+struct UnsafeBytesToStr;
+
+impl<'a> FnOnce<(&'a [u8], )> for UnsafeBytesToStr {
+ type Output = &'a str;
+
+ #[inline]
+ extern "rust-call" fn call_once(mut self, arg: (&'a [u8], )) -> &'a str {
+ self.call_mut(arg)
+ }
+}
+
+impl<'a> FnMut<(&'a [u8], )> for UnsafeBytesToStr {
+ #[inline]
+ extern "rust-call" fn call_mut(&mut self, arg: (&'a [u8], )) -> &'a str {
+ unsafe { from_utf8_unchecked(arg.0) }
+ }
+}
+
#[stable(feature = "split_whitespace", since = "1.1.0")]
impl<'a> Iterator for SplitWhitespace<'a> {
type Item = &'a str;
+ #[inline]
fn next(&mut self) -> Option<&'a str> {
self.inner.next()
}
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
}
#[stable(feature = "split_whitespace", since = "1.1.0")]
impl<'a> DoubleEndedIterator for SplitWhitespace<'a> {
+ #[inline]
fn next_back(&mut self) -> Option<&'a str> {
self.inner.next_back()
}
#[stable(feature = "fused", since = "1.26.0")]
impl<'a> FusedIterator for SplitWhitespace<'a> {}
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+impl<'a> Iterator for SplitAsciiWhitespace<'a> {
+ type Item = &'a str;
+
+ #[inline]
+ fn next(&mut self) -> Option<&'a str> {
+ self.inner.next()
+ }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ self.inner.size_hint()
+ }
+}
+
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+impl<'a> DoubleEndedIterator for SplitAsciiWhitespace<'a> {
+ #[inline]
+ fn next_back(&mut self) -> Option<&'a str> {
+ self.inner.next_back()
+ }
+}
+
+#[unstable(feature = "split_ascii_whitespace", issue = "48656")]
+impl<'a> FusedIterator for SplitAsciiWhitespace<'a> {}
+
/// An iterator of [`u16`] over the string encoded as UTF-16.
///
/// [`u16`]: ../../std/primitive.u16.html
+++ /dev/null
-// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-#![unstable(feature = "futures_api",
- reason = "futures in libcore are unstable",
- issue = "50547")]
-
-//! Types and Traits for working with asynchronous tasks.
-
-use fmt;
-use ptr::NonNull;
-use future::Future;
-use mem::PinMut;
-
-/// Indicates whether a value is available or if the current task has been
-/// scheduled to receive a wakeup instead.
-#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
-pub enum Poll<T> {
- /// Represents that a value is immediately ready.
- Ready(T),
-
- /// Represents that a value is not ready yet.
- ///
- /// When a function returns `Pending`, the function *must* also
- /// ensure that the current task is scheduled to be awoken when
- /// progress can be made.
- Pending,
-}
-
-impl<T> Poll<T> {
- /// Change the ready value of this `Poll` with the closure provided
- pub fn map<U, F>(self, f: F) -> Poll<U>
- where F: FnOnce(T) -> U
- {
- match self {
- Poll::Ready(t) => Poll::Ready(f(t)),
- Poll::Pending => Poll::Pending,
- }
- }
-
- /// Returns whether this is `Poll::Ready`
- pub fn is_ready(&self) -> bool {
- match *self {
- Poll::Ready(_) => true,
- Poll::Pending => false,
- }
- }
-
- /// Returns whether this is `Poll::Pending`
- pub fn is_pending(&self) -> bool {
- !self.is_ready()
- }
-}
-
-impl<T, E> Poll<Result<T, E>> {
- /// Change the success value of this `Poll` with the closure provided
- pub fn map_ok<U, F>(self, f: F) -> Poll<Result<U, E>>
- where F: FnOnce(T) -> U
- {
- match self {
- Poll::Ready(Ok(t)) => Poll::Ready(Ok(f(t))),
- Poll::Ready(Err(e)) => Poll::Ready(Err(e)),
- Poll::Pending => Poll::Pending,
- }
- }
-
- /// Change the error value of this `Poll` with the closure provided
- pub fn map_err<U, F>(self, f: F) -> Poll<Result<T, U>>
- where F: FnOnce(E) -> U
- {
- match self {
- Poll::Ready(Ok(t)) => Poll::Ready(Ok(t)),
- Poll::Ready(Err(e)) => Poll::Ready(Err(f(e))),
- Poll::Pending => Poll::Pending,
- }
- }
-}
-
-impl<T> From<T> for Poll<T> {
- fn from(t: T) -> Poll<T> {
- Poll::Ready(t)
- }
-}
-
-/// A `Waker` is a handle for waking up a task by notifying its executor that it
-/// is ready to be run.
-///
-/// This handle contains a trait object pointing to an instance of the `UnsafeWake`
-/// trait, allowing notifications to get routed through it.
-#[repr(transparent)]
-pub struct Waker {
- inner: NonNull<UnsafeWake>,
-}
-
-unsafe impl Send for Waker {}
-unsafe impl Sync for Waker {}
-
-impl Waker {
- /// Constructs a new `Waker` directly.
- ///
- /// Note that most code will not need to call this. Implementers of the
- /// `UnsafeWake` trait will typically provide a wrapper that calls this
- /// but you otherwise shouldn't call it directly.
- ///
- /// If you're working with the standard library then it's recommended to
- /// use the `Waker::from` function instead which works with the safe
- /// `Arc` type and the safe `Wake` trait.
- #[inline]
- pub unsafe fn new(inner: NonNull<UnsafeWake>) -> Self {
- Waker { inner: inner }
- }
-
- /// Wake up the task associated with this `Waker`.
- #[inline]
- pub fn wake(&self) {
- unsafe { self.inner.as_ref().wake() }
- }
-
- /// Returns whether or not this `Waker` and `other` awaken the same task.
- ///
- /// This function works on a best-effort basis, and may return false even
- /// when the `Waker`s would awaken the same task. However, if this function
- /// returns true, it is guaranteed that the `Waker`s will awaken the same
- /// task.
- ///
- /// This function is primarily used for optimization purposes.
- #[inline]
- pub fn will_wake(&self, other: &Waker) -> bool {
- self.inner == other.inner
- }
-}
-
-impl Clone for Waker {
- #[inline]
- fn clone(&self) -> Self {
- unsafe {
- self.inner.as_ref().clone_raw()
- }
- }
-}
-
-impl fmt::Debug for Waker {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("Waker")
- .finish()
- }
-}
-
-impl Drop for Waker {
- #[inline]
- fn drop(&mut self) {
- unsafe {
- self.inner.as_ref().drop_raw()
- }
- }
-}
-
-/// A `LocalWaker` is a handle for waking up a task by notifying its executor that it
-/// is ready to be run.
-///
-/// This is similar to the `Waker` type, but cannot be sent across threads.
-/// Task executors can use this type to implement more optimized singlethreaded wakeup
-/// behavior.
-#[repr(transparent)]
-pub struct LocalWaker {
- inner: NonNull<UnsafeWake>,
-}
-
-impl !Send for LocalWaker {}
-impl !Sync for LocalWaker {}
-
-impl LocalWaker {
- /// Constructs a new `LocalWaker` directly.
- ///
- /// Note that most code will not need to call this. Implementers of the
- /// `UnsafeWake` trait will typically provide a wrapper that calls this
- /// but you otherwise shouldn't call it directly.
- ///
- /// If you're working with the standard library then it's recommended to
- /// use the `LocalWaker::from` function instead which works with the safe
- /// `Rc` type and the safe `LocalWake` trait.
- ///
- /// For this function to be used safely, it must be sound to call `inner.wake_local()`
- /// on the current thread.
- #[inline]
- pub unsafe fn new(inner: NonNull<UnsafeWake>) -> Self {
- LocalWaker { inner: inner }
- }
-
- /// Wake up the task associated with this `LocalWaker`.
- #[inline]
- pub fn wake(&self) {
- unsafe { self.inner.as_ref().wake_local() }
- }
-
- /// Returns whether or not this `LocalWaker` and `other` `LocalWaker` awaken the same task.
- ///
- /// This function works on a best-effort basis, and may return false even
- /// when the `LocalWaker`s would awaken the same task. However, if this function
- /// returns true, it is guaranteed that the `LocalWaker`s will awaken the same
- /// task.
- ///
- /// This function is primarily used for optimization purposes.
- #[inline]
- pub fn will_wake(&self, other: &LocalWaker) -> bool {
- self.inner == other.inner
- }
-
- /// Returns whether or not this `LocalWaker` and `other` `Waker` awaken the same task.
- ///
- /// This function works on a best-effort basis, and may return false even
- /// when the `Waker`s would awaken the same task. However, if this function
- /// returns true, it is guaranteed that the `LocalWaker`s will awaken the same
- /// task.
- ///
- /// This function is primarily used for optimization purposes.
- #[inline]
- pub fn will_wake_nonlocal(&self, other: &Waker) -> bool {
- self.inner == other.inner
- }
-}
-
-impl From<LocalWaker> for Waker {
- #[inline]
- fn from(local_waker: LocalWaker) -> Self {
- Waker { inner: local_waker.inner }
- }
-}
-
-impl Clone for LocalWaker {
- #[inline]
- fn clone(&self) -> Self {
- unsafe {
- LocalWaker { inner: self.inner.as_ref().clone_raw().inner }
- }
- }
-}
-
-impl fmt::Debug for LocalWaker {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("Waker")
- .finish()
- }
-}
-
-impl Drop for LocalWaker {
- #[inline]
- fn drop(&mut self) {
- unsafe {
- self.inner.as_ref().drop_raw()
- }
- }
-}
-
-/// An unsafe trait for implementing custom memory management for a `Waker` or `LocalWaker`.
-///
-/// A `Waker` conceptually is a cloneable trait object for `Wake`, and is
-/// most often essentially just `Arc<dyn Wake>`. However, in some contexts
-/// (particularly `no_std`), it's desirable to avoid `Arc` in favor of some
-/// custom memory management strategy. This trait is designed to allow for such
-/// customization.
-///
-/// When using `std`, a default implementation of the `UnsafeWake` trait is provided for
-/// `Arc<T>` where `T: Wake` and `Rc<T>` where `T: LocalWake`.
-///
-/// Although the methods on `UnsafeWake` take pointers rather than references,
-pub unsafe trait UnsafeWake: Send + Sync {
- /// Creates a clone of this `UnsafeWake` and stores it behind a `Waker`.
- ///
- /// This function will create a new uniquely owned handle that under the
- /// hood references the same notification instance. In other words calls
- /// to `wake` on the returned handle should be equivalent to calls to
- /// `wake` on this handle.
- ///
- /// # Unsafety
- ///
- /// This function is unsafe to call because it's asserting the `UnsafeWake`
- /// value is in a consistent state, i.e. hasn't been dropped.
- unsafe fn clone_raw(&self) -> Waker;
-
- /// Drops this instance of `UnsafeWake`, deallocating resources
- /// associated with it.
- ///
- /// FIXME(cramertj)
- /// This method is intended to have a signature such as:
- ///
- /// ```ignore (not-a-doctest)
- /// fn drop_raw(self: *mut Self);
- /// ```
- ///
- /// Unfortunately in Rust today that signature is not object safe.
- /// Nevertheless it's recommended to implement this function *as if* that
- /// were its signature. As such it is not safe to call on an invalid
- /// pointer, nor is the validity of the pointer guaranteed after this
- /// function returns.
- ///
- /// # Unsafety
- ///
- /// This function is unsafe to call because it's asserting the `UnsafeWake`
- /// value is in a consistent state, i.e. hasn't been dropped.
- unsafe fn drop_raw(&self);
-
- /// Indicates that the associated task is ready to make progress and should
- /// be `poll`ed.
- ///
- /// Executors generally maintain a queue of "ready" tasks; `wake` should place
- /// the associated task onto this queue.
- ///
- /// # Panics
- ///
- /// Implementations should avoid panicking, but clients should also be prepared
- /// for panics.
- ///
- /// # Unsafety
- ///
- /// This function is unsafe to call because it's asserting the `UnsafeWake`
- /// value is in a consistent state, i.e. hasn't been dropped.
- unsafe fn wake(&self);
-
- /// Indicates that the associated task is ready to make progress and should
- /// be `poll`ed. This function is the same as `wake`, but can only be called
- /// from the thread that this `UnsafeWake` is "local" to. This allows for
- /// implementors to provide specialized wakeup behavior specific to the current
- /// thread. This function is called by `LocalWaker::wake`.
- ///
- /// Executors generally maintain a queue of "ready" tasks; `wake_local` should place
- /// the associated task onto this queue.
- ///
- /// # Panics
- ///
- /// Implementations should avoid panicking, but clients should also be prepared
- /// for panics.
- ///
- /// # Unsafety
- ///
- /// This function is unsafe to call because it's asserting the `UnsafeWake`
- /// value is in a consistent state, i.e. hasn't been dropped, and that the
- /// `UnsafeWake` hasn't moved from the thread on which it was created.
- unsafe fn wake_local(&self) {
- self.wake()
- }
-}
-
-/// Information about the currently-running task.
-///
-/// Contexts are always tied to the stack, since they are set up specifically
-/// when performing a single `poll` step on a task.
-pub struct Context<'a> {
- local_waker: &'a LocalWaker,
- executor: &'a mut Executor,
-}
-
-impl<'a> fmt::Debug for Context<'a> {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("Context")
- .finish()
- }
-}
-
-impl<'a> Context<'a> {
- /// Create a new task `Context` with the provided `local_waker`, `waker`, and `executor`.
- #[inline]
- pub fn new(local_waker: &'a LocalWaker, executor: &'a mut Executor) -> Context<'a> {
- Context {
- local_waker,
- executor,
- }
- }
-
- /// Get the `LocalWaker` associated with the current task.
- #[inline]
- pub fn local_waker(&self) -> &'a LocalWaker {
- self.local_waker
- }
-
- /// Get the `Waker` associated with the current task.
- #[inline]
- pub fn waker(&self) -> &'a Waker {
- unsafe { &*(self.local_waker as *const LocalWaker as *const Waker) }
- }
-
- /// Get the default executor associated with this task.
- ///
- /// This method is useful primarily if you want to explicitly handle
- /// spawn failures.
- #[inline]
- pub fn executor(&mut self) -> &mut Executor {
- self.executor
- }
-
- /// Produce a context like the current one, but using the given waker instead.
- ///
- /// This advanced method is primarily used when building "internal
- /// schedulers" within a task, where you want to provide some customized
- /// wakeup logic.
- #[inline]
- pub fn with_waker<'b>(&'b mut self, local_waker: &'b LocalWaker) -> Context<'b> {
- Context {
- local_waker,
- executor: self.executor,
- }
- }
-
- /// Produce a context like the current one, but using the given executor
- /// instead.
- ///
- /// This advanced method is primarily used when building "internal
- /// schedulers" within a task.
- #[inline]
- pub fn with_executor<'b, E>(&'b mut self, executor: &'b mut E) -> Context<'b>
- where E: Executor
- {
- Context {
- local_waker: self.local_waker,
- executor: executor,
- }
- }
-}
-
-/// A task executor.
-///
-/// A *task* is a `()`-producing async value that runs at the top level, and will
-/// be `poll`ed until completion. It's also the unit at which wake-up
-/// notifications occur. Executors, such as thread pools, allow tasks to be
-/// spawned and are responsible for putting tasks onto ready queues when
-/// they are woken up, and polling them when they are ready.
-pub trait Executor {
- /// Spawn the given task, polling it until completion.
- ///
- /// # Errors
- ///
- /// The executor may be unable to spawn tasks, either because it has
- /// been shut down or is resource-constrained.
- fn spawn_obj(&mut self, task: TaskObj) -> Result<(), SpawnObjError>;
-
- /// Determine whether the executor is able to spawn new tasks.
- ///
- /// # Returns
- ///
- /// An `Ok` return means the executor is *likely* (but not guaranteed)
- /// to accept a subsequent spawn attempt. Likewise, an `Err` return
- /// means that `spawn` is likely, but not guaranteed, to yield an error.
- #[inline]
- fn status(&self) -> Result<(), SpawnErrorKind> {
- Ok(())
- }
-}
-
-/// A custom trait object for polling tasks, roughly akin to
-/// `Box<Future<Output = ()> + Send>`.
-pub struct TaskObj {
- ptr: *mut (),
- poll_fn: unsafe fn(*mut (), &mut Context) -> Poll<()>,
- drop_fn: unsafe fn(*mut ()),
-}
-
-impl fmt::Debug for TaskObj {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("TaskObj")
- .finish()
- }
-}
-
-unsafe impl Send for TaskObj {}
-
-/// A custom implementation of a task trait object for `TaskObj`, providing
-/// a hand-rolled vtable.
-///
-/// This custom representation is typically used only in `no_std` contexts,
-/// where the default `Box`-based implementation is not available.
-///
-/// The implementor must guarantee that it is safe to call `poll` repeatedly (in
-/// a non-concurrent fashion) with the result of `into_raw` until `drop` is
-/// called.
-pub unsafe trait UnsafeTask: Send + 'static {
- /// Convert a owned instance into a (conceptually owned) void pointer.
- fn into_raw(self) -> *mut ();
-
- /// Poll the task represented by the given void pointer.
- ///
- /// # Safety
- ///
- /// The trait implementor must guarantee that it is safe to repeatedly call
- /// `poll` with the result of `into_raw` until `drop` is called; such calls
- /// are not, however, allowed to race with each other or with calls to `drop`.
- unsafe fn poll(task: *mut (), cx: &mut Context) -> Poll<()>;
-
- /// Drops the task represented by the given void pointer.
- ///
- /// # Safety
- ///
- /// The trait implementor must guarantee that it is safe to call this
- /// function once per `into_raw` invocation; that call cannot race with
- /// other calls to `drop` or `poll`.
- unsafe fn drop(task: *mut ());
-}
-
-impl TaskObj {
- /// Create a `TaskObj` from a custom trait object representation.
- #[inline]
- pub fn new<T: UnsafeTask>(t: T) -> TaskObj {
- TaskObj {
- ptr: t.into_raw(),
- poll_fn: T::poll,
- drop_fn: T::drop,
- }
- }
-}
-
-impl Future for TaskObj {
- type Output = ();
-
- #[inline]
- fn poll(self: PinMut<Self>, cx: &mut Context) -> Poll<()> {
- unsafe {
- (self.poll_fn)(self.ptr, cx)
- }
- }
-}
-
-impl Drop for TaskObj {
- fn drop(&mut self) {
- unsafe {
- (self.drop_fn)(self.ptr)
- }
- }
-}
-
-/// Provides the reason that an executor was unable to spawn.
-pub struct SpawnErrorKind {
- _hidden: (),
-}
-
-impl fmt::Debug for SpawnErrorKind {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_tuple("SpawnErrorKind")
- .field(&"shutdown")
- .finish()
- }
-}
-
-impl SpawnErrorKind {
- /// Spawning is failing because the executor has been shut down.
- pub fn shutdown() -> SpawnErrorKind {
- SpawnErrorKind { _hidden: () }
- }
-
- /// Check whether this error is the `shutdown` error.
- pub fn is_shutdown(&self) -> bool {
- true
- }
-}
-
-/// The result of a failed spawn
-#[derive(Debug)]
-pub struct SpawnObjError {
- /// The kind of error
- pub kind: SpawnErrorKind,
-
- /// The task for which spawning was attempted
- pub task: TaskObj,
-}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![unstable(feature = "futures_api",
+ reason = "futures in libcore are unstable",
+ issue = "50547")]
+
+use fmt;
+use super::{Executor, Waker, LocalWaker};
+
+/// Information about the currently-running task.
+///
+/// Contexts are always tied to the stack, since they are set up specifically
+/// when performing a single `poll` step on a task.
+pub struct Context<'a> {
+ local_waker: &'a LocalWaker,
+ executor: &'a mut Executor,
+}
+
+impl<'a> fmt::Debug for Context<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("Context")
+ .finish()
+ }
+}
+
+impl<'a> Context<'a> {
+ /// Create a new task `Context` with the provided `local_waker`, `waker`, and `executor`.
+ #[inline]
+ pub fn new(local_waker: &'a LocalWaker, executor: &'a mut Executor) -> Context<'a> {
+ Context {
+ local_waker,
+ executor,
+ }
+ }
+
+ /// Get the `LocalWaker` associated with the current task.
+ #[inline]
+ pub fn local_waker(&self) -> &'a LocalWaker {
+ self.local_waker
+ }
+
+ /// Get the `Waker` associated with the current task.
+ #[inline]
+ pub fn waker(&self) -> &'a Waker {
+ unsafe { &*(self.local_waker as *const LocalWaker as *const Waker) }
+ }
+
+ /// Get the default executor associated with this task.
+ ///
+ /// This method is useful primarily if you want to explicitly handle
+ /// spawn failures.
+ #[inline]
+ pub fn executor(&mut self) -> &mut Executor {
+ self.executor
+ }
+
+ /// Produce a context like the current one, but using the given waker instead.
+ ///
+ /// This advanced method is primarily used when building "internal
+ /// schedulers" within a task, where you want to provide some customized
+ /// wakeup logic.
+ #[inline]
+ pub fn with_waker<'b>(&'b mut self, local_waker: &'b LocalWaker) -> Context<'b> {
+ Context {
+ local_waker,
+ executor: self.executor,
+ }
+ }
+
+ /// Produce a context like the current one, but using the given executor
+ /// instead.
+ ///
+ /// This advanced method is primarily used when building "internal
+ /// schedulers" within a task.
+ #[inline]
+ pub fn with_executor<'b, E>(&'b mut self, executor: &'b mut E) -> Context<'b>
+ where E: Executor
+ {
+ Context {
+ local_waker: self.local_waker,
+ executor: executor,
+ }
+ }
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![unstable(feature = "futures_api",
+ reason = "futures in libcore are unstable",
+ issue = "50547")]
+
+use fmt;
+use super::{TaskObj, LocalTaskObj};
+
+/// A task executor.
+///
+/// A *task* is a `()`-producing async value that runs at the top level, and will
+/// be `poll`ed until completion. It's also the unit at which wake-up
+/// notifications occur. Executors, such as thread pools, allow tasks to be
+/// spawned and are responsible for putting tasks onto ready queues when
+/// they are woken up, and polling them when they are ready.
+pub trait Executor {
+ /// Spawn the given task, polling it until completion.
+ ///
+ /// # Errors
+ ///
+ /// The executor may be unable to spawn tasks, either because it has
+ /// been shut down or is resource-constrained.
+ fn spawn_obj(&mut self, task: TaskObj) -> Result<(), SpawnObjError>;
+
+ /// Determine whether the executor is able to spawn new tasks.
+ ///
+ /// # Returns
+ ///
+ /// An `Ok` return means the executor is *likely* (but not guaranteed)
+ /// to accept a subsequent spawn attempt. Likewise, an `Err` return
+ /// means that `spawn` is likely, but not guaranteed, to yield an error.
+ #[inline]
+ fn status(&self) -> Result<(), SpawnErrorKind> {
+ Ok(())
+ }
+}
+
+/// Provides the reason that an executor was unable to spawn.
+pub struct SpawnErrorKind {
+ _hidden: (),
+}
+
+impl fmt::Debug for SpawnErrorKind {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_tuple("SpawnErrorKind")
+ .field(&"shutdown")
+ .finish()
+ }
+}
+
+impl SpawnErrorKind {
+ /// Spawning is failing because the executor has been shut down.
+ pub fn shutdown() -> SpawnErrorKind {
+ SpawnErrorKind { _hidden: () }
+ }
+
+ /// Check whether this error is the `shutdown` error.
+ pub fn is_shutdown(&self) -> bool {
+ true
+ }
+}
+
+/// The result of a failed spawn
+#[derive(Debug)]
+pub struct SpawnObjError {
+ /// The kind of error
+ pub kind: SpawnErrorKind,
+
+ /// The task for which spawning was attempted
+ pub task: TaskObj,
+}
+
+/// The result of a failed spawn
+#[derive(Debug)]
+pub struct SpawnLocalObjError {
+ /// The kind of error
+ pub kind: SpawnErrorKind,
+
+ /// The task for which spawning was attempted
+ pub task: LocalTaskObj,
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![unstable(feature = "futures_api",
+ reason = "futures in libcore are unstable",
+ issue = "50547")]
+
+//! Types and Traits for working with asynchronous tasks.
+
+mod context;
+pub use self::context::Context;
+
+mod executor;
+pub use self::executor::{
+ Executor, SpawnErrorKind, SpawnObjError, SpawnLocalObjError
+};
+
+mod poll;
+pub use self::poll::Poll;
+
+mod task;
+pub use self::task::{TaskObj, LocalTaskObj, UnsafeTask};
+
+mod wake;
+pub use self::wake::{Waker, LocalWaker, UnsafeWake};
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![unstable(feature = "futures_api",
+ reason = "futures in libcore are unstable",
+ issue = "50547")]
+
+/// Indicates whether a value is available or if the current task has been
+/// scheduled to receive a wakeup instead.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, Ord, PartialOrd, Hash)]
+pub enum Poll<T> {
+ /// Represents that a value is immediately ready.
+ Ready(T),
+
+ /// Represents that a value is not ready yet.
+ ///
+ /// When a function returns `Pending`, the function *must* also
+ /// ensure that the current task is scheduled to be awoken when
+ /// progress can be made.
+ Pending,
+}
+
+impl<T> Poll<T> {
+ /// Change the ready value of this `Poll` with the closure provided
+ pub fn map<U, F>(self, f: F) -> Poll<U>
+ where F: FnOnce(T) -> U
+ {
+ match self {
+ Poll::Ready(t) => Poll::Ready(f(t)),
+ Poll::Pending => Poll::Pending,
+ }
+ }
+
+ /// Returns whether this is `Poll::Ready`
+ pub fn is_ready(&self) -> bool {
+ match *self {
+ Poll::Ready(_) => true,
+ Poll::Pending => false,
+ }
+ }
+
+ /// Returns whether this is `Poll::Pending`
+ pub fn is_pending(&self) -> bool {
+ !self.is_ready()
+ }
+}
+
+impl<T, E> Poll<Result<T, E>> {
+ /// Change the success value of this `Poll` with the closure provided
+ pub fn map_ok<U, F>(self, f: F) -> Poll<Result<U, E>>
+ where F: FnOnce(T) -> U
+ {
+ match self {
+ Poll::Ready(Ok(t)) => Poll::Ready(Ok(f(t))),
+ Poll::Ready(Err(e)) => Poll::Ready(Err(e)),
+ Poll::Pending => Poll::Pending,
+ }
+ }
+
+ /// Change the error value of this `Poll` with the closure provided
+ pub fn map_err<U, F>(self, f: F) -> Poll<Result<T, U>>
+ where F: FnOnce(E) -> U
+ {
+ match self {
+ Poll::Ready(Ok(t)) => Poll::Ready(Ok(t)),
+ Poll::Ready(Err(e)) => Poll::Ready(Err(f(e))),
+ Poll::Pending => Poll::Pending,
+ }
+ }
+}
+
+impl<T> From<T> for Poll<T> {
+ fn from(t: T) -> Poll<T> {
+ Poll::Ready(t)
+ }
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![unstable(feature = "futures_api",
+ reason = "futures in libcore are unstable",
+ issue = "50547")]
+
+use fmt;
+use future::Future;
+use mem::PinMut;
+use super::{Context, Poll};
+
+/// A custom trait object for polling tasks, roughly akin to
+/// `Box<Future<Output = ()>>`.
+/// Contrary to `TaskObj`, `LocalTaskObj` does not have a `Send` bound.
+pub struct LocalTaskObj {
+ ptr: *mut (),
+ poll_fn: unsafe fn(*mut (), &mut Context) -> Poll<()>,
+ drop_fn: unsafe fn(*mut ()),
+}
+
+impl LocalTaskObj {
+ /// Create a `LocalTaskObj` from a custom trait object representation.
+ #[inline]
+ pub fn new<T: UnsafeTask>(t: T) -> LocalTaskObj {
+ LocalTaskObj {
+ ptr: t.into_raw(),
+ poll_fn: T::poll,
+ drop_fn: T::drop,
+ }
+ }
+
+ /// Converts the `LocalTaskObj` into a `TaskObj`
+ /// To make this operation safe one has to ensure that the `UnsafeTask`
+ /// instance from which this `LocalTaskObj` was created actually implements
+ /// `Send`.
+ pub unsafe fn as_task_obj(self) -> TaskObj {
+ TaskObj(self)
+ }
+}
+
+impl fmt::Debug for LocalTaskObj {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("LocalTaskObj")
+ .finish()
+ }
+}
+
+impl From<TaskObj> for LocalTaskObj {
+ fn from(task: TaskObj) -> LocalTaskObj {
+ task.0
+ }
+}
+
+impl Future for LocalTaskObj {
+ type Output = ();
+
+ #[inline]
+ fn poll(self: PinMut<Self>, cx: &mut Context) -> Poll<()> {
+ unsafe {
+ (self.poll_fn)(self.ptr, cx)
+ }
+ }
+}
+
+impl Drop for LocalTaskObj {
+ fn drop(&mut self) {
+ unsafe {
+ (self.drop_fn)(self.ptr)
+ }
+ }
+}
+
+/// A custom trait object for polling tasks, roughly akin to
+/// `Box<Future<Output = ()> + Send>`.
+pub struct TaskObj(LocalTaskObj);
+
+unsafe impl Send for TaskObj {}
+
+impl TaskObj {
+ /// Create a `TaskObj` from a custom trait object representation.
+ #[inline]
+ pub fn new<T: UnsafeTask + Send>(t: T) -> TaskObj {
+ TaskObj(LocalTaskObj::new(t))
+ }
+}
+
+impl fmt::Debug for TaskObj {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("TaskObj")
+ .finish()
+ }
+}
+
+impl Future for TaskObj {
+ type Output = ();
+
+ #[inline]
+ fn poll(self: PinMut<Self>, cx: &mut Context) -> Poll<()> {
+ let pinned_field = unsafe { PinMut::map_unchecked(self, |x| &mut x.0) };
+ pinned_field.poll(cx)
+ }
+}
+
+/// A custom implementation of a task trait object for `TaskObj`, providing
+/// a hand-rolled vtable.
+///
+/// This custom representation is typically used only in `no_std` contexts,
+/// where the default `Box`-based implementation is not available.
+///
+/// The implementor must guarantee that it is safe to call `poll` repeatedly (in
+/// a non-concurrent fashion) with the result of `into_raw` until `drop` is
+/// called.
+pub unsafe trait UnsafeTask: 'static {
+ /// Convert a owned instance into a (conceptually owned) void pointer.
+ fn into_raw(self) -> *mut ();
+
+ /// Poll the task represented by the given void pointer.
+ ///
+ /// # Safety
+ ///
+ /// The trait implementor must guarantee that it is safe to repeatedly call
+ /// `poll` with the result of `into_raw` until `drop` is called; such calls
+ /// are not, however, allowed to race with each other or with calls to `drop`.
+ unsafe fn poll(task: *mut (), cx: &mut Context) -> Poll<()>;
+
+ /// Drops the task represented by the given void pointer.
+ ///
+ /// # Safety
+ ///
+ /// The trait implementor must guarantee that it is safe to call this
+ /// function once per `into_raw` invocation; that call cannot race with
+ /// other calls to `drop` or `poll`.
+ unsafe fn drop(task: *mut ());
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![unstable(feature = "futures_api",
+ reason = "futures in libcore are unstable",
+ issue = "50547")]
+
+use fmt;
+use ptr::NonNull;
+
+/// A `Waker` is a handle for waking up a task by notifying its executor that it
+/// is ready to be run.
+///
+/// This handle contains a trait object pointing to an instance of the `UnsafeWake`
+/// trait, allowing notifications to get routed through it.
+#[repr(transparent)]
+pub struct Waker {
+ inner: NonNull<UnsafeWake>,
+}
+
+unsafe impl Send for Waker {}
+unsafe impl Sync for Waker {}
+
+impl Waker {
+ /// Constructs a new `Waker` directly.
+ ///
+ /// Note that most code will not need to call this. Implementers of the
+ /// `UnsafeWake` trait will typically provide a wrapper that calls this
+ /// but you otherwise shouldn't call it directly.
+ ///
+ /// If you're working with the standard library then it's recommended to
+ /// use the `Waker::from` function instead which works with the safe
+ /// `Arc` type and the safe `Wake` trait.
+ #[inline]
+ pub unsafe fn new(inner: NonNull<UnsafeWake>) -> Self {
+ Waker { inner: inner }
+ }
+
+ /// Wake up the task associated with this `Waker`.
+ #[inline]
+ pub fn wake(&self) {
+ unsafe { self.inner.as_ref().wake() }
+ }
+
+ /// Returns whether or not this `Waker` and `other` awaken the same task.
+ ///
+ /// This function works on a best-effort basis, and may return false even
+ /// when the `Waker`s would awaken the same task. However, if this function
+ /// returns true, it is guaranteed that the `Waker`s will awaken the same
+ /// task.
+ ///
+ /// This function is primarily used for optimization purposes.
+ #[inline]
+ pub fn will_wake(&self, other: &Waker) -> bool {
+ self.inner == other.inner
+ }
+}
+
+impl Clone for Waker {
+ #[inline]
+ fn clone(&self) -> Self {
+ unsafe {
+ self.inner.as_ref().clone_raw()
+ }
+ }
+}
+
+impl fmt::Debug for Waker {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("Waker")
+ .finish()
+ }
+}
+
+impl Drop for Waker {
+ #[inline]
+ fn drop(&mut self) {
+ unsafe {
+ self.inner.as_ref().drop_raw()
+ }
+ }
+}
+
+/// A `LocalWaker` is a handle for waking up a task by notifying its executor that it
+/// is ready to be run.
+///
+/// This is similar to the `Waker` type, but cannot be sent across threads.
+/// Task executors can use this type to implement more optimized singlethreaded wakeup
+/// behavior.
+#[repr(transparent)]
+pub struct LocalWaker {
+ inner: NonNull<UnsafeWake>,
+}
+
+impl !Send for LocalWaker {}
+impl !Sync for LocalWaker {}
+
+impl LocalWaker {
+ /// Constructs a new `LocalWaker` directly.
+ ///
+ /// Note that most code will not need to call this. Implementers of the
+ /// `UnsafeWake` trait will typically provide a wrapper that calls this
+ /// but you otherwise shouldn't call it directly.
+ ///
+ /// If you're working with the standard library then it's recommended to
+ /// use the `LocalWaker::from` function instead which works with the safe
+ /// `Rc` type and the safe `LocalWake` trait.
+ ///
+ /// For this function to be used safely, it must be sound to call `inner.wake_local()`
+ /// on the current thread.
+ #[inline]
+ pub unsafe fn new(inner: NonNull<UnsafeWake>) -> Self {
+ LocalWaker { inner: inner }
+ }
+
+ /// Wake up the task associated with this `LocalWaker`.
+ #[inline]
+ pub fn wake(&self) {
+ unsafe { self.inner.as_ref().wake_local() }
+ }
+
+ /// Returns whether or not this `LocalWaker` and `other` `LocalWaker` awaken the same task.
+ ///
+ /// This function works on a best-effort basis, and may return false even
+ /// when the `LocalWaker`s would awaken the same task. However, if this function
+ /// returns true, it is guaranteed that the `LocalWaker`s will awaken the same
+ /// task.
+ ///
+ /// This function is primarily used for optimization purposes.
+ #[inline]
+ pub fn will_wake(&self, other: &LocalWaker) -> bool {
+ self.inner == other.inner
+ }
+
+ /// Returns whether or not this `LocalWaker` and `other` `Waker` awaken the same task.
+ ///
+ /// This function works on a best-effort basis, and may return false even
+ /// when the `Waker`s would awaken the same task. However, if this function
+ /// returns true, it is guaranteed that the `LocalWaker`s will awaken the same
+ /// task.
+ ///
+ /// This function is primarily used for optimization purposes.
+ #[inline]
+ pub fn will_wake_nonlocal(&self, other: &Waker) -> bool {
+ self.inner == other.inner
+ }
+}
+
+impl From<LocalWaker> for Waker {
+ #[inline]
+ fn from(local_waker: LocalWaker) -> Self {
+ Waker { inner: local_waker.inner }
+ }
+}
+
+impl Clone for LocalWaker {
+ #[inline]
+ fn clone(&self) -> Self {
+ unsafe {
+ LocalWaker { inner: self.inner.as_ref().clone_raw().inner }
+ }
+ }
+}
+
+impl fmt::Debug for LocalWaker {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ f.debug_struct("Waker")
+ .finish()
+ }
+}
+
+impl Drop for LocalWaker {
+ #[inline]
+ fn drop(&mut self) {
+ unsafe {
+ self.inner.as_ref().drop_raw()
+ }
+ }
+}
+
+/// An unsafe trait for implementing custom memory management for a `Waker` or `LocalWaker`.
+///
+/// A `Waker` conceptually is a cloneable trait object for `Wake`, and is
+/// most often essentially just `Arc<dyn Wake>`. However, in some contexts
+/// (particularly `no_std`), it's desirable to avoid `Arc` in favor of some
+/// custom memory management strategy. This trait is designed to allow for such
+/// customization.
+///
+/// When using `std`, a default implementation of the `UnsafeWake` trait is provided for
+/// `Arc<T>` where `T: Wake` and `Rc<T>` where `T: LocalWake`.
+///
+/// Although the methods on `UnsafeWake` take pointers rather than references,
+pub unsafe trait UnsafeWake: Send + Sync {
+ /// Creates a clone of this `UnsafeWake` and stores it behind a `Waker`.
+ ///
+ /// This function will create a new uniquely owned handle that under the
+ /// hood references the same notification instance. In other words calls
+ /// to `wake` on the returned handle should be equivalent to calls to
+ /// `wake` on this handle.
+ ///
+ /// # Unsafety
+ ///
+ /// This function is unsafe to call because it's asserting the `UnsafeWake`
+ /// value is in a consistent state, i.e. hasn't been dropped.
+ unsafe fn clone_raw(&self) -> Waker;
+
+ /// Drops this instance of `UnsafeWake`, deallocating resources
+ /// associated with it.
+ ///
+ /// FIXME(cramertj)
+ /// This method is intended to have a signature such as:
+ ///
+ /// ```ignore (not-a-doctest)
+ /// fn drop_raw(self: *mut Self);
+ /// ```
+ ///
+ /// Unfortunately in Rust today that signature is not object safe.
+ /// Nevertheless it's recommended to implement this function *as if* that
+ /// were its signature. As such it is not safe to call on an invalid
+ /// pointer, nor is the validity of the pointer guaranteed after this
+ /// function returns.
+ ///
+ /// # Unsafety
+ ///
+ /// This function is unsafe to call because it's asserting the `UnsafeWake`
+ /// value is in a consistent state, i.e. hasn't been dropped.
+ unsafe fn drop_raw(&self);
+
+ /// Indicates that the associated task is ready to make progress and should
+ /// be `poll`ed.
+ ///
+ /// Executors generally maintain a queue of "ready" tasks; `wake` should place
+ /// the associated task onto this queue.
+ ///
+ /// # Panics
+ ///
+ /// Implementations should avoid panicking, but clients should also be prepared
+ /// for panics.
+ ///
+ /// # Unsafety
+ ///
+ /// This function is unsafe to call because it's asserting the `UnsafeWake`
+ /// value is in a consistent state, i.e. hasn't been dropped.
+ unsafe fn wake(&self);
+
+ /// Indicates that the associated task is ready to make progress and should
+ /// be `poll`ed. This function is the same as `wake`, but can only be called
+ /// from the thread that this `UnsafeWake` is "local" to. This allows for
+ /// implementors to provide specialized wakeup behavior specific to the current
+ /// thread. This function is called by `LocalWaker::wake`.
+ ///
+ /// Executors generally maintain a queue of "ready" tasks; `wake_local` should place
+ /// the associated task onto this queue.
+ ///
+ /// # Panics
+ ///
+ /// Implementations should avoid panicking, but clients should also be prepared
+ /// for panics.
+ ///
+ /// # Unsafety
+ ///
+ /// This function is unsafe to call because it's asserting the `UnsafeWake`
+ /// value is in a consistent state, i.e. hasn't been dropped, and that the
+ /// `UnsafeWake` hasn't moved from the thread on which it was created.
+ unsafe fn wake_local(&self) {
+ self.wake()
+ }
+}
assert_eq!((-9.0 as $fty).max($nan), -9.0);
assert!(($nan as $fty).max($nan).is_nan());
}
+ #[test]
+ fn mod_euc() {
+ let a: $fty = 42.0;
+ assert!($inf.mod_euc(a).is_nan());
+ assert_eq!(a.mod_euc($inf), a);
+ assert!(a.mod_euc($nan).is_nan());
+ assert!($inf.mod_euc($inf).is_nan());
+ assert!($inf.mod_euc($nan).is_nan());
+ assert!($nan.mod_euc($inf).is_nan());
+ }
+ #[test]
+ fn div_euc() {
+ let a: $fty = 42.0;
+ assert_eq!(a.div_euc($inf), 0.0);
+ assert!(a.div_euc($nan).is_nan());
+ assert!($inf.div_euc($inf).is_nan());
+ assert!($inf.div_euc($nan).is_nan());
+ assert!($nan.div_euc($inf).is_nan());
+ }
} }
}
assert_eq!(b.binary_search(&0), Err(0));
assert_eq!(b.binary_search(&1), Ok(0));
assert_eq!(b.binary_search(&2), Err(1));
- assert!(match b.binary_search(&3) { Ok(1...3) => true, _ => false });
- assert!(match b.binary_search(&3) { Ok(1...3) => true, _ => false });
+ assert!(match b.binary_search(&3) { Ok(1..=3) => true, _ => false });
+ assert!(match b.binary_search(&3) { Ok(1..=3) => true, _ => false });
assert_eq!(b.binary_search(&4), Err(4));
assert_eq!(b.binary_search(&5), Err(4));
assert_eq!(b.binary_search(&6), Err(4));
assert_eq!(Duration::new(2, 0).checked_div(0), None);
}
+#[test]
+fn correct_sum() {
+ let durations = [
+ Duration::new(1, 999_999_999),
+ Duration::new(2, 999_999_999),
+ Duration::new(0, 999_999_999),
+ Duration::new(0, 999_999_999),
+ Duration::new(0, 999_999_999),
+ Duration::new(5, 0),
+ ];
+ let sum = durations.iter().sum::<Duration>();
+ assert_eq!(sum, Duration::new(1+2+5+4, 1_000_000_000 - 5));
+}
+
#[test]
fn debug_formatting_extreme_values() {
assert_eq!(
}
}
+macro_rules! sum_durations {
+ ($iter:expr) => {{
+ let mut total_secs: u64 = 0;
+ let mut total_nanos: u64 = 0;
+
+ for entry in $iter {
+ total_secs = total_secs
+ .checked_add(entry.secs)
+ .expect("overflow in iter::sum over durations");
+ total_nanos = match total_nanos.checked_add(entry.nanos as u64) {
+ Some(n) => n,
+ None => {
+ total_secs = total_secs
+ .checked_add(total_nanos / NANOS_PER_SEC as u64)
+ .expect("overflow in iter::sum over durations");
+ (total_nanos % NANOS_PER_SEC as u64) + entry.nanos as u64
+ }
+ };
+ }
+ total_secs = total_secs
+ .checked_add(total_nanos / NANOS_PER_SEC as u64)
+ .expect("overflow in iter::sum over durations");
+ total_nanos = total_nanos % NANOS_PER_SEC as u64;
+ Duration {
+ secs: total_secs,
+ nanos: total_nanos as u32,
+ }
+ }};
+}
+
#[stable(feature = "duration_sum", since = "1.16.0")]
impl Sum for Duration {
fn sum<I: Iterator<Item=Duration>>(iter: I) -> Duration {
- iter.fold(Duration::new(0, 0), |a, b| a + b)
+ sum_durations!(iter)
}
}
#[stable(feature = "duration_sum", since = "1.16.0")]
impl<'a> Sum<&'a Duration> for Duration {
fn sum<I: Iterator<Item=&'a Duration>>(iter: I) -> Duration {
- iter.fold(Duration::new(0, 0), |a, b| a + *b)
+ sum_durations!(iter)
}
}
//! user of the `DepNode` API of having to know how to compute the expected
//! fingerprint for a given set of node parameters.
-use mir::interpret::{GlobalId, ConstValue};
+use mir::interpret::GlobalId;
use hir::def_id::{CrateNum, DefId, DefIndex, CRATE_DEF_INDEX};
use hir::map::DefPathHash;
use hir::{HirId, ItemLocalId};
use std::fmt;
use std::hash::Hash;
use syntax_pos::symbol::InternedString;
-use traits::query::{CanonicalProjectionGoal,
- CanonicalTyGoal, CanonicalPredicateGoal};
-use ty::{TyCtxt, Instance, InstanceDef, ParamEnv, ParamEnvAnd, PolyTraitRef, Ty};
+use traits::query::{
+ CanonicalProjectionGoal, CanonicalTyGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpSubtypeGoal,
+ CanonicalPredicateGoal, CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpNormalizeGoal,
+};
+use ty::{TyCtxt, FnSig, Instance, InstanceDef,
+ ParamEnv, ParamEnvAnd, Predicate, PolyFnSig, PolyTraitRef, Ty, self};
use ty::subst::Substs;
// erase!() just makes tokens go away. It's used to specify which macro argument
// queries). Making them anonymous avoids hashing the result, which
// may save a bit of time.
[anon] EraseRegionsTy { ty: Ty<'tcx> },
- [anon] ConstValueToAllocation { val: ConstValue<'tcx>, ty: Ty<'tcx> },
+ [anon] ConstValueToAllocation { val: &'tcx ty::Const<'tcx> },
[input] Freevars(DefId),
[input] MaybeUnusedTraitImport(DefId),
[] NormalizeTyAfterErasingRegions(ParamEnvAnd<'tcx, Ty<'tcx>>),
[] DropckOutlives(CanonicalTyGoal<'tcx>),
[] EvaluateObligation(CanonicalPredicateGoal<'tcx>),
+ [] TypeOpEq(CanonicalTypeOpEqGoal<'tcx>),
+ [] TypeOpSubtype(CanonicalTypeOpSubtypeGoal<'tcx>),
+ [] TypeOpProvePredicate(CanonicalTypeOpProvePredicateGoal<'tcx>),
+ [] TypeOpNormalizeTy(CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>),
+ [] TypeOpNormalizePredicate(CanonicalTypeOpNormalizeGoal<'tcx, Predicate<'tcx>>),
+ [] TypeOpNormalizePolyFnSig(CanonicalTypeOpNormalizeGoal<'tcx, PolyFnSig<'tcx>>),
+ [] TypeOpNormalizeFnSig(CanonicalTypeOpNormalizeGoal<'tcx, FnSig<'tcx>>),
[] SubstituteNormalizeAndTestPredicates { key: (DefId, &'tcx Substs<'tcx>) },
ItemFn(Name, &'a Generics, FnHeader, &'a Visibility, &'a [Attribute]),
/// fn foo(&self)
- Method(Name, &'a MethodSig, Option<&'a Visibility>, &'a [Attribute]),
+ Method(Ident, &'a MethodSig, Option<&'a Visibility>, &'a [Attribute]),
/// |x, y| {}
Closure(&'a [Attribute]),
}
pub fn walk_label<'v, V: Visitor<'v>>(visitor: &mut V, label: &'v Label) {
- visitor.visit_name(label.span, label.name);
+ visitor.visit_ident(label.ident);
}
pub fn walk_lifetime<'v, V: Visitor<'v>>(visitor: &mut V, lifetime: &'v Lifetime) {
visitor.visit_id(lifetime.id);
match lifetime.name {
- LifetimeName::Param(ParamName::Plain(name)) => {
- visitor.visit_name(lifetime.span, name);
+ LifetimeName::Param(ParamName::Plain(ident)) => {
+ visitor.visit_ident(ident);
}
LifetimeName::Param(ParamName::Fresh(_)) |
LifetimeName::Static |
pub fn walk_path_segment<'v, V: Visitor<'v>>(visitor: &mut V,
path_span: Span,
segment: &'v PathSegment) {
- visitor.visit_name(path_span, segment.name);
+ visitor.visit_ident(segment.ident);
if let Some(ref args) = segment.args {
visitor.visit_generic_args(path_span, args);
}
pub fn walk_assoc_type_binding<'v, V: Visitor<'v>>(visitor: &mut V,
type_binding: &'v TypeBinding) {
visitor.visit_id(type_binding.id);
- visitor.visit_name(type_binding.span, type_binding.name);
+ visitor.visit_ident(type_binding.ident);
visitor.visit_ty(&type_binding.ty);
}
PatKind::Ref(ref subpattern, _) => {
visitor.visit_pat(subpattern)
}
- PatKind::Binding(_, canonical_id, ref pth1, ref optional_subpattern) => {
+ PatKind::Binding(_, canonical_id, ident, ref optional_subpattern) => {
visitor.visit_def_mention(Def::Local(canonical_id));
- visitor.visit_name(pth1.span, pth1.node);
+ visitor.visit_ident(ident);
walk_list!(visitor, visit_pat, optional_subpattern);
}
PatKind::Lit(ref expression) => visitor.visit_expr(expression),
visitor.visit_name(foreign_item.span, foreign_item.name);
match foreign_item.node {
- ForeignItemFn(ref function_declaration, ref names, ref generics) => {
+ ForeignItemFn(ref function_declaration, ref param_names, ref generics) => {
visitor.visit_generics(generics);
visitor.visit_fn_decl(function_declaration);
- for name in names {
- visitor.visit_name(name.span, name.node);
+ for &param_name in param_names {
+ visitor.visit_ident(param_name);
}
}
ForeignItemStatic(ref typ, _) => visitor.visit_ty(typ),
visitor.visit_id(param.id);
walk_list!(visitor, visit_attribute, &param.attrs);
match param.name {
- ParamName::Plain(name) => visitor.visit_name(param.span, name),
+ ParamName::Plain(ident) => visitor.visit_ident(ident),
ParamName::Fresh(_) => {}
}
match param.kind {
}
pub fn walk_trait_item<'v, V: Visitor<'v>>(visitor: &mut V, trait_item: &'v TraitItem) {
- visitor.visit_name(trait_item.span, trait_item.name);
+ visitor.visit_ident(trait_item.ident);
walk_list!(visitor, visit_attribute, &trait_item.attrs);
visitor.visit_generics(&trait_item.generics);
match trait_item.node {
visitor.visit_ty(ty);
walk_list!(visitor, visit_nested_body, default);
}
- TraitItemKind::Method(ref sig, TraitMethod::Required(ref names)) => {
+ TraitItemKind::Method(ref sig, TraitMethod::Required(ref param_names)) => {
visitor.visit_id(trait_item.id);
visitor.visit_fn_decl(&sig.decl);
- for name in names {
- visitor.visit_name(name.span, name.node);
+ for &param_name in param_names {
+ visitor.visit_ident(param_name);
}
}
TraitItemKind::Method(ref sig, TraitMethod::Provided(body_id)) => {
- visitor.visit_fn(FnKind::Method(trait_item.name,
+ visitor.visit_fn(FnKind::Method(trait_item.ident,
sig,
None,
&trait_item.attrs),
pub fn walk_trait_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, trait_item_ref: &'v TraitItemRef) {
// NB: Deliberately force a compilation error if/when new fields are added.
- let TraitItemRef { id, name, ref kind, span, ref defaultness } = *trait_item_ref;
+ let TraitItemRef { id, ident, ref kind, span: _, ref defaultness } = *trait_item_ref;
visitor.visit_nested_trait_item(id);
- visitor.visit_name(span, name);
+ visitor.visit_ident(ident);
visitor.visit_associated_item_kind(kind);
visitor.visit_defaultness(defaultness);
}
let ImplItem {
id: _,
hir_id: _,
- name,
+ ident,
ref vis,
ref defaultness,
ref attrs,
ref generics,
ref node,
- span
+ span: _,
} = *impl_item;
- visitor.visit_name(span, name);
+ visitor.visit_ident(ident);
visitor.visit_vis(vis);
visitor.visit_defaultness(defaultness);
walk_list!(visitor, visit_attribute, attrs);
visitor.visit_nested_body(body);
}
ImplItemKind::Method(ref sig, body_id) => {
- visitor.visit_fn(FnKind::Method(impl_item.name,
+ visitor.visit_fn(FnKind::Method(impl_item.ident,
sig,
Some(&impl_item.vis),
&impl_item.attrs),
pub fn walk_impl_item_ref<'v, V: Visitor<'v>>(visitor: &mut V, impl_item_ref: &'v ImplItemRef) {
// NB: Deliberately force a compilation error if/when new fields are added.
- let ImplItemRef { id, name, ref kind, span, ref vis, ref defaultness } = *impl_item_ref;
+ let ImplItemRef { id, ident, ref kind, span: _, ref vis, ref defaultness } = *impl_item_ref;
visitor.visit_nested_impl_item(id);
- visitor.visit_name(span, name);
+ visitor.visit_ident(ident);
visitor.visit_associated_item_kind(kind);
visitor.visit_vis(vis);
visitor.visit_defaultness(defaultness);
use rustc_data_structures::indexed_vec::IndexVec;
use session::Session;
use util::common::FN_OUTPUT_NAME;
-use util::nodemap::{DefIdMap, FxHashMap, NodeMap};
+use util::nodemap::{DefIdMap, NodeMap};
use std::collections::{BTreeMap, HashSet};
use std::fmt::Debug;
cstore: &'a CrateStore,
resolver: &'a mut Resolver,
- name_map: FxHashMap<Ident, Name>,
/// The items being lowered are collected here.
items: BTreeMap<NodeId, hir::Item>,
// When `is_collectin_in_band_lifetimes` is true, each lifetime is checked
// against this list to see if it is already in-scope, or if a definition
// needs to be created for it.
- in_scope_lifetimes: Vec<Name>,
+ in_scope_lifetimes: Vec<Ident>,
type_def_lifetime_params: DefIdMap<usize>,
sess,
cstore,
resolver,
- name_map: FxHashMap(),
items: BTreeMap::new(),
trait_items: BTreeMap::new(),
impl_items: BTreeMap::new(),
self.sess.diagnostic()
}
- fn str_to_ident(&self, s: &'static str) -> Name {
- Symbol::gensym(s)
+ fn str_to_ident(&self, s: &'static str) -> Ident {
+ Ident::with_empty_ctxt(Symbol::gensym(s))
}
fn allow_internal_unstable(&self, reason: CompilerDesugaringKind, span: Span) -> Span {
format: codemap::CompilerDesugaring(reason),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: codemap::hygiene::default_edition(),
});
span.with_ctxt(SyntaxContext::empty().apply_mark(mark))
// that collisions are ok here and this shouldn't
// really show up for end-user.
let str_name = match hir_name {
- ParamName::Plain(name) => name.as_str(),
- ParamName::Fresh(_) => keywords::UnderscoreLifetime.name().as_str(),
+ ParamName::Plain(ident) => ident.as_interned_str(),
+ ParamName::Fresh(_) => keywords::UnderscoreLifetime.name().as_interned_str(),
};
// Add a definition for the in-band lifetime def
self.resolver.definitions().create_def_with_parent(
parent_id.index,
def_node_id,
- DefPathData::LifetimeParam(str_name.as_interned_str()),
+ DefPathData::LifetimeParam(str_name),
DefIndexAddressSpace::High,
Mark::root(),
span,
/// lifetimes are enabled, then we want to push that lifetime into
/// the vector of names to define later. In that case, it will get
/// added to the appropriate generics.
- fn maybe_collect_in_band_lifetime(&mut self, span: Span, name: Name) {
+ fn maybe_collect_in_band_lifetime(&mut self, ident: Ident) {
if !self.is_collecting_in_band_lifetimes {
return;
}
- if self.in_scope_lifetimes.contains(&name) {
+ if self.in_scope_lifetimes.contains(&ident.modern()) {
return;
}
- let hir_name = ParamName::Plain(name);
+ let hir_name = ParamName::Plain(ident);
- if self.lifetimes_to_define.iter().any(|(_, lt_name)| *lt_name == hir_name) {
+ if self.lifetimes_to_define.iter()
+ .any(|(_, lt_name)| lt_name.modern() == hir_name.modern()) {
return;
}
- self.lifetimes_to_define.push((span, hir_name));
+ self.lifetimes_to_define.push((ident.span, hir_name));
}
/// When we have either an elided or `'_` lifetime in an impl
{
let old_len = self.in_scope_lifetimes.len();
let lt_def_names = params.iter().filter_map(|param| match param.kind {
- GenericParamKind::Lifetime { .. } => Some(param.ident.name),
+ GenericParamKind::Lifetime { .. } => Some(param.ident.modern()),
_ => None,
});
self.in_scope_lifetimes.extend(lt_def_names);
{
let old_len = self.in_scope_lifetimes.len();
let lt_def_names = params.iter().filter_map(|param| match param.kind {
- hir::GenericParamKind::Lifetime { .. } => Some(param.name.name()),
+ hir::GenericParamKind::Lifetime { .. } => Some(param.name.ident().modern()),
_ => None,
});
self.in_scope_lifetimes.extend(lt_def_names);
}
}
- fn lower_ident(&mut self, ident: Ident) -> Name {
- let ident = ident.modern();
- if ident.span.ctxt() == SyntaxContext::empty() {
- return ident.name;
- }
- *self.name_map
- .entry(ident)
- .or_insert_with(|| Symbol::from_ident(ident))
- }
-
fn lower_label(&mut self, label: Option<Label>) -> Option<hir::Label> {
label.map(|label| hir::Label {
- name: label.ident.name,
- span: label.ident.span,
+ ident: label.ident,
})
}
fn lower_ty_binding(&mut self, b: &TypeBinding, itctx: ImplTraitContext) -> hir::TypeBinding {
hir::TypeBinding {
id: self.lower_node_id(b.id).node_id,
- name: self.lower_ident(b.ident),
+ ident: b.ident,
ty: self.lower_ty(&b.ty, itctx),
span: b.span,
}
-> hir::GenericArg {
match arg {
ast::GenericArg::Lifetime(lt) => GenericArg::Lifetime(self.lower_lifetime(&lt)),
- ast::GenericArg::Type(ty) => GenericArg::Type(self.lower_ty(&ty, itctx)),
+ ast::GenericArg::Type(ty) => GenericArg::Type(self.lower_ty_direct(&ty, itctx)),
}
}
fn lower_ty(&mut self, t: &Ty, itctx: ImplTraitContext) -> P<hir::Ty> {
+ P(self.lower_ty_direct(t, itctx))
+ }
+
+ fn lower_ty_direct(&mut self, t: &Ty, itctx: ImplTraitContext) -> hir::Ty {
let kind = match t.node {
TyKind::Infer => hir::TyInfer,
TyKind::Err => hir::TyErr,
),
TyKind::Never => hir::TyNever,
TyKind::Tup(ref tys) => {
- hir::TyTup(tys.iter().map(|ty| self.lower_ty(ty, itctx)).collect())
+ hir::TyTup(tys.iter().map(|ty| self.lower_ty_direct(ty, itctx)).collect())
}
TyKind::Paren(ref ty) => {
- return self.lower_ty(ty, itctx);
+ return self.lower_ty_direct(ty, itctx);
}
TyKind::Path(ref qself, ref path) => {
let id = self.lower_node_id(t.id);
None,
P(hir::Path {
def: self.expect_full_def(t.id),
- segments: hir_vec![hir::PathSegment::from_name(keywords::SelfType.name())],
+ segments: hir_vec![hir::PathSegment::from_ident(keywords::SelfType.ident())],
span: t.span,
}),
)),
let hir_bounds = self.lower_param_bounds(bounds, itctx);
// Set the name to `impl Bound1 + Bound2`
- let name = Symbol::intern(&pprust::ty_to_string(t));
+ let ident = Ident::from_str(&pprust::ty_to_string(t)).with_span_pos(span);
self.in_band_ty_params.push(hir::GenericParam {
id: def_node_id,
- name: ParamName::Plain(name),
- span,
+ name: ParamName::Plain(ident),
pure_wrt_drop: false,
attrs: hir_vec![],
bounds: hir_bounds,
+ span,
kind: hir::GenericParamKind::Type {
default: None,
synthetic: Some(hir::SyntheticTyParamKind::ImplTrait),
P(hir::Path {
span,
def: Def::TyParam(DefId::local(def_index)),
- segments: hir_vec![hir::PathSegment::from_name(name)],
+ segments: hir_vec![hir::PathSegment::from_ident(ident)],
}),
))
}
};
let LoweredNodeId { node_id, hir_id } = self.lower_node_id(t.id);
- P(hir::Ty {
+ hir::Ty {
id: node_id,
node: kind,
span: t.span,
hir_id,
- })
+ }
}
fn lower_existential_impl_trait(
self.context.resolver.definitions().create_def_with_parent(
self.parent,
def_node_id,
- DefPathData::LifetimeParam(name.name().as_interned_str()),
+ DefPathData::LifetimeParam(name.ident().as_interned_str()),
DefIndexAddressSpace::High,
Mark::root(),
lifetime.span,
let name = match name {
hir::LifetimeName::Underscore => {
- hir::ParamName::Plain(keywords::UnderscoreLifetime.name())
+ hir::ParamName::Plain(keywords::UnderscoreLifetime.ident())
}
hir::LifetimeName::Param(param_name) => param_name,
_ => bug!("expected LifetimeName::Param or ParamName::Plain"),
// e.g. `Vec` in `Vec::new` or `<I as Iterator>::Item` in
// `<I as Iterator>::Item::default`.
let new_id = self.next_id();
- self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path))
+ P(self.ty_path(new_id, p.span, hir::QPath::Resolved(qself, path)))
};
// Anything after the base path are associated "extensions",
// Wrap the associated extension in another type node.
let new_id = self.next_id();
- ty = self.ty_path(new_id, p.span, qpath);
+ ty = P(self.ty_path(new_id, p.span, qpath));
}
// Should've returned in the for loop above.
&mut self,
def: Def,
p: &Path,
- name: Option<Name>,
+ ident: Option<Ident>,
param_mode: ParamMode,
) -> hir::Path {
hir::Path {
ImplTraitContext::Disallowed,
)
})
- .chain(name.map(|name| hir::PathSegment::from_name(name)))
+ .chain(ident.map(|ident| hir::PathSegment::from_ident(ident)))
.collect(),
span: p.span,
}
}
hir::PathSegment::new(
- self.lower_ident(segment.ident),
+ segment.ident,
generic_args,
infer_types,
)
|this| {
const DISALLOWED: ImplTraitContext = ImplTraitContext::Disallowed;
let &ParenthesisedArgs { ref inputs, ref output, span } = data;
- let inputs = inputs.iter().map(|ty| this.lower_ty(ty, DISALLOWED)).collect();
+ let inputs = inputs.iter().map(|ty| this.lower_ty_direct(ty, DISALLOWED)).collect();
let mk_tup = |this: &mut Self, tys, span| {
let LoweredNodeId { node_id, hir_id } = this.next_id();
- P(hir::Ty { node: hir::TyTup(tys), id: node_id, hir_id, span })
+ hir::Ty { node: hir::TyTup(tys), id: node_id, hir_id, span }
};
(
bindings: hir_vec![
hir::TypeBinding {
id: this.next_id().node_id,
- name: Symbol::intern(FN_OUTPUT_NAME),
+ ident: Ident::from_str(FN_OUTPUT_NAME),
ty: output
.as_ref()
.map(|ty| this.lower_ty(&ty, DISALLOWED))
- .unwrap_or_else(|| mk_tup(this, hir::HirVec::new(), span)),
+ .unwrap_or_else(|| P(mk_tup(this, hir::HirVec::new(), span))),
span: output.as_ref().map_or(span, |ty| ty.span),
}
],
}
}
- fn lower_fn_args_to_names(&mut self, decl: &FnDecl) -> hir::HirVec<Spanned<Name>> {
+ fn lower_fn_args_to_names(&mut self, decl: &FnDecl) -> hir::HirVec<Ident> {
decl.inputs
.iter()
.map(|arg| match arg.pat.node {
- PatKind::Ident(_, ident, None) => respan(ident.span, ident.name),
- _ => respan(arg.pat.span, keywords::Invalid.name()),
+ PatKind::Ident(_, ident, _) => ident,
+ _ => Ident::new(keywords::Invalid.name(), arg.pat.span),
})
.collect()
}
.iter()
.map(|arg| {
if let Some(def_id) = fn_def_id {
- self.lower_ty(&arg.ty, ImplTraitContext::Universal(def_id))
+ self.lower_ty_direct(&arg.ty, ImplTraitContext::Universal(def_id))
} else {
- self.lower_ty(&arg.ty, ImplTraitContext::Disallowed)
+ self.lower_ty_direct(&arg.ty, ImplTraitContext::Disallowed)
}
})
.collect::<HirVec<_>>();
// fn_def_id: DefId of the parent function. Used to create child impl trait definition.
fn lower_async_fn_ret_ty(
&mut self,
- inputs: &[P<hir::Ty>],
+ inputs: &[hir::Ty],
output: &FunctionRetTy,
fn_def_id: DefId,
) -> hir::FunctionRetTy {
let future_params = P(hir::GenericArgs {
args: hir_vec![],
bindings: hir_vec![hir::TypeBinding {
- name: Symbol::intern(FN_OUTPUT_NAME),
+ ident: Ident::from_str(FN_OUTPUT_NAME),
ty: output_ty,
id: this.next_id().node_id,
span,
fn lower_lifetime(&mut self, l: &Lifetime) -> hir::Lifetime {
let span = l.ident.span;
- match self.lower_ident(l.ident) {
- x if x == "'static" => self.new_named_lifetime(l.id, span, hir::LifetimeName::Static),
- x if x == "'_" => match self.anonymous_lifetime_mode {
- AnonymousLifetimeMode::CreateParameter => {
- let fresh_name = self.collect_fresh_in_band_lifetime(span);
- self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(fresh_name))
- }
+ match l.ident {
+ ident if ident.name == keywords::StaticLifetime.name() =>
+ self.new_named_lifetime(l.id, span, hir::LifetimeName::Static),
+ ident if ident.name == keywords::UnderscoreLifetime.name() =>
+ match self.anonymous_lifetime_mode {
+ AnonymousLifetimeMode::CreateParameter => {
+ let fresh_name = self.collect_fresh_in_band_lifetime(span);
+ self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(fresh_name))
+ }
- AnonymousLifetimeMode::PassThrough => {
- self.new_named_lifetime(l.id, span, hir::LifetimeName::Underscore)
- }
- },
- name => {
- self.maybe_collect_in_band_lifetime(span, name);
- let param_name = ParamName::Plain(name);
+ AnonymousLifetimeMode::PassThrough => {
+ self.new_named_lifetime(l.id, span, hir::LifetimeName::Underscore)
+ }
+ },
+ ident => {
+ self.maybe_collect_in_band_lifetime(ident);
+ let param_name = ParamName::Plain(ident);
self.new_named_lifetime(l.id, span, hir::LifetimeName::Param(param_name))
}
}
let lt = self.lower_lifetime(&Lifetime { id: param.id, ident: param.ident });
let param_name = match lt.name {
hir::LifetimeName::Param(param_name) => param_name,
- _ => hir::ParamName::Plain(lt.name.name()),
+ _ => hir::ParamName::Plain(lt.name.ident()),
};
let param = hir::GenericParam {
id: lt.id,
param
}
GenericParamKind::Type { ref default, .. } => {
- let mut name = self.lower_ident(param.ident);
-
// Don't expose `Self` (recovered "keyword used as ident" parse error).
// `rustc::ty` expects `Self` to be only used for a trait's `Self`.
// Instead, use gensym("Self") to create a distinct name that looks the same.
- if name == keywords::SelfType.name() {
- name = Symbol::gensym("Self");
- }
+ let ident = if param.ident.name == keywords::SelfType.name() {
+ param.ident.gensym()
+ } else {
+ param.ident
+ };
let add_bounds = add_bounds.get(&param.id).map_or(&[][..], |x| &x);
if !add_bounds.is_empty() {
hir::GenericParam {
id: self.lower_node_id(param.id).node_id,
- name: hir::ParamName::Plain(name),
- span: param.ident.span,
+ name: hir::ParamName::Plain(ident),
+ pure_wrt_drop: attr::contains_name(&param.attrs, "may_dangle"),
+ attrs: self.lower_attrs(&param.attrs),
bounds,
+ span: ident.span,
kind: hir::GenericParamKind::Type {
default: default.as_ref().map(|x| {
self.lower_ty(x, ImplTraitContext::Disallowed)
hir::TraitItem {
id: node_id,
hir_id,
- name: self.lower_ident(i.ident),
+ ident: i.ident,
attrs: self.lower_attrs(&i.attrs),
generics,
node,
};
hir::TraitItemRef {
id: hir::TraitItemId { node_id: i.id },
- name: self.lower_ident(i.ident),
+ ident: i.ident,
span: i.span,
defaultness: self.lower_defaultness(Defaultness::Default, has_default),
kind,
hir::ImplItem {
id: node_id,
hir_id,
- name: self.lower_ident(i.ident),
+ ident: i.ident,
attrs: self.lower_attrs(&i.attrs),
generics,
vis: self.lower_visibility(&i.vis, None),
fn lower_impl_item_ref(&mut self, i: &ImplItem) -> hir::ImplItemRef {
hir::ImplItemRef {
id: hir::ImplItemId { node_id: i.id },
- name: self.lower_ident(i.ident),
+ ident: i.ident,
span: i.span,
vis: self.lower_visibility(&i.vis, Some(i.id)),
defaultness: self.lower_defaultness(i.defaultness, true /* [1] */),
hir::PatKind::Binding(
self.lower_binding_mode(binding_mode),
canonical_id,
- respan(ident.span, ident.name),
+ ident,
sub.as_ref().map(|x| self.lower_pat(x)),
)
}
P(hir::Path {
span: ident.span,
def,
- segments: hir_vec![hir::PathSegment::from_name(ident.name)],
+ segments: hir_vec![hir::PathSegment::from_ident(ident)],
}),
)),
}
PatKind::Ref(ref inner, mutbl) => {
hir::PatKind::Ref(self.lower_pat(inner), self.lower_mutability(mutbl))
}
- PatKind::Range(ref e1, ref e2, ref end) => hir::PatKind::Range(
+ PatKind::Range(ref e1, ref e2, Spanned { node: ref end, .. }) => hir::PatKind::Range(
P(self.lower_expr(e1)),
P(self.lower_expr(e2)),
self.lower_range_end(end),
this.expr_block(block, ThinVec::new())
})
})
- },
+ }
ExprKind::Closure(
- capture_clause, asyncness, movability, ref decl, ref body, fn_decl_span) =>
- {
- self.with_new_scopes(|this| {
- if let IsAsync::Async(async_closure_node_id) = asyncness {
+ capture_clause, asyncness, movability, ref decl, ref body, fn_decl_span
+ ) => {
+ if let IsAsync::Async(async_closure_node_id) = asyncness {
+ let outer_decl = FnDecl {
+ inputs: decl.inputs.clone(),
+ output: FunctionRetTy::Default(fn_decl_span),
+ variadic: false,
+ };
+ // We need to lower the declaration outside the new scope, because we
+ // have to conserve the state of being inside a loop condition for the
+ // closure argument types.
+ let fn_decl = self.lower_fn_decl(&outer_decl, None, false, false);
+
+ self.with_new_scopes(|this| {
// FIXME(cramertj) allow `async` non-`move` closures with
if capture_clause == CaptureBy::Ref &&
!decl.inputs.is_empty()
// Transform `async |x: u8| -> X { ... }` into
// `|x: u8| future_from_generator(|| -> X { ... })`
- let outer_decl = FnDecl {
- inputs: decl.inputs.clone(),
- output: FunctionRetTy::Default(fn_decl_span),
- variadic: false,
- };
let body_id = this.lower_body(Some(&outer_decl), |this| {
let async_ret_ty = if let FunctionRetTy::Ty(ty) = &decl.output {
Some(&**ty)
});
hir::ExprClosure(
this.lower_capture_clause(capture_clause),
- this.lower_fn_decl(&outer_decl, None, false, false),
+ fn_decl,
body_id,
fn_decl_span,
None,
)
- } else {
+ })
+ } else {
+ // Lower outside new scope to preserve `is_in_loop_condition`.
+ let fn_decl = self.lower_fn_decl(decl, None, false, false);
+
+ self.with_new_scopes(|this| {
let mut is_generator = false;
let body_id = this.lower_body(Some(decl), |this| {
let e = this.lower_expr(body);
};
hir::ExprClosure(
this.lower_capture_clause(capture_clause),
- this.lower_fn_decl(decl, None, false, false),
+ fn_decl,
body_id,
fn_decl_span,
generator_option,
)
- }
- })
+ })
+ }
}
ExprKind::Block(ref blk, opt_label) => {
hir::ExprBlock(self.lower_block(blk,
let e1 = self.lower_expr(e1);
let e2 = self.lower_expr(e2);
let ty_path = P(self.std_path(span, &["ops", "RangeInclusive"], None, false));
- let ty = self.ty_path(id, span, hir::QPath::Resolved(None, ty_path));
- let new_seg = P(hir::PathSegment::from_name(Symbol::intern("new")));
+ let ty = P(self.ty_path(id, span, hir::QPath::Resolved(None, ty_path)));
+ let new_seg = P(hir::PathSegment::from_ident(Ident::from_str("new")));
let new_path = hir::QPath::TypeRelative(ty, new_seg);
let new = P(self.expr(span, hir::ExprPath(new_path), ThinVec::new()));
hir::ExprCall(new, hir_vec![e1, e2])
self.expr(span, hir::ExprCall(e, args), ThinVec::new())
}
- fn expr_ident(&mut self, span: Span, id: Name, binding: NodeId) -> hir::Expr {
- self.expr_ident_with_attrs(span, id, binding, ThinVec::new())
+ fn expr_ident(&mut self, span: Span, ident: Ident, binding: NodeId) -> hir::Expr {
+ self.expr_ident_with_attrs(span, ident, binding, ThinVec::new())
}
fn expr_ident_with_attrs(
&mut self,
span: Span,
- id: Name,
+ ident: Ident,
binding: NodeId,
attrs: ThinVec<Attribute>,
) -> hir::Expr {
P(hir::Path {
span,
def: Def::Local(binding),
- segments: hir_vec![hir::PathSegment::from_name(id)],
+ segments: hir_vec![hir::PathSegment::from_ident(ident)],
}),
));
&mut self,
sp: Span,
mutbl: bool,
- ident: Name,
+ ident: Ident,
ex: P<hir::Expr>,
) -> (hir::Stmt, NodeId) {
let pat = if mutbl {
self.pat(span, pt)
}
- fn pat_ident(&mut self, span: Span, name: Name) -> P<hir::Pat> {
- self.pat_ident_binding_mode(span, name, hir::BindingAnnotation::Unannotated)
+ fn pat_ident(&mut self, span: Span, ident: Ident) -> P<hir::Pat> {
+ self.pat_ident_binding_mode(span, ident, hir::BindingAnnotation::Unannotated)
}
fn pat_ident_binding_mode(
&mut self,
span: Span,
- name: Name,
+ ident: Ident,
bm: hir::BindingAnnotation,
) -> P<hir::Pat> {
let LoweredNodeId { node_id, hir_id } = self.next_id();
P(hir::Pat {
id: node_id,
hir_id,
- node: hir::PatKind::Binding(bm, node_id, Spanned { span, node: name }, None),
+ node: hir::PatKind::Binding(bm, node_id, ident.with_span_pos(span), None),
span,
})
}
.resolve_str_path(span, self.crate_root, components, params, is_value)
}
- fn ty_path(&mut self, id: LoweredNodeId, span: Span, qpath: hir::QPath) -> P<hir::Ty> {
+ fn ty_path(&mut self, id: LoweredNodeId, span: Span, qpath: hir::QPath) -> hir::Ty {
let mut id = id;
let node = match qpath {
hir::QPath::Resolved(None, path) => {
}
_ => hir::TyPath(qpath),
};
- P(hir::Ty {
+ hir::Ty {
id: id.node_id,
hir_id: id.hir_id,
node,
span,
- })
+ }
}
/// Invoked to create the lifetime argument for a type `&T`
use hir::map::{self, Node};
use hir::{Expr, FnDecl};
use hir::intravisit::FnKind;
-use syntax::ast::{Attribute, Name, NodeId};
+use syntax::ast::{Attribute, Ident, Name, NodeId};
use syntax_pos::Span;
/// An FnLikeNode is a Node that is like a fn, in that it has a decl
let closure = |c: ClosureParts<'a>| {
FnKind::Closure(c.attrs)
};
- let method = |_, name: Name, sig: &'a ast::MethodSig, vis, _, _, attrs| {
- FnKind::Method(name, sig, vis, attrs)
+ let method = |_, ident: Ident, sig: &'a ast::MethodSig, vis, _, _, attrs| {
+ FnKind::Method(ident, sig, vis, attrs)
};
self.handle(item, method, closure)
}
fn handle<A, I, M, C>(self, item_fn: I, method: M, closure: C) -> A where
I: FnOnce(ItemFnParts<'a>) -> A,
M: FnOnce(NodeId,
- Name,
+ Ident,
&'a ast::MethodSig,
Option<&'a ast::Visibility>,
ast::BodyId,
},
map::NodeTraitItem(ti) => match ti.node {
ast::TraitItemKind::Method(ref sig, ast::TraitMethod::Provided(body)) => {
- method(ti.id, ti.name, sig, None, body, ti.span, &ti.attrs)
+ method(ti.id, ti.ident, sig, None, body, ti.span, &ti.attrs)
}
_ => bug!("trait method FnLikeNode that is not fn-like"),
},
map::NodeImplItem(ii) => {
match ii.node {
ast::ImplItemKind::Method(ref sig, body) => {
- method(ii.id, ii.name, sig, Some(&ii.vis), body, ii.span, &ii.attrs)
+ method(ii.id, ii.ident, sig, Some(&ii.vis), body, ii.span, &ii.attrs)
}
_ => {
bug!("impl method FnLikeNode that is not fn-like")
// map the actual nodes, not the duplicate ones in the *Ref.
let TraitItemRef {
id,
- name: _,
+ ident: _,
kind: _,
span: _,
defaultness: _,
// map the actual nodes, not the duplicate ones in the *Ref.
let ImplItemRef {
id,
- name: _,
+ ident: _,
kind: _,
span: _,
vis: _,
// information we encapsulate into, the better
let def_data = match i.node {
ItemKind::Impl(..) => DefPathData::Impl,
- ItemKind::Trait(..) => DefPathData::Trait(i.ident.name.as_interned_str()),
+ ItemKind::Trait(..) => DefPathData::Trait(i.ident.as_interned_str()),
ItemKind::Enum(..) | ItemKind::Struct(..) | ItemKind::Union(..) |
ItemKind::TraitAlias(..) |
ItemKind::ExternCrate(..) | ItemKind::ForeignMod(..) | ItemKind::Ty(..) =>
- DefPathData::TypeNs(i.ident.name.as_interned_str()),
+ DefPathData::TypeNs(i.ident.as_interned_str()),
ItemKind::Mod(..) if i.ident == keywords::Invalid.ident() => {
return visit::walk_item(self, i);
}
|this| visit::walk_item(this, i)
)
}
- ItemKind::Mod(..) => DefPathData::Module(i.ident.name.as_interned_str()),
+ ItemKind::Mod(..) => DefPathData::Module(i.ident.as_interned_str()),
ItemKind::Static(..) | ItemKind::Const(..) | ItemKind::Fn(..) =>
- DefPathData::ValueNs(i.ident.name.as_interned_str()),
- ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.name.as_interned_str()),
+ DefPathData::ValueNs(i.ident.as_interned_str()),
+ ItemKind::MacroDef(..) => DefPathData::MacroDef(i.ident.as_interned_str()),
ItemKind::Mac(..) => return self.visit_macro_invoc(i.id),
ItemKind::GlobalAsm(..) => DefPathData::Misc,
ItemKind::Use(..) => {
}
let def = self.create_def(foreign_item.id,
- DefPathData::ValueNs(foreign_item.ident.name.as_interned_str()),
+ DefPathData::ValueNs(foreign_item.ident.as_interned_str()),
REGULAR_SPACE,
foreign_item.span);
fn visit_variant(&mut self, v: &'a Variant, g: &'a Generics, item_id: NodeId) {
let def = self.create_def(v.node.data.id(),
- DefPathData::EnumVariant(v.node.ident
- .name.as_interned_str()),
+ DefPathData::EnumVariant(v.node.ident.as_interned_str()),
REGULAR_SPACE,
v.span);
self.with_parent(def, |this| visit::walk_variant(this, v, g, item_id));
}
fn visit_generic_param(&mut self, param: &'a GenericParam) {
- let name = param.ident.name.as_interned_str();
+ let name = param.ident.as_interned_str();
let def_path_data = match param.kind {
GenericParamKind::Lifetime { .. } => DefPathData::LifetimeParam(name),
GenericParamKind::Type { .. } => DefPathData::TypeParam(name),
fn visit_trait_item(&mut self, ti: &'a TraitItem) {
let def_data = match ti.node {
TraitItemKind::Method(..) | TraitItemKind::Const(..) =>
- DefPathData::ValueNs(ti.ident.name.as_interned_str()),
+ DefPathData::ValueNs(ti.ident.as_interned_str()),
TraitItemKind::Type(..) => {
- DefPathData::AssocTypeInTrait(ti.ident.name.as_interned_str())
+ DefPathData::AssocTypeInTrait(ti.ident.as_interned_str())
},
TraitItemKind::Macro(..) => return self.visit_macro_invoc(ti.id),
};
)
}
ImplItemKind::Method(..) | ImplItemKind::Const(..) =>
- DefPathData::ValueNs(ii.ident.name.as_interned_str()),
- ImplItemKind::Type(..) => DefPathData::AssocTypeInImpl(ii.ident.name.as_interned_str()),
+ DefPathData::ValueNs(ii.ident.as_interned_str()),
+ ImplItemKind::Type(..) => DefPathData::AssocTypeInImpl(ii.ident.as_interned_str()),
ImplItemKind::Macro(..) => return self.visit_macro_invoc(ii.id),
};
NodeItem(&Item { node: ItemTrait(..), .. }) => {
keywords::SelfType.name()
}
- NodeGenericParam(param) => param.name.name(),
+ NodeGenericParam(param) => param.name.ident().name,
_ => bug!("ty_param_name: {} not a type parameter", self.node_to_string(id)),
}
}
match self.get(id) {
NodeItem(i) => i.name,
NodeForeignItem(i) => i.name,
- NodeImplItem(ii) => ii.name,
- NodeTraitItem(ti) => ti.name,
+ NodeImplItem(ii) => ii.ident.name,
+ NodeTraitItem(ti) => ti.ident.name,
NodeVariant(v) => v.node.name,
NodeField(f) => f.ident.name,
- NodeLifetime(lt) => lt.name.name(),
- NodeGenericParam(param) => param.name.name(),
- NodeBinding(&Pat { node: PatKind::Binding(_,_,l,_), .. }) => l.node,
+ NodeLifetime(lt) => lt.name.ident().name,
+ NodeGenericParam(param) => param.name.ident().name,
+ NodeBinding(&Pat { node: PatKind::Binding(_,_,l,_), .. }) => l.name,
NodeStructCtor(_) => self.name(self.get_parent(id)),
_ => bug!("no name for {}", self.node_to_string(id))
}
impl Named for ForeignItem { fn name(&self) -> Name { self.name } }
impl Named for Variant_ { fn name(&self) -> Name { self.name } }
impl Named for StructField { fn name(&self) -> Name { self.ident.name } }
-impl Named for TraitItem { fn name(&self) -> Name { self.name } }
-impl Named for ImplItem { fn name(&self) -> Name { self.name } }
+impl Named for TraitItem { fn name(&self) -> Name { self.ident.name } }
+impl Named for ImplItem { fn name(&self) -> Name { self.ident.name } }
pub fn map_crate<'hir>(sess: &::session::Session,
Some(NodeImplItem(ii)) => {
match ii.node {
ImplItemKind::Const(..) => {
- format!("assoc const {} in {}{}", ii.name, path_str(), id_str)
+ format!("assoc const {} in {}{}", ii.ident, path_str(), id_str)
}
ImplItemKind::Method(..) => {
- format!("method {} in {}{}", ii.name, path_str(), id_str)
+ format!("method {} in {}{}", ii.ident, path_str(), id_str)
}
ImplItemKind::Type(_) => {
- format!("assoc type {} in {}{}", ii.name, path_str(), id_str)
+ format!("assoc type {} in {}{}", ii.ident, path_str(), id_str)
}
}
}
TraitItemKind::Type(..) => "assoc type",
};
- format!("{} {} in {}{}", kind, ti.name, path_str(), id_str)
+ format!("{} {} in {}{}", kind, ti.ident, path_str(), id_str)
}
Some(NodeVariant(ref variant)) => {
format!("variant {} in {}{}",
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub struct Label {
- pub name: Name,
- pub span: Span,
+ pub ident: Ident,
}
impl fmt::Debug for Label {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- write!(f, "label({:?})", self.name)
+ write!(f, "label({:?})", self.ident)
}
}
#[derive(Debug, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Copy)]
pub enum ParamName {
/// Some user-given name like `T` or `'x`.
- Plain(Name),
+ Plain(Ident),
/// Synthetic name generated when user elided a lifetime in an impl header,
/// e.g. the lifetimes in cases like these:
}
impl ParamName {
- pub fn name(&self) -> Name {
+ pub fn ident(&self) -> Ident {
+ match *self {
+ ParamName::Plain(ident) => ident,
+ ParamName::Fresh(_) => keywords::UnderscoreLifetime.ident(),
+ }
+ }
+
+ pub fn modern(&self) -> ParamName {
match *self {
- ParamName::Plain(name) => name,
- ParamName::Fresh(_) => keywords::UnderscoreLifetime.name(),
+ ParamName::Plain(ident) => ParamName::Plain(ident.modern()),
+ param_name => param_name,
}
}
}
}
impl LifetimeName {
- pub fn name(&self) -> Name {
- use self::LifetimeName::*;
+ pub fn ident(&self) -> Ident {
match *self {
- Implicit => keywords::Invalid.name(),
- Underscore => keywords::UnderscoreLifetime.name(),
- Static => keywords::StaticLifetime.name(),
- Param(param_name) => param_name.name(),
+ LifetimeName::Implicit => keywords::Invalid.ident(),
+ LifetimeName::Underscore => keywords::UnderscoreLifetime.ident(),
+ LifetimeName::Static => keywords::StaticLifetime.ident(),
+ LifetimeName::Param(param_name) => param_name.ident(),
}
}
pub fn is_elided(&self) -> bool {
- use self::LifetimeName::*;
match self {
- Implicit | Underscore => true,
+ LifetimeName::Implicit | LifetimeName::Underscore => true,
// It might seem surprising that `Fresh(_)` counts as
// *not* elided -- but this is because, as far as the code
// in the compiler is concerned -- `Fresh(_)` variants act
// equivalently to "some fresh name". They correspond to
// early-bound regions on an impl, in other words.
- Param(_) | Static => false,
+ LifetimeName::Param(_) | LifetimeName::Static => false,
}
}
fn is_static(&self) -> bool {
self == &LifetimeName::Static
}
+
+ pub fn modern(&self) -> LifetimeName {
+ match *self {
+ LifetimeName::Param(param_name) => LifetimeName::Param(param_name.modern()),
+ lifetime_name => lifetime_name,
+ }
+ }
+}
+
+impl fmt::Display for Lifetime {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.name.ident().fmt(f)
+ }
}
impl fmt::Debug for Lifetime {
impl Path {
pub fn is_global(&self) -> bool {
- !self.segments.is_empty() && self.segments[0].name == keywords::CrateRoot.name()
+ !self.segments.is_empty() && self.segments[0].ident.name == keywords::CrateRoot.name()
}
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct PathSegment {
/// The identifier portion of this path segment.
- pub name: Name,
+ pub ident: Ident,
/// Type/lifetime parameters attached to this path. They come in
/// two flavors: `Path<A,B,C>` and `Path(A,B) -> C`. Note that
impl PathSegment {
/// Convert an identifier to the corresponding segment.
- pub fn from_name(name: Name) -> PathSegment {
+ pub fn from_ident(ident: Ident) -> PathSegment {
PathSegment {
- name,
+ ident,
infer_types: true,
args: None,
}
}
- pub fn new(name: Name, args: GenericArgs, infer_types: bool) -> Self {
+ pub fn new(ident: Ident, args: GenericArgs, infer_types: bool) -> Self {
PathSegment {
- name,
+ ident,
infer_types,
args: if args.is_empty() {
None
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum GenericArg {
Lifetime(Lifetime),
- Type(P<Ty>),
+ Type(Ty),
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
self.args.is_empty() && self.bindings.is_empty() && !self.parenthesized
}
- pub fn inputs(&self) -> &[P<Ty>] {
+ pub fn inputs(&self) -> &[Ty] {
if self.parenthesized {
for arg in &self.args {
match arg {
/// The `NodeId` is the canonical ID for the variable being bound,
/// e.g. in `Ok(x) | Err(x)`, both `x` use the same canonical ID,
/// which is the pattern ID of the first `x`.
- Binding(BindingAnnotation, NodeId, Spanned<Name>, Option<P<Pat>>),
+ Binding(BindingAnnotation, NodeId, Ident, Option<P<Pat>>),
/// A struct or struct variant pattern, e.g. `Variant {x, y, ..}`.
/// The `bool` is `true` in the presence of a `..`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItem {
pub id: NodeId,
- pub name: Name,
+ pub ident: Ident,
pub hir_id: HirId,
pub attrs: HirVec<Attribute>,
pub generics: Generics,
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum TraitMethod {
/// No default body in the trait, just a signature.
- Required(HirVec<Spanned<Name>>),
+ Required(HirVec<Ident>),
/// Both signature and body are provided in the trait.
Provided(BodyId),
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItem {
pub id: NodeId,
- pub name: Name,
+ pub ident: Ident,
pub hir_id: HirId,
pub vis: Visibility,
pub defaultness: Defaultness,
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TypeBinding {
pub id: NodeId,
- pub name: Name,
+ pub ident: Ident,
pub ty: P<Ty>,
pub span: Span,
}
pub abi: Abi,
pub generic_params: HirVec<GenericParam>,
pub decl: P<FnDecl>,
- pub arg_names: HirVec<Spanned<Name>>,
+ pub arg_names: HirVec<Ident>,
}
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
/// The never type (`!`)
TyNever,
/// A tuple (`(A, B, C, D,...)`)
- TyTup(HirVec<P<Ty>>),
+ TyTup(HirVec<Ty>),
/// A path to a type definition (`module::module::...::Type`), or an
/// associated type, e.g. `<Vec<T> as Trait>::Type` or `<T>::Target`.
///
/// Represents the header (not the body) of a function declaration
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct FnDecl {
- pub inputs: HirVec<P<Ty>>,
+ pub inputs: HirVec<Ty>,
pub output: FunctionRetTy,
pub variadic: bool,
/// True if this function has an `self`, `&self` or `&mut self` receiver
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct TraitItemRef {
pub id: TraitItemId,
- pub name: Name,
+ pub ident: Ident,
pub kind: AssociatedItemKind,
pub span: Span,
pub defaultness: Defaultness,
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct ImplItemRef {
pub id: ImplItemId,
- pub name: Name,
+ pub ident: Ident,
pub kind: AssociatedItemKind,
pub span: Span,
pub vis: Visibility,
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum ForeignItem_ {
/// A foreign function
- ForeignItemFn(P<FnDecl>, HirVec<Spanned<Name>>, Generics),
+ ForeignItemFn(P<FnDecl>, HirVec<Ident>, Generics),
/// A foreign static item (`static ext: u8`), with optional mutability
/// (the boolean is true when mutable)
ForeignItemStatic(P<Ty>, bool),
use hir::def_id::DefId;
use hir::{self, HirId, PatKind};
use syntax::ast;
-use syntax::codemap::Spanned;
use syntax_pos::Span;
use std::iter::{Enumerate, ExactSizeIterator};
/// Call `f` on every "binding" in a pattern, e.g., on `a` in
/// `match foo() { Some(a) => (), None => () }`
pub fn each_binding<F>(&self, mut f: F)
- where F: FnMut(hir::BindingAnnotation, HirId, Span, &Spanned<ast::Name>),
+ where F: FnMut(hir::BindingAnnotation, HirId, Span, ast::Ident),
{
self.walk(|p| {
- if let PatKind::Binding(binding_mode, _, ref pth, _) = p.node {
- f(binding_mode, p.hir_id, p.span, pth);
+ if let PatKind::Binding(binding_mode, _, ident, _) = p.node {
+ f(binding_mode, p.hir_id, p.span, ident);
}
true
});
contains_bindings
}
- pub fn simple_name(&self) -> Option<ast::Name> {
+ pub fn simple_ident(&self) -> Option<ast::Ident> {
match self.node {
- PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ref path1, None) |
- PatKind::Binding(hir::BindingAnnotation::Mutable, _, ref path1, None) =>
- Some(path1.node),
- _ => None,
- }
- }
-
- pub fn simple_span(&self) -> Option<Span> {
- match self.node {
- PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ref path1, None) |
- PatKind::Binding(hir::BindingAnnotation::Mutable, _, ref path1, None) =>
- Some(path1.span),
+ PatKind::Binding(hir::BindingAnnotation::Unannotated, _, ident, None) |
+ PatKind::Binding(hir::BindingAnnotation::Mutable, _, ident, None) => Some(ident),
_ => None,
}
}
use rustc_target::spec::abi::Abi;
use syntax::ast;
-use syntax::codemap::{CodeMap, Spanned};
+use syntax::codemap::CodeMap;
use syntax::parse::ParseSess;
use syntax::parse::lexer::comments;
use syntax::print::pp::{self, Breaks};
}
fn print_associated_const(&mut self,
- name: ast::Name,
+ ident: ast::Ident,
ty: &hir::Ty,
default: Option<hir::BodyId>,
vis: &hir::Visibility)
-> io::Result<()> {
self.s.word(&visibility_qualified(vis, ""))?;
self.word_space("const")?;
- self.print_name(name)?;
+ self.print_ident(ident)?;
self.word_space(":")?;
self.print_type(ty)?;
if let Some(expr) = default {
}
fn print_associated_type(&mut self,
- name: ast::Name,
+ ident: ast::Ident,
bounds: Option<&hir::GenericBounds>,
ty: Option<&hir::Ty>)
-> io::Result<()> {
self.word_space("type")?;
- self.print_name(name)?;
+ self.print_ident(ident)?;
if let Some(bounds) = bounds {
self.print_bounds(":", bounds)?;
}
match kind {
hir::UseKind::Single => {
- if path.segments.last().unwrap().name != item.name {
+ if path.segments.last().unwrap().ident.name != item.name {
self.s.space()?;
self.word_space("as")?;
self.print_name(item.name)?;
hir::Visibility::Crate(ast::CrateSugar::PubCrate) => self.word_nbsp("pub(crate)")?,
hir::Visibility::Restricted { ref path, .. } => {
self.s.word("pub(")?;
- if path.segments.len() == 1 && path.segments[0].name == keywords::Super.name() {
+ if path.segments.len() == 1 &&
+ path.segments[0].ident.name == keywords::Super.name() {
// Special case: `super` can print like `pub(super)`.
self.s.word("super")?;
} else {
Ok(())
}
pub fn print_method_sig(&mut self,
- name: ast::Name,
+ ident: ast::Ident,
m: &hir::MethodSig,
generics: &hir::Generics,
vis: &hir::Visibility,
- arg_names: &[Spanned<ast::Name>],
+ arg_names: &[ast::Ident],
body_id: Option<hir::BodyId>)
-> io::Result<()> {
self.print_fn(&m.decl,
m.header,
- Some(name),
+ Some(ident.name),
generics,
vis,
arg_names,
self.print_outer_attributes(&ti.attrs)?;
match ti.node {
hir::TraitItemKind::Const(ref ty, default) => {
- self.print_associated_const(ti.name, &ty, default, &hir::Inherited)?;
+ self.print_associated_const(ti.ident, &ty, default, &hir::Inherited)?;
}
hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Required(ref arg_names)) => {
- self.print_method_sig(ti.name, sig, &ti.generics, &hir::Inherited, arg_names,
+ self.print_method_sig(ti.ident, sig, &ti.generics, &hir::Inherited, arg_names,
None)?;
self.s.word(";")?;
}
hir::TraitItemKind::Method(ref sig, hir::TraitMethod::Provided(body)) => {
self.head("")?;
- self.print_method_sig(ti.name, sig, &ti.generics, &hir::Inherited, &[],
+ self.print_method_sig(ti.ident, sig, &ti.generics, &hir::Inherited, &[],
Some(body))?;
self.nbsp()?;
self.end()?; // need to close a box
self.ann.nested(self, Nested::Body(body))?;
}
hir::TraitItemKind::Type(ref bounds, ref default) => {
- self.print_associated_type(ti.name,
+ self.print_associated_type(ti.ident,
Some(bounds),
default.as_ref().map(|ty| &**ty))?;
}
match ii.node {
hir::ImplItemKind::Const(ref ty, expr) => {
- self.print_associated_const(ii.name, &ty, Some(expr), &ii.vis)?;
+ self.print_associated_const(ii.ident, &ty, Some(expr), &ii.vis)?;
}
hir::ImplItemKind::Method(ref sig, body) => {
self.head("")?;
- self.print_method_sig(ii.name, sig, &ii.generics, &ii.vis, &[], Some(body))?;
+ self.print_method_sig(ii.ident, sig, &ii.generics, &ii.vis, &[], Some(body))?;
self.nbsp()?;
self.end()?; // need to close a box
self.end()?; // need to close a box
self.ann.nested(self, Nested::Body(body))?;
}
hir::ImplItemKind::Type(ref ty) => {
- self.print_associated_type(ii.name, None, Some(ty))?;
+ self.print_associated_type(ii.ident, None, Some(ty))?;
}
}
self.ann.post(self, NodeSubItem(ii.id))
let base_args = &args[1..];
self.print_expr_maybe_paren(&args[0], parser::PREC_POSTFIX)?;
self.s.word(".")?;
- self.print_name(segment.name)?;
+ self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
if !generic_args.args.is_empty() || !generic_args.bindings.is_empty() {
}
hir::ExprWhile(ref test, ref blk, opt_label) => {
if let Some(label) = opt_label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("while")?;
}
hir::ExprLoop(ref blk, opt_label, _) => {
if let Some(label) = opt_label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
self.head("loop")?;
}
hir::ExprBlock(ref blk, opt_label) => {
if let Some(label) = opt_label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
// containing cbox, will be closed by print-block at }
self.s.word("break")?;
self.s.space()?;
if let Some(label) = destination.label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.s.space()?;
}
if let Some(ref expr) = *opt_expr {
self.s.word("continue")?;
self.s.space()?;
if let Some(label) = destination.label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.s.space()?
}
}
}
pub fn print_name(&mut self, name: ast::Name) -> io::Result<()> {
- self.print_ident(name.to_ident())
+ self.print_ident(ast::Ident::with_empty_ctxt(name))
}
pub fn print_for_decl(&mut self, loc: &hir::Local, coll: &hir::Expr) -> io::Result<()> {
if i > 0 {
self.s.word("::")?
}
- if segment.name != keywords::CrateRoot.name() &&
- segment.name != keywords::DollarCrate.name() {
- self.print_name(segment.name)?;
+ if segment.ident.name != keywords::CrateRoot.name() &&
+ segment.ident.name != keywords::DollarCrate.name() {
+ self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args, segment.infer_types,
colons_before_params)
if i > 0 {
self.s.word("::")?
}
- if segment.name != keywords::CrateRoot.name() &&
- segment.name != keywords::DollarCrate.name() {
- self.print_name(segment.name)?;
+ if segment.ident.name != keywords::CrateRoot.name() &&
+ segment.ident.name != keywords::DollarCrate.name() {
+ self.print_ident(segment.ident)?;
segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args,
segment.infer_types,
self.s.word(">")?;
self.s.word("::")?;
let item_segment = path.segments.last().unwrap();
- self.print_name(item_segment.name)?;
+ self.print_ident(item_segment.ident)?;
item_segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args,
item_segment.infer_types,
self.print_type(qself)?;
self.s.word(">")?;
self.s.word("::")?;
- self.print_name(item_segment.name)?;
+ self.print_ident(item_segment.ident)?;
item_segment.with_generic_args(|generic_args| {
self.print_generic_args(generic_args,
item_segment.infer_types,
for binding in generic_args.bindings.iter() {
start_or_comma(self)?;
- self.print_name(binding.name)?;
+ self.print_ident(binding.ident)?;
self.s.space()?;
self.word_space("=")?;
self.print_type(&binding.ty)?;
// is that it doesn't matter
match pat.node {
PatKind::Wild => self.s.word("_")?,
- PatKind::Binding(binding_mode, _, ref path1, ref sub) => {
+ PatKind::Binding(binding_mode, _, ident, ref sub) => {
match binding_mode {
hir::BindingAnnotation::Ref => {
self.word_nbsp("ref")?;
self.word_nbsp("mut")?;
}
}
- self.print_name(path1.node)?;
+ self.print_ident(ident)?;
if let Some(ref p) = *sub {
self.s.word("@")?;
self.print_pat(&p)?;
match arm.body.node {
hir::ExprBlock(ref blk, opt_label) => {
if let Some(label) = opt_label {
- self.print_name(label.name)?;
+ self.print_ident(label.ident)?;
self.word_space(":")?;
}
// the block will close the pattern's ibox
name: Option<ast::Name>,
generics: &hir::Generics,
vis: &hir::Visibility,
- arg_names: &[Spanned<ast::Name>],
+ arg_names: &[ast::Ident],
body_id: Option<hir::BodyId>)
-> io::Result<()> {
self.print_fn_header_info(header, vis)?;
assert!(arg_names.is_empty() || body_id.is_none());
self.commasep(Inconsistent, &decl.inputs, |s, ty| {
s.ibox(indent_unit)?;
- if let Some(name) = arg_names.get(i) {
- s.s.word(&name.node.as_str())?;
+ if let Some(arg_name) = arg_names.get(i) {
+ s.s.word(&arg_name.as_str())?;
s.s.word(":")?;
s.s.space()?;
} else if let Some(body_id) = body_id {
}
pub fn print_generic_param(&mut self, param: &GenericParam) -> io::Result<()> {
- self.print_name(param.name.name())?;
+ self.print_ident(param.name.ident())?;
match param.kind {
GenericParamKind::Lifetime { .. } => {
let mut sep = ":";
}
pub fn print_lifetime(&mut self, lifetime: &hir::Lifetime) -> io::Result<()> {
- self.print_name(lifetime.name.name())
+ self.print_ident(lifetime.name.ident())
}
pub fn print_where_clause(&mut self, where_clause: &hir::WhereClause) -> io::Result<()> {
decl: &hir::FnDecl,
name: Option<ast::Name>,
generic_params: &[hir::GenericParam],
- arg_names: &[Spanned<ast::Name>])
+ arg_names: &[ast::Ident])
-> io::Result<()> {
self.ibox(indent_unit)?;
if !generic_params.is_empty() {
pub fn encode_opaque(&self, encoder: &mut Encoder) -> EncodeResult {
let bytes: [u8; 16] = unsafe { mem::transmute([self.0.to_le(), self.1.to_le()]) };
- encoder.emit_raw_bytes(&bytes)
+ encoder.emit_raw_bytes(&bytes);
+ Ok(())
}
pub fn decode_opaque<'a>(decoder: &mut Decoder<'a>) -> Result<Fingerprint, String> {
impl serialize::UseSpecializedDecodable for Fingerprint { }
-impl<'a> serialize::SpecializedEncoder<Fingerprint> for serialize::opaque::Encoder<'a> {
+impl serialize::SpecializedEncoder<Fingerprint> for serialize::opaque::Encoder {
fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
f.encode_opaque(self)
}
});
impl_stable_hash_for!(struct hir::Label {
- span,
- name
+ ident
});
impl_stable_hash_for!(struct hir::Lifetime {
});
impl_stable_hash_for!(struct hir::PathSegment {
- name,
+ ident -> (ident.name),
infer_types,
args
});
impl_stable_hash_for!(struct hir::GenericParam {
id,
name,
- span,
pure_wrt_drop,
attrs,
bounds,
+ span,
kind
});
impl_stable_hash_for!(struct hir::TypeBinding {
id,
- name,
+ ident -> (ident.name),
ty,
span
});
Return(t)
});
-impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitRef {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::TraitRef {
- ref path,
- // Don't hash the ref_id. It is tracked via the thing it is used to access
- ref_id: _,
- } = *self;
-
- path.hash_stable(hcx, hasher);
- }
-}
-
+impl_stable_hash_for!(struct hir::TraitRef {
+ // Don't hash the ref_id. It is tracked via the thing it is used to access
+ ref_id -> _,
+ path,
+});
impl_stable_hash_for!(struct hir::PolyTraitRef {
bound_generic_params,
body
});
+impl_stable_hash_for!(struct hir::Block {
+ stmts,
+ expr,
+ id -> _,
+ hir_id -> _,
+ rules,
+ span,
+ targeted_by_break,
+ recovered,
+});
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Block {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::Block {
- ref stmts,
- ref expr,
- id: _,
- hir_id: _,
- rules,
- span,
- targeted_by_break,
- recovered,
- } = *self;
-
- stmts.hash_stable(hcx, hasher);
- expr.hash_stable(hcx, hasher);
- rules.hash_stable(hcx, hasher);
- span.hash_stable(hcx, hasher);
- recovered.hash_stable(hcx, hasher);
- targeted_by_break.hash_stable(hcx, hasher);
- }
-}
-
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Pat {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::Pat {
- id: _,
- hir_id: _,
- ref node,
- ref span
- } = *self;
-
-
- node.hash_stable(hcx, hasher);
- span.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::Pat {
+ id -> _,
+ hir_id -> _,
+ node,
+ span,
+});
impl_stable_hash_for_spanned!(hir::FieldPat);
-impl<'a> HashStable<StableHashingContext<'a>> for hir::FieldPat {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::FieldPat {
- id: _,
- ident,
- ref pat,
- is_shorthand,
- } = *self;
-
- ident.hash_stable(hcx, hasher);
- pat.hash_stable(hcx, hasher);
- is_shorthand.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::FieldPat {
+ id -> _,
+ ident -> (ident.name),
+ pat,
+ is_shorthand,
+});
impl_stable_hash_for!(enum hir::BindingAnnotation {
Unannotated,
body
});
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Field {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::Field {
- id: _,
- ident,
- ref expr,
- span,
- is_shorthand,
- } = *self;
-
- ident.hash_stable(hcx, hasher);
- expr.hash_stable(hcx, hasher);
- span.hash_stable(hcx, hasher);
- is_shorthand.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::Field {
+ id -> _,
+ ident,
+ expr,
+ span,
+ is_shorthand,
+});
impl_stable_hash_for_spanned!(ast::Name);
UnresolvedLabel
});
-impl<'a> HashStable<StableHashingContext<'a>> for ast::Ident {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ast::Ident {
- name,
- span,
- } = *self;
-
- name.hash_stable(hcx, hasher);
- span.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct ast::Ident {
+ name,
+ span,
+});
impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitItem {
fn hash_stable<W: StableHasherResult>(&self,
let hir::TraitItem {
id: _,
hir_id: _,
- name,
+ ident,
ref attrs,
ref generics,
ref node,
} = *self;
hcx.hash_hir_item_like(|hcx| {
- name.hash_stable(hcx, hasher);
+ ident.name.hash_stable(hcx, hasher);
attrs.hash_stable(hcx, hasher);
generics.hash_stable(hcx, hasher);
node.hash_stable(hcx, hasher);
let hir::ImplItem {
id: _,
hir_id: _,
- name,
+ ident,
ref vis,
defaultness,
ref attrs,
} = *self;
hcx.hash_hir_item_like(|hcx| {
- name.hash_stable(hcx, hasher);
+ ident.name.hash_stable(hcx, hasher);
vis.hash_stable(hcx, hasher);
defaultness.hash_stable(hcx, hasher);
attrs.hash_stable(hcx, hasher);
Negative
});
-impl<'a> HashStable<StableHashingContext<'a>> for hir::Mod {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::Mod {
- inner,
- // We are not hashing the IDs of the items contained in the module.
- // This is harmless and matches the current behavior but it's not
- // actually correct. See issue #40876.
- item_ids: _,
- } = *self;
-
- inner.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::Mod {
+ inner,
+ // We are not hashing the IDs of the items contained in the module.
+ // This is harmless and matches the current behavior but it's not
+ // actually correct. See issue #40876.
+ item_ids -> _,
+});
impl_stable_hash_for!(struct hir::ForeignMod {
abi,
impl_stable_hash_for!(struct hir::StructField {
span,
- ident,
+ ident -> (ident.name),
vis,
id,
ty,
impl_stable_hash_for!(struct hir::TraitItemRef {
id,
- name,
+ ident -> (ident.name),
kind,
span,
defaultness
impl_stable_hash_for!(struct hir::ImplItemRef {
id,
- name,
+ ident -> (ident.name),
kind,
span,
vis,
defaultness
});
-impl<'a> HashStable<StableHashingContext<'a>>
-for hir::AssociatedItemKind {
+impl<'a> HashStable<StableHashingContext<'a>> for hir::AssociatedItemKind {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
is_indirect
});
-impl<'a> HashStable<StableHashingContext<'a>> for hir::GlobalAsm {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::GlobalAsm {
- asm,
- ctxt: _
- } = *self;
-
- asm.hash_stable(hcx, hasher);
- }
-}
-
-impl<'a> HashStable<StableHashingContext<'a>> for hir::InlineAsm {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let hir::InlineAsm {
- asm,
- asm_str_style,
- ref outputs,
- ref inputs,
- ref clobbers,
- volatile,
- alignstack,
- dialect,
- ctxt: _, // This is used for error reporting
- } = *self;
+impl_stable_hash_for!(struct hir::GlobalAsm {
+ asm,
+ ctxt -> _, // This is used for error reporting
+});
- asm.hash_stable(hcx, hasher);
- asm_str_style.hash_stable(hcx, hasher);
- outputs.hash_stable(hcx, hasher);
- inputs.hash_stable(hcx, hasher);
- clobbers.hash_stable(hcx, hasher);
- volatile.hash_stable(hcx, hasher);
- alignstack.hash_stable(hcx, hasher);
- dialect.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::InlineAsm {
+ asm,
+ asm_str_style,
+ outputs,
+ inputs,
+ clobbers,
+ volatile,
+ alignstack,
+ dialect,
+ ctxt -> _, // This is used for error reporting
+});
impl_stable_hash_for!(enum hir::def::CtorKind {
Fn,
NotConst
});
-impl<'a> HashStable<StableHashingContext<'a>>
-for hir::def_id::DefIndex {
+impl<'a> HashStable<StableHashingContext<'a>> for hir::def_id::DefIndex {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
span
});
-impl<'a> HashStable<StableHashingContext<'a>>
-for ::middle::lang_items::LangItem {
+impl<'a> HashStable<StableHashingContext<'a>> for ::middle::lang_items::LangItem {
fn hash_stable<W: StableHasherResult>(&self,
_: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
missing
});
-impl<'a> HashStable<StableHashingContext<'a>>
-for hir::TraitCandidate {
+impl<'a> HashStable<StableHashingContext<'a>> for hir::TraitCandidate {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
}
}
-impl<'hir> HashStable<StableHashingContext<'hir>> for hir::CodegenFnAttrs
-{
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'hir>,
- hasher: &mut StableHasher<W>) {
- let hir::CodegenFnAttrs {
- flags,
- inline,
- export_name,
- ref target_features,
- linkage,
- } = *self;
-
- flags.hash_stable(hcx, hasher);
- inline.hash_stable(hcx, hasher);
- export_name.hash_stable(hcx, hasher);
- target_features.hash_stable(hcx, hasher);
- linkage.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct hir::CodegenFnAttrs {
+ flags,
+ inline,
+ export_name,
+ target_features,
+ linkage,
+});
impl<'hir> HashStable<StableHashingContext<'hir>> for hir::CodegenFnAttrFlags
{
format,
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
edition
});
src_hash.hash_stable(hcx, hasher);
// We only hash the relative position within this filemap
- lines.with_lock(|lines| {
- lines.len().hash_stable(hcx, hasher);
- for &line in lines.iter() {
- stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
- }
- });
+ lines.len().hash_stable(hcx, hasher);
+ for &line in lines.iter() {
+ stable_byte_pos(line, start_pos).hash_stable(hcx, hasher);
+ }
// We only hash the relative position within this filemap
- multibyte_chars.with_lock(|multibyte_chars| {
- multibyte_chars.len().hash_stable(hcx, hasher);
- for &char_pos in multibyte_chars.iter() {
- stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
- }
- });
+ multibyte_chars.len().hash_stable(hcx, hasher);
+ for &char_pos in multibyte_chars.iter() {
+ stable_multibyte_char(char_pos, start_pos).hash_stable(hcx, hasher);
+ }
- non_narrow_chars.with_lock(|non_narrow_chars| {
- non_narrow_chars.len().hash_stable(hcx, hasher);
- for &char_pos in non_narrow_chars.iter() {
- stable_non_narrow_char(char_pos, start_pos).hash_stable(hcx, hasher);
- }
- });
+ non_narrow_chars.len().hash_stable(hcx, hasher);
+ for &char_pos in non_narrow_chars.iter() {
+ stable_non_narrow_char(char_pos, start_pos).hash_stable(hcx, hasher);
+ }
}
}
Relative(distance)
});
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ty::FieldDef {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ty::FieldDef { did, ident, vis } = *self;
-
- did.hash_stable(hcx, hasher);
- ident.name.hash_stable(hcx, hasher);
- vis.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct ty::FieldDef {
+ did,
+ ident -> (ident.name),
+ vis,
+});
impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::middle::const_val::ConstVal<'gcx> {
+for ::mir::interpret::ConstValue<'gcx> {
fn hash_stable<W: StableHasherResult>(&self,
hcx: &mut StableHashingContext<'a>,
hasher: &mut StableHasher<W>) {
- use middle::const_val::ConstVal::*;
+ use mir::interpret::ConstValue::*;
mem::discriminant(self).hash_stable(hcx, hasher);
def_id.hash_stable(hcx, hasher);
substs.hash_stable(hcx, hasher);
}
- Value(ref value) => {
- value.hash_stable(hcx, hasher);
- }
- }
- }
-}
-
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::mir::interpret::ConstValue<'gcx> {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- use mir::interpret::ConstValue::*;
-
- mem::discriminant(self).hash_stable(hcx, hasher);
-
- match *self {
Scalar(val) => {
val.hash_stable(hcx, hasher);
}
val
});
-impl_stable_hash_for!(struct ::middle::const_val::ConstEvalErr<'tcx> {
+impl_stable_hash_for!(struct ::mir::interpret::ConstEvalErr<'tcx> {
span,
- kind
+ stacktrace,
+ error
});
-impl_stable_hash_for!(struct ::middle::const_val::FrameInfo {
+impl_stable_hash_for!(struct ::mir::interpret::FrameInfo {
span,
lint_root,
location
});
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::middle::const_val::ErrKind<'gcx> {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- use middle::const_val::ErrKind::*;
-
- mem::discriminant(self).hash_stable(hcx, hasher);
-
- match *self {
- TypeckError |
- CouldNotResolve |
- CheckMatchError => {
- // nothing to do
- }
- Miri(ref err, ref trace) => {
- err.hash_stable(hcx, hasher);
- trace.hash_stable(hcx, hasher);
- },
- }
- }
-}
-
impl_stable_hash_for!(struct ty::ClosureSubsts<'tcx> { substs });
impl_stable_hash_for!(struct ty::GeneratorSubsts<'tcx> { substs });
predicates
});
-
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>>
-for ::mir::interpret::EvalError<'gcx> {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- self.kind.hash_stable(hcx, hasher)
- }
-}
+impl_stable_hash_for!(struct ::mir::interpret::EvalError<'tcx> { kind });
impl<'a, 'gcx, O: HashStable<StableHashingContext<'a>>> HashStable<StableHashingContext<'a>>
for ::mir::interpret::EvalErrorKind<'gcx, O> {
ReadFromReturnPointer |
UnimplementedTraitSelection |
TypeckError |
+ TooGeneric |
+ CheckMatchError |
DerefFunctionPointer |
ExecuteMemory |
OverflowNeg |
Struct(index)
});
-impl<'a> HashStable<StableHashingContext<'a>> for ty::Generics {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ty::Generics {
- parent,
- ref parent_count,
- ref params,
-
- // Reverse map to each param's `index` field, from its `def_id`.
- param_def_id_to_index: _, // Don't hash this
- has_self,
- has_late_bound_regions,
- } = *self;
-
- parent.hash_stable(hcx, hasher);
- parent_count.hash_stable(hcx, hasher);
- params.hash_stable(hcx, hasher);
- has_self.hash_stable(hcx, hasher);
- has_late_bound_regions.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct ty::Generics {
+ parent,
+ parent_count,
+ params,
+ // Reverse map to each param's `index` field, from its `def_id`.
+ param_def_id_to_index -> _, // Don't hash this
+ has_self,
+ has_late_bound_regions,
+});
impl_stable_hash_for!(struct ty::GenericParamDef {
name,
}
}
-impl<'a> HashStable<StableHashingContext<'a>> for ty::TraitDef {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ty::TraitDef {
- // We already have the def_path_hash below, no need to hash it twice
- def_id: _,
- unsafety,
- paren_sugar,
- has_auto_impl,
- def_path_hash,
- } = *self;
-
- unsafety.hash_stable(hcx, hasher);
- paren_sugar.hash_stable(hcx, hasher);
- has_auto_impl.hash_stable(hcx, hasher);
- def_path_hash.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct ty::TraitDef {
+ // We already have the def_path_hash below, no need to hash it twice
+ def_id -> _,
+ unsafety,
+ paren_sugar,
+ has_auto_impl,
+ def_path_hash,
+});
impl_stable_hash_for!(struct ty::Destructor {
did
});
-impl<'a> HashStable<StableHashingContext<'a>> for ty::CrateVariancesMap {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ty::CrateVariancesMap {
- ref variances,
- // This is just an irrelevant helper value.
- empty_variance: _,
- } = *self;
-
- variances.hash_stable(hcx, hasher);
- }
-}
-
-impl<'a, 'gcx> HashStable<StableHashingContext<'a>> for ty::CratePredicatesMap<'gcx> {
- fn hash_stable<W: StableHasherResult>(&self,
- hcx: &mut StableHashingContext<'a>,
- hasher: &mut StableHasher<W>) {
- let ty::CratePredicatesMap {
- ref predicates,
- // This is just an irrelevant helper value.
- empty_predicate: _,
- } = *self;
+impl_stable_hash_for!(struct ty::CrateVariancesMap {
+ variances,
+ // This is just an irrelevant helper value.
+ empty_variance -> _,
+});
- predicates.hash_stable(hcx, hasher);
- }
-}
+impl_stable_hash_for!(struct ty::CratePredicatesMap<'tcx> {
+ predicates,
+ // This is just an irrelevant helper value.
+ empty_predicate -> _,
+});
impl_stable_hash_for!(struct ty::AssociatedItem {
def_id,
- name,
+ ident -> (ident.name),
kind,
vis,
defaultness,
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! **Canonicalization** is the key to constructing a query in the
-//! middle of type inference. Ordinarily, it is not possible to store
-//! types from type inference in query keys, because they contain
-//! references to inference variables whose lifetimes are too short
-//! and so forth. Canonicalizing a value T1 using `canonicalize_query`
-//! produces two things:
-//!
-//! - a value T2 where each unbound inference variable has been
-//! replaced with a **canonical variable**;
-//! - a map M (of type `CanonicalVarValues`) from those canonical
-//! variables back to the original.
-//!
-//! We can then do queries using T2. These will give back constriants
-//! on the canonical variables which can be translated, using the map
-//! M, into constraints in our source context. This process of
-//! translating the results back is done by the
-//! `instantiate_query_result` method.
-//!
-//! For a more detailed look at what is happening here, check
-//! out the [chapter in the rustc guide][c].
-//!
-//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits-canonicalization.html
-
-use infer::{InferCtxt, InferOk, InferResult, RegionVariableOrigin, TypeVariableOrigin};
-use rustc_data_structures::indexed_vec::Idx;
-use serialize::UseSpecializedDecodable;
-use std::fmt::Debug;
-use std::ops::Index;
-use std::sync::atomic::Ordering;
-use syntax::codemap::Span;
-use traits::{Obligation, ObligationCause, PredicateObligation};
-use ty::{self, CanonicalVar, Lift, Region, Slice, Ty, TyCtxt, TypeFlags};
-use ty::subst::{Kind, UnpackedKind};
-use ty::fold::{TypeFoldable, TypeFolder};
-
-use rustc_data_structures::indexed_vec::IndexVec;
-use rustc_data_structures::fx::FxHashMap;
-
-/// A "canonicalized" type `V` is one where all free inference
-/// variables have been rewriten to "canonical vars". These are
-/// numbered starting from 0 in order of first appearance.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub struct Canonical<'gcx, V> {
- pub variables: CanonicalVarInfos<'gcx>,
- pub value: V,
-}
-
-pub type CanonicalVarInfos<'gcx> = &'gcx Slice<CanonicalVarInfo>;
-
-impl<'gcx> UseSpecializedDecodable for CanonicalVarInfos<'gcx> { }
-
-/// A set of values corresponding to the canonical variables from some
-/// `Canonical`. You can give these values to
-/// `canonical_value.substitute` to substitute them into the canonical
-/// value at the right places.
-///
-/// When you canonicalize a value `V`, you get back one of these
-/// vectors with the original values that were replaced by canonical
-/// variables.
-///
-/// You can also use `infcx.fresh_inference_vars_for_canonical_vars`
-/// to get back a `CanonicalVarValues` containing fresh inference
-/// variables.
-#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub struct CanonicalVarValues<'tcx> {
- pub var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
-}
-
-/// Information about a canonical variable that is included with the
-/// canonical value. This is sufficient information for code to create
-/// a copy of the canonical value in some other inference context,
-/// with fresh inference variables replacing the canonical values.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub struct CanonicalVarInfo {
- pub kind: CanonicalVarKind,
-}
-
-/// Describes the "kind" of the canonical variable. This is a "kind"
-/// in the type-theory sense of the term -- i.e., a "meta" type system
-/// that analyzes type-like values.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub enum CanonicalVarKind {
- /// Some kind of type inference variable.
- Ty(CanonicalTyVarKind),
-
- /// Region variable `'?R`.
- Region,
-}
-
-/// Rust actually has more than one category of type variables;
-/// notably, the type variables we create for literals (e.g., 22 or
-/// 22.) can only be instantiated with integral/float types (e.g.,
-/// usize or f32). In order to faithfully reproduce a type, we need to
-/// know what set of types a given type variable can be unified with.
-#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
-pub enum CanonicalTyVarKind {
- /// General type variable `?T` that can be unified with arbitrary types.
- General,
-
- /// Integral type variable `?I` (that can only be unified with integral types).
- Int,
-
- /// Floating-point type variable `?F` (that can only be unified with float types).
- Float,
-}
-
-/// After we execute a query with a canonicalized key, we get back a
-/// `Canonical<QueryResult<..>>`. You can use
-/// `instantiate_query_result` to access the data in this result.
-#[derive(Clone, Debug)]
-pub struct QueryResult<'tcx, R> {
- pub var_values: CanonicalVarValues<'tcx>,
- pub region_constraints: Vec<QueryRegionConstraint<'tcx>>,
- pub certainty: Certainty,
- pub value: R,
-}
-
-/// Indicates whether or not we were able to prove the query to be
-/// true.
-#[derive(Copy, Clone, Debug)]
-pub enum Certainty {
- /// The query is known to be true, presuming that you apply the
- /// given `var_values` and the region-constraints are satisfied.
- Proven,
-
- /// The query is not known to be true, but also not known to be
- /// false. The `var_values` represent *either* values that must
- /// hold in order for the query to be true, or helpful tips that
- /// *might* make it true. Currently rustc's trait solver cannot
- /// distinguish the two (e.g., due to our preference for where
- /// clauses over impls).
- ///
- /// After some unifiations and things have been done, it makes
- /// sense to try and prove again -- of course, at that point, the
- /// canonical form will be different, making this a distinct
- /// query.
- Ambiguous,
-}
-
-impl Certainty {
- pub fn is_proven(&self) -> bool {
- match self {
- Certainty::Proven => true,
- Certainty::Ambiguous => false,
- }
- }
-
- pub fn is_ambiguous(&self) -> bool {
- !self.is_proven()
- }
-}
-
-impl<'tcx, R> QueryResult<'tcx, R> {
- pub fn is_proven(&self) -> bool {
- self.certainty.is_proven()
- }
-
- pub fn is_ambiguous(&self) -> bool {
- !self.is_proven()
- }
-}
-
-impl<'tcx, R> Canonical<'tcx, QueryResult<'tcx, R>> {
- pub fn is_proven(&self) -> bool {
- self.value.is_proven()
- }
-
- pub fn is_ambiguous(&self) -> bool {
- !self.is_proven()
- }
-}
-
-pub type QueryRegionConstraint<'tcx> = ty::Binder<ty::OutlivesPredicate<Kind<'tcx>, Region<'tcx>>>;
-
-/// Trait implemented by values that can be canonicalized. It mainly
-/// serves to identify the interning table we will use.
-pub trait Canonicalize<'gcx: 'tcx, 'tcx>: TypeFoldable<'tcx> + Lift<'gcx> {
- type Canonicalized: 'gcx + Debug;
-
- /// After a value has been fully canonicalized and lifted, this
- /// method will allocate it in a global arena.
- fn intern(
- gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized;
-}
-
-impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
- /// Creates a substitution S for the canonical value with fresh
- /// inference variables and applies it to the canonical value.
- /// Returns both the instantiated result *and* the substitution S.
- ///
- /// This is useful at the start of a query: it basically brings
- /// the canonical value "into scope" within your new infcx. At the
- /// end of processing, the substitution S (once canonicalized)
- /// then represents the values that you computed for each of the
- /// canonical inputs to your query.
- pub fn instantiate_canonical_with_fresh_inference_vars<T>(
- &self,
- span: Span,
- canonical: &Canonical<'tcx, T>,
- ) -> (T, CanonicalVarValues<'tcx>)
- where
- T: TypeFoldable<'tcx>,
- {
- let canonical_inference_vars =
- self.fresh_inference_vars_for_canonical_vars(span, canonical.variables);
- let result = canonical.substitute(self.tcx, &canonical_inference_vars);
- (result, canonical_inference_vars)
- }
-
- /// Given the "infos" about the canonical variables from some
- /// canonical, creates fresh inference variables with the same
- /// characteristics. You can then use `substitute` to instantiate
- /// the canonical variable with these inference variables.
- pub fn fresh_inference_vars_for_canonical_vars(
- &self,
- span: Span,
- variables: &Slice<CanonicalVarInfo>,
- ) -> CanonicalVarValues<'tcx> {
- let var_values: IndexVec<CanonicalVar, Kind<'tcx>> = variables
- .iter()
- .map(|info| self.fresh_inference_var_for_canonical_var(span, *info))
- .collect();
-
- CanonicalVarValues { var_values }
- }
-
- /// Given the "info" about a canonical variable, creates a fresh
- /// inference variable with the same characteristics.
- pub fn fresh_inference_var_for_canonical_var(
- &self,
- span: Span,
- cv_info: CanonicalVarInfo,
- ) -> Kind<'tcx> {
- match cv_info.kind {
- CanonicalVarKind::Ty(ty_kind) => {
- let ty = match ty_kind {
- CanonicalTyVarKind::General => {
- self.next_ty_var(
- TypeVariableOrigin::MiscVariable(span),
- )
- }
-
- CanonicalTyVarKind::Int => self.tcx.mk_int_var(self.next_int_var_id()),
-
- CanonicalTyVarKind::Float => self.tcx.mk_float_var(self.next_float_var_id()),
- };
- ty.into()
- }
-
- CanonicalVarKind::Region => {
- self.next_region_var(RegionVariableOrigin::MiscVariable(span)).into()
- }
- }
- }
-
- /// Given the (canonicalized) result to a canonical query,
- /// instantiates the result so it can be used, plugging in the
- /// values from the canonical query. (Note that the result may
- /// have been ambiguous; you should check the certainty level of
- /// the query before applying this function.)
- ///
- /// To get a good understanding of what is happening here, check
- /// out the [chapter in the rustc guide][c].
- ///
- /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits-canonicalization.html#processing-the-canonicalized-query-result
- pub fn instantiate_query_result<R>(
- &self,
- cause: &ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- original_values: &CanonicalVarValues<'tcx>,
- query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
- ) -> InferResult<'tcx, R>
- where
- R: Debug + TypeFoldable<'tcx>,
- {
- debug!(
- "instantiate_query_result(original_values={:#?}, query_result={:#?})",
- original_values, query_result,
- );
-
- // Every canonical query result includes values for each of
- // the inputs to the query. Therefore, we begin by unifying
- // these values with the original inputs that were
- // canonicalized.
- let result_values = &query_result.value.var_values;
- assert_eq!(original_values.len(), result_values.len());
-
- // Quickly try to find initial values for the canonical
- // variables in the result in terms of the query. We do this
- // by iterating down the values that the query gave to each of
- // the canonical inputs. If we find that one of those values
- // is directly equal to one of the canonical variables in the
- // result, then we can type the corresponding value from the
- // input. See the example above.
- let mut opt_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>> =
- IndexVec::from_elem_n(None, query_result.variables.len());
-
- // In terms of our example above, we are iterating over pairs like:
- // [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]
- for (original_value, result_value) in original_values.iter().zip(result_values) {
- match result_value.unpack() {
- UnpackedKind::Type(result_value) => {
- // e.g., here `result_value` might be `?0` in the example above...
- if let ty::TyInfer(ty::InferTy::CanonicalTy(index)) = result_value.sty {
- // in which case we would set `canonical_vars[0]` to `Some(?U)`.
- opt_values[index] = Some(original_value);
- }
- }
- UnpackedKind::Lifetime(result_value) => {
- // e.g., here `result_value` might be `'?1` in the example above...
- if let &ty::RegionKind::ReCanonical(index) = result_value {
- // in which case we would set `canonical_vars[0]` to `Some('static)`.
- opt_values[index] = Some(original_value);
- }
- }
- }
- }
-
- // Create a result substitution: if we found a value for a
- // given variable in the loop above, use that. Otherwise, use
- // a fresh inference variable.
- let result_subst = &CanonicalVarValues {
- var_values: query_result
- .variables
- .iter()
- .enumerate()
- .map(|(index, info)| match opt_values[CanonicalVar::new(index)] {
- Some(k) => k,
- None => self.fresh_inference_var_for_canonical_var(cause.span, *info),
- })
- .collect(),
- };
-
- // Unify the original values for the canonical variables in
- // the input with the value found in the query
- // post-substitution. Often, but not always, this is a no-op,
- // because we already found the mapping in the first step.
- let substituted_values = |index: CanonicalVar| -> Kind<'tcx> {
- query_result.substitute_projected(self.tcx, result_subst, |v| &v.var_values[index])
- };
- let mut obligations =
- self.unify_canonical_vars(cause, param_env, original_values, substituted_values)?
- .into_obligations();
-
- obligations.extend(self.query_region_constraints_into_obligations(
- cause,
- param_env,
- &query_result.value.region_constraints,
- result_subst,
- ));
-
- let user_result: R =
- query_result.substitute_projected(self.tcx, result_subst, |q_r| &q_r.value);
-
- Ok(InferOk {
- value: user_result,
- obligations,
- })
- }
-
- /// Converts the region constraints resulting from a query into an
- /// iterator of obligations.
- fn query_region_constraints_into_obligations<'a>(
- &'a self,
- cause: &'a ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],
- result_subst: &'a CanonicalVarValues<'tcx>,
- ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a {
- Box::new(unsubstituted_region_constraints.iter().map(move |constraint| {
- let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
- let k1 = substitute_value(self.tcx, result_subst, k1);
- let r2 = substitute_value(self.tcx, result_subst, r2);
- match k1.unpack() {
- UnpackedKind::Lifetime(r1) =>
- Obligation::new(
- cause.clone(),
- param_env,
- ty::Predicate::RegionOutlives(
- ty::Binder::dummy(ty::OutlivesPredicate(r1, r2))),
- ),
-
- UnpackedKind::Type(t1) =>
- Obligation::new(
- cause.clone(),
- param_env,
- ty::Predicate::TypeOutlives(
- ty::Binder::dummy(ty::OutlivesPredicate(t1, r2))),
- ),
- }
- })) as Box<dyn Iterator<Item = _>>
- }
-
- /// Given two sets of values for the same set of canonical variables, unify them.
- /// The second set is produced lazilly by supplying indices from the first set.
- fn unify_canonical_vars(
- &self,
- cause: &ObligationCause<'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- variables1: &CanonicalVarValues<'tcx>,
- variables2: impl Fn(CanonicalVar) -> Kind<'tcx>,
- ) -> InferResult<'tcx, ()> {
- self.commit_if_ok(|_| {
- let mut obligations = vec![];
- for (index, value1) in variables1.var_values.iter_enumerated() {
- let value2 = variables2(index);
-
- match (value1.unpack(), value2.unpack()) {
- (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
- obligations
- .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
- }
- (
- UnpackedKind::Lifetime(ty::ReErased),
- UnpackedKind::Lifetime(ty::ReErased),
- ) => {
- // no action needed
- }
- (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => {
- obligations
- .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
- }
- _ => {
- bug!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
- }
- }
- }
- Ok(InferOk {
- value: (),
- obligations,
- })
- })
- }
-
- /// Canonicalizes a query value `V`. When we canonicalize a query,
- /// we not only canonicalize unbound inference variables, but we
- /// *also* replace all free regions whatsoever. So for example a
- /// query like `T: Trait<'static>` would be canonicalized to
- ///
- /// ```text
- /// T: Trait<'?0>
- /// ```
- ///
- /// with a mapping M that maps `'?0` to `'static`.
- ///
- /// To get a good understanding of what is happening here, check
- /// out the [chapter in the rustc guide][c].
- ///
- /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits-canonicalization.html#canonicalizing-the-query
- pub fn canonicalize_query<V>(&self, value: &V) -> (V::Canonicalized, CanonicalVarValues<'tcx>)
- where
- V: Canonicalize<'gcx, 'tcx>,
- {
- self.tcx.sess.perf_stats.queries_canonicalized.fetch_add(1, Ordering::Relaxed);
-
- Canonicalizer::canonicalize(
- value,
- Some(self),
- self.tcx,
- CanonicalizeAllFreeRegions(true),
- )
- }
-
- /// Canonicalizes a query *response* `V`. When we canonicalize a
- /// query response, we only canonicalize unbound inference
- /// variables, and we leave other free regions alone. So,
- /// continuing with the example from `canonicalize_query`, if
- /// there was an input query `T: Trait<'static>`, it would have
- /// been canonicalized to
- ///
- /// ```text
- /// T: Trait<'?0>
- /// ```
- ///
- /// with a mapping M that maps `'?0` to `'static`. But if we found that there
- /// exists only one possible impl of `Trait`, and it looks like
- ///
- /// impl<T> Trait<'static> for T { .. }
- ///
- /// then we would prepare a query result R that (among other
- /// things) includes a mapping to `'?0 := 'static`. When
- /// canonicalizing this query result R, we would leave this
- /// reference to `'static` alone.
- ///
- /// To get a good understanding of what is happening here, check
- /// out the [chapter in the rustc guide][c].
- ///
- /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits-canonicalization.html#canonicalizing-the-query-result
- pub fn canonicalize_response<V>(
- &self,
- value: &V,
- ) -> (V::Canonicalized, CanonicalVarValues<'tcx>)
- where
- V: Canonicalize<'gcx, 'tcx>,
- {
- Canonicalizer::canonicalize(
- value,
- Some(self),
- self.tcx,
- CanonicalizeAllFreeRegions(false),
- )
- }
-}
-
-/// If this flag is true, then all free regions will be replaced with
-/// a canonical var. This is used to make queries as generic as
-/// possible. For example, the query `F: Foo<'static>` would be
-/// canonicalized to `F: Foo<'0>`.
-struct CanonicalizeAllFreeRegions(bool);
-
-struct Canonicalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- variables: IndexVec<CanonicalVar, CanonicalVarInfo>,
- indices: FxHashMap<Kind<'tcx>, CanonicalVar>,
- var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
- canonicalize_all_free_regions: CanonicalizeAllFreeRegions,
- needs_canonical_flags: TypeFlags,
-}
-
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> {
- fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
- self.tcx
- }
-
- fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
- match *r {
- ty::ReLateBound(..) => {
- // leave bound regions alone
- r
- }
-
- ty::ReVar(vid) => {
- let r = self.infcx
- .unwrap()
- .borrow_region_constraints()
- .opportunistic_resolve_var(self.tcx, vid);
- let info = CanonicalVarInfo {
- kind: CanonicalVarKind::Region,
- };
- debug!(
- "canonical: region var found with vid {:?}, \
- opportunistically resolved to {:?}",
- vid, r
- );
- let cvar = self.canonical_var(info, r.into());
- self.tcx().mk_region(ty::ReCanonical(cvar))
- }
-
- ty::ReStatic
- | ty::ReEarlyBound(..)
- | ty::ReFree(_)
- | ty::ReScope(_)
- | ty::ReSkolemized(..)
- | ty::ReEmpty
- | ty::ReErased => {
- if self.canonicalize_all_free_regions.0 {
- let info = CanonicalVarInfo {
- kind: CanonicalVarKind::Region,
- };
- let cvar = self.canonical_var(info, r.into());
- self.tcx().mk_region(ty::ReCanonical(cvar))
- } else {
- r
- }
- }
-
- ty::ReClosureBound(..) | ty::ReCanonical(_) => {
- bug!("canonical region encountered during canonicalization")
- }
- }
- }
-
- fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
- match t.sty {
- ty::TyInfer(ty::TyVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::General, t),
-
- ty::TyInfer(ty::IntVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Int, t),
-
- ty::TyInfer(ty::FloatVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Float, t),
-
- ty::TyInfer(ty::FreshTy(_))
- | ty::TyInfer(ty::FreshIntTy(_))
- | ty::TyInfer(ty::FreshFloatTy(_)) => {
- bug!("encountered a fresh type during canonicalization")
- }
-
- ty::TyInfer(ty::CanonicalTy(_)) => {
- bug!("encountered a canonical type during canonicalization")
- }
-
- ty::TyClosure(..)
- | ty::TyGenerator(..)
- | ty::TyGeneratorWitness(..)
- | ty::TyBool
- | ty::TyChar
- | ty::TyInt(..)
- | ty::TyUint(..)
- | ty::TyFloat(..)
- | ty::TyAdt(..)
- | ty::TyStr
- | ty::TyError
- | ty::TyArray(..)
- | ty::TySlice(..)
- | ty::TyRawPtr(..)
- | ty::TyRef(..)
- | ty::TyFnDef(..)
- | ty::TyFnPtr(_)
- | ty::TyDynamic(..)
- | ty::TyNever
- | ty::TyTuple(..)
- | ty::TyProjection(..)
- | ty::TyForeign(..)
- | ty::TyParam(..)
- | ty::TyAnon(..) => {
- if t.flags.intersects(self.needs_canonical_flags) {
- t.super_fold_with(self)
- } else {
- t
- }
- }
- }
- }
-}
-
-impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> {
- /// The main `canonicalize` method, shared impl of
- /// `canonicalize_query` and `canonicalize_response`.
- fn canonicalize<V>(
- value: &V,
- infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- canonicalize_all_free_regions: CanonicalizeAllFreeRegions,
- ) -> (V::Canonicalized, CanonicalVarValues<'tcx>)
- where
- V: Canonicalize<'gcx, 'tcx>,
- {
- debug_assert!(
- !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS),
- "canonicalizing a canonical value: {:?}",
- value,
- );
-
- let needs_canonical_flags = if canonicalize_all_free_regions.0 {
- TypeFlags::HAS_FREE_REGIONS | TypeFlags::KEEP_IN_LOCAL_TCX
- } else {
- TypeFlags::KEEP_IN_LOCAL_TCX
- };
-
- let gcx = tcx.global_tcx();
-
- // Fast path: nothing that needs to be canonicalized.
- if !value.has_type_flags(needs_canonical_flags) {
- let out_value = gcx.lift(value).unwrap();
- let canon_value = V::intern(
- gcx,
- Canonical {
- variables: Slice::empty(),
- value: out_value,
- },
- );
- let values = CanonicalVarValues {
- var_values: IndexVec::default(),
- };
- return (canon_value, values);
- }
-
- let mut canonicalizer = Canonicalizer {
- infcx,
- tcx,
- canonicalize_all_free_regions,
- needs_canonical_flags,
- variables: IndexVec::default(),
- indices: FxHashMap::default(),
- var_values: IndexVec::default(),
- };
- let out_value = value.fold_with(&mut canonicalizer);
-
- // Once we have canonicalized `out_value`, it should not
- // contain anything that ties it to this inference context
- // anymore, so it should live in the global arena.
- let out_value = gcx.lift(&out_value).unwrap_or_else(|| {
- bug!(
- "failed to lift `{:?}`, canonicalized from `{:?}`",
- out_value,
- value
- )
- });
-
- let canonical_variables = tcx.intern_canonical_var_infos(&canonicalizer.variables.raw);
-
- let canonical_value = V::intern(
- gcx,
- Canonical {
- variables: canonical_variables,
- value: out_value,
- },
- );
- let canonical_var_values = CanonicalVarValues {
- var_values: canonicalizer.var_values,
- };
- (canonical_value, canonical_var_values)
- }
-
- /// Creates a canonical variable replacing `kind` from the input,
- /// or returns an existing variable if `kind` has already been
- /// seen. `kind` is expected to be an unbound variable (or
- /// potentially a free region).
- fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> CanonicalVar {
- let Canonicalizer {
- indices,
- variables,
- var_values,
- ..
- } = self;
-
- indices
- .entry(kind)
- .or_insert_with(|| {
- let cvar1 = variables.push(info);
- let cvar2 = var_values.push(kind);
- assert_eq!(cvar1, cvar2);
- cvar1
- })
- .clone()
- }
-
- /// Given a type variable `ty_var` of the given kind, first check
- /// if `ty_var` is bound to anything; if so, canonicalize
- /// *that*. Otherwise, create a new canonical variable for
- /// `ty_var`.
- fn canonicalize_ty_var(&mut self, ty_kind: CanonicalTyVarKind, ty_var: Ty<'tcx>) -> Ty<'tcx> {
- let infcx = self.infcx.expect("encountered ty-var without infcx");
- let bound_to = infcx.shallow_resolve(ty_var);
- if bound_to != ty_var {
- self.fold_ty(bound_to)
- } else {
- let info = CanonicalVarInfo {
- kind: CanonicalVarKind::Ty(ty_kind),
- };
- let cvar = self.canonical_var(info, ty_var.into());
- self.tcx().mk_infer(ty::InferTy::CanonicalTy(cvar))
- }
- }
-}
-
-impl<'tcx, V> Canonical<'tcx, V> {
- /// Instantiate the wrapped value, replacing each canonical value
- /// with the value given in `var_values`.
- fn substitute(&self, tcx: TyCtxt<'_, '_, 'tcx>, var_values: &CanonicalVarValues<'tcx>) -> V
- where
- V: TypeFoldable<'tcx>,
- {
- self.substitute_projected(tcx, var_values, |value| value)
- }
-
- /// Invoke `projection_fn` with `self.value` to get a value V that
- /// is expressed in terms of the same canonical variables bound in
- /// `self`. Apply the substitution `var_values` to this value V,
- /// replacing each of the canonical variables.
- fn substitute_projected<T>(
- &self,
- tcx: TyCtxt<'_, '_, 'tcx>,
- var_values: &CanonicalVarValues<'tcx>,
- projection_fn: impl FnOnce(&V) -> &T,
- ) -> T
- where
- T: TypeFoldable<'tcx>,
- {
- assert_eq!(self.variables.len(), var_values.var_values.len());
- let value = projection_fn(&self.value);
- substitute_value(tcx, var_values, value)
- }
-}
-
-/// Substitute the values from `var_values` into `value`. `var_values`
-/// must be values for the set of cnaonical variables that appear in
-/// `value`.
-fn substitute_value<'a, 'tcx, T>(
- tcx: TyCtxt<'_, '_, 'tcx>,
- var_values: &CanonicalVarValues<'tcx>,
- value: &'a T,
-) -> T
-where
- T: TypeFoldable<'tcx>,
-{
- if var_values.var_values.is_empty() {
- debug_assert!(!value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS));
- value.clone()
- } else if !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
- value.clone()
- } else {
- value.fold_with(&mut CanonicalVarValuesSubst { tcx, var_values })
- }
-}
-
-struct CanonicalVarValuesSubst<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
- tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- var_values: &'cx CanonicalVarValues<'tcx>,
-}
-
-impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for CanonicalVarValuesSubst<'cx, 'gcx, 'tcx> {
- fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx> {
- self.tcx
- }
-
- fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
- match t.sty {
- ty::TyInfer(ty::InferTy::CanonicalTy(c)) => {
- match self.var_values.var_values[c].unpack() {
- UnpackedKind::Type(ty) => ty,
- r => bug!("{:?} is a type but value is {:?}", c, r),
- }
- }
- _ => {
- if !t.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
- t
- } else {
- t.super_fold_with(self)
- }
- }
- }
- }
-
- fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
- match r {
- ty::RegionKind::ReCanonical(c) => match self.var_values.var_values[*c].unpack() {
- UnpackedKind::Lifetime(l) => l,
- r => bug!("{:?} is a region but value is {:?}", c, r),
- },
- _ => r.super_fold_with(self),
- }
- }
-}
-
-CloneTypeFoldableAndLiftImpls! {
- ::infer::canonical::Certainty,
- ::infer::canonical::CanonicalVarInfo,
- ::infer::canonical::CanonicalVarKind,
-}
-
-CloneTypeFoldableImpls! {
- for <'tcx> {
- ::infer::canonical::CanonicalVarInfos<'tcx>,
- }
-}
-
-BraceStructTypeFoldableImpl! {
- impl<'tcx, C> TypeFoldable<'tcx> for Canonical<'tcx, C> {
- variables,
- value,
- } where C: TypeFoldable<'tcx>
-}
-
-BraceStructLiftImpl! {
- impl<'a, 'tcx, T> Lift<'tcx> for Canonical<'a, T> {
- type Lifted = Canonical<'tcx, T::Lifted>;
- variables, value
- } where T: Lift<'tcx>
-}
-
-impl<'tcx> CanonicalVarValues<'tcx> {
- fn iter<'a>(&'a self) -> impl Iterator<Item = Kind<'tcx>> + 'a {
- self.var_values.iter().cloned()
- }
-
- fn len(&self) -> usize {
- self.var_values.len()
- }
-}
-
-impl<'a, 'tcx> IntoIterator for &'a CanonicalVarValues<'tcx> {
- type Item = Kind<'tcx>;
- type IntoIter = ::std::iter::Cloned<::std::slice::Iter<'a, Kind<'tcx>>>;
-
- fn into_iter(self) -> Self::IntoIter {
- self.var_values.iter().cloned()
- }
-}
-
-BraceStructLiftImpl! {
- impl<'a, 'tcx> Lift<'tcx> for CanonicalVarValues<'a> {
- type Lifted = CanonicalVarValues<'tcx>;
- var_values,
- }
-}
-
-BraceStructTypeFoldableImpl! {
- impl<'tcx> TypeFoldable<'tcx> for CanonicalVarValues<'tcx> {
- var_values,
- }
-}
-
-BraceStructTypeFoldableImpl! {
- impl<'tcx, R> TypeFoldable<'tcx> for QueryResult<'tcx, R> {
- var_values, region_constraints, certainty, value
- } where R: TypeFoldable<'tcx>,
-}
-
-BraceStructLiftImpl! {
- impl<'a, 'tcx, R> Lift<'tcx> for QueryResult<'a, R> {
- type Lifted = QueryResult<'tcx, R::Lifted>;
- var_values, region_constraints, certainty, value
- } where R: Lift<'tcx>
-}
-
-impl<'tcx> Index<CanonicalVar> for CanonicalVarValues<'tcx> {
- type Output = Kind<'tcx>;
-
- fn index(&self, value: CanonicalVar) -> &Kind<'tcx> {
- &self.var_values[value]
- }
-}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains the "canonicalizer" itself.
+//!
+//! For an overview of what canonicalization is and how it fits into
+//! rustc, check out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::canonical::{
+ Canonical, CanonicalTyVarKind, CanonicalVarInfo, CanonicalVarKind, CanonicalVarValues,
+ Canonicalized,
+};
+use infer::InferCtxt;
+use std::sync::atomic::Ordering;
+use ty::fold::{TypeFoldable, TypeFolder};
+use ty::subst::Kind;
+use ty::{self, CanonicalVar, Lift, Slice, Ty, TyCtxt, TypeFlags};
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::indexed_vec::IndexVec;
+
+impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+ /// Canonicalizes a query value `V`. When we canonicalize a query,
+ /// we not only canonicalize unbound inference variables, but we
+ /// *also* replace all free regions whatsoever. So for example a
+ /// query like `T: Trait<'static>` would be canonicalized to
+ ///
+ /// ```text
+ /// T: Trait<'?0>
+ /// ```
+ ///
+ /// with a mapping M that maps `'?0` to `'static`.
+ ///
+ /// To get a good understanding of what is happening here, check
+ /// out the [chapter in the rustc guide][c].
+ ///
+ /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query
+ pub fn canonicalize_query<V>(
+ &self,
+ value: &V,
+ ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+ where
+ V: TypeFoldable<'tcx> + Lift<'gcx>,
+ {
+ self.tcx
+ .sess
+ .perf_stats
+ .queries_canonicalized
+ .fetch_add(1, Ordering::Relaxed);
+
+ Canonicalizer::canonicalize(
+ value,
+ Some(self),
+ self.tcx,
+ CanonicalizeRegionMode {
+ static_region: true,
+ other_free_regions: true,
+ },
+ )
+ }
+
+ /// Canonicalizes a query *response* `V`. When we canonicalize a
+ /// query response, we only canonicalize unbound inference
+ /// variables, and we leave other free regions alone. So,
+ /// continuing with the example from `canonicalize_query`, if
+ /// there was an input query `T: Trait<'static>`, it would have
+ /// been canonicalized to
+ ///
+ /// ```text
+ /// T: Trait<'?0>
+ /// ```
+ ///
+ /// with a mapping M that maps `'?0` to `'static`. But if we found that there
+ /// exists only one possible impl of `Trait`, and it looks like
+ ///
+ /// impl<T> Trait<'static> for T { .. }
+ ///
+ /// then we would prepare a query result R that (among other
+ /// things) includes a mapping to `'?0 := 'static`. When
+ /// canonicalizing this query result R, we would leave this
+ /// reference to `'static` alone.
+ ///
+ /// To get a good understanding of what is happening here, check
+ /// out the [chapter in the rustc guide][c].
+ ///
+ /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#canonicalizing-the-query-result
+ pub fn canonicalize_response<V>(
+ &self,
+ value: &V,
+ ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+ where
+ V: TypeFoldable<'tcx> + Lift<'gcx>,
+ {
+ Canonicalizer::canonicalize(
+ value,
+ Some(self),
+ self.tcx,
+ CanonicalizeRegionMode {
+ static_region: false,
+ other_free_regions: false,
+ },
+ )
+ }
+
+ /// A hacky variant of `canonicalize_query` that does not
+ /// canonicalize `'static`. Unfortunately, the existing leak
+ /// check treats `'static` differently in some cases (see also
+ /// #33684), so if we are performing an operation that may need to
+ /// prove "leak-check" related things, we leave `'static`
+ /// alone.
+ ///
+ /// FIXME(#48536) -- once we have universes, we can remove this and just use
+ /// `canonicalize_query`.
+ pub fn canonicalize_hr_query_hack<V>(
+ &self,
+ value: &V,
+ ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+ where
+ V: TypeFoldable<'tcx> + Lift<'gcx>,
+ {
+ self.tcx
+ .sess
+ .perf_stats
+ .queries_canonicalized
+ .fetch_add(1, Ordering::Relaxed);
+
+ Canonicalizer::canonicalize(
+ value,
+ Some(self),
+ self.tcx,
+ CanonicalizeRegionMode {
+ static_region: false,
+ other_free_regions: true,
+ },
+ )
+ }
+}
+
+/// If this flag is true, then all free regions will be replaced with
+/// a canonical var. This is used to make queries as generic as
+/// possible. For example, the query `F: Foo<'static>` would be
+/// canonicalized to `F: Foo<'0>`.
+struct CanonicalizeRegionMode {
+ static_region: bool,
+ other_free_regions: bool,
+}
+
+impl CanonicalizeRegionMode {
+ fn any(&self) -> bool {
+ self.static_region || self.other_free_regions
+ }
+}
+
+struct Canonicalizer<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+ infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
+ tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ variables: IndexVec<CanonicalVar, CanonicalVarInfo>,
+ indices: FxHashMap<Kind<'tcx>, CanonicalVar>,
+ var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
+ canonicalize_region_mode: CanonicalizeRegionMode,
+ needs_canonical_flags: TypeFlags,
+}
+
+impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for Canonicalizer<'cx, 'gcx, 'tcx> {
+ fn tcx<'b>(&'b self) -> TyCtxt<'b, 'gcx, 'tcx> {
+ self.tcx
+ }
+
+ fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+ match *r {
+ ty::ReLateBound(..) => {
+ // leave bound regions alone
+ r
+ }
+
+ ty::ReVar(vid) => {
+ let r = self
+ .infcx
+ .unwrap()
+ .borrow_region_constraints()
+ .opportunistic_resolve_var(self.tcx, vid);
+ let info = CanonicalVarInfo {
+ kind: CanonicalVarKind::Region,
+ };
+ debug!(
+ "canonical: region var found with vid {:?}, \
+ opportunistically resolved to {:?}",
+ vid, r
+ );
+ let cvar = self.canonical_var(info, r.into());
+ self.tcx().mk_region(ty::ReCanonical(cvar))
+ }
+
+ ty::ReStatic => {
+ if self.canonicalize_region_mode.static_region {
+ let info = CanonicalVarInfo {
+ kind: CanonicalVarKind::Region,
+ };
+ let cvar = self.canonical_var(info, r.into());
+ self.tcx().mk_region(ty::ReCanonical(cvar))
+ } else {
+ r
+ }
+ }
+
+ ty::ReEarlyBound(..)
+ | ty::ReFree(_)
+ | ty::ReScope(_)
+ | ty::ReSkolemized(..)
+ | ty::ReEmpty
+ | ty::ReErased => {
+ if self.canonicalize_region_mode.other_free_regions {
+ let info = CanonicalVarInfo {
+ kind: CanonicalVarKind::Region,
+ };
+ let cvar = self.canonical_var(info, r.into());
+ self.tcx().mk_region(ty::ReCanonical(cvar))
+ } else {
+ r
+ }
+ }
+
+ ty::ReClosureBound(..) | ty::ReCanonical(_) => {
+ bug!("canonical region encountered during canonicalization")
+ }
+ }
+ }
+
+ fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
+ match t.sty {
+ ty::TyInfer(ty::TyVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::General, t),
+
+ ty::TyInfer(ty::IntVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Int, t),
+
+ ty::TyInfer(ty::FloatVar(_)) => self.canonicalize_ty_var(CanonicalTyVarKind::Float, t),
+
+ ty::TyInfer(ty::FreshTy(_))
+ | ty::TyInfer(ty::FreshIntTy(_))
+ | ty::TyInfer(ty::FreshFloatTy(_)) => {
+ bug!("encountered a fresh type during canonicalization")
+ }
+
+ ty::TyInfer(ty::CanonicalTy(_)) => {
+ bug!("encountered a canonical type during canonicalization")
+ }
+
+ ty::TyClosure(..)
+ | ty::TyGenerator(..)
+ | ty::TyGeneratorWitness(..)
+ | ty::TyBool
+ | ty::TyChar
+ | ty::TyInt(..)
+ | ty::TyUint(..)
+ | ty::TyFloat(..)
+ | ty::TyAdt(..)
+ | ty::TyStr
+ | ty::TyError
+ | ty::TyArray(..)
+ | ty::TySlice(..)
+ | ty::TyRawPtr(..)
+ | ty::TyRef(..)
+ | ty::TyFnDef(..)
+ | ty::TyFnPtr(_)
+ | ty::TyDynamic(..)
+ | ty::TyNever
+ | ty::TyTuple(..)
+ | ty::TyProjection(..)
+ | ty::TyForeign(..)
+ | ty::TyParam(..)
+ | ty::TyAnon(..) => {
+ if t.flags.intersects(self.needs_canonical_flags) {
+ t.super_fold_with(self)
+ } else {
+ t
+ }
+ }
+ }
+ }
+}
+
+impl<'cx, 'gcx, 'tcx> Canonicalizer<'cx, 'gcx, 'tcx> {
+ /// The main `canonicalize` method, shared impl of
+ /// `canonicalize_query` and `canonicalize_response`.
+ fn canonicalize<V>(
+ value: &V,
+ infcx: Option<&'cx InferCtxt<'cx, 'gcx, 'tcx>>,
+ tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ canonicalize_region_mode: CanonicalizeRegionMode,
+ ) -> (Canonicalized<'gcx, V>, CanonicalVarValues<'tcx>)
+ where
+ V: TypeFoldable<'tcx> + Lift<'gcx>,
+ {
+ debug_assert!(
+ !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS),
+ "canonicalizing a canonical value: {:?}",
+ value,
+ );
+
+ let needs_canonical_flags = if canonicalize_region_mode.any() {
+ TypeFlags::HAS_FREE_REGIONS | TypeFlags::KEEP_IN_LOCAL_TCX
+ } else {
+ TypeFlags::KEEP_IN_LOCAL_TCX
+ };
+
+ let gcx = tcx.global_tcx();
+
+ // Fast path: nothing that needs to be canonicalized.
+ if !value.has_type_flags(needs_canonical_flags) {
+ let out_value = gcx.lift(value).unwrap();
+ let canon_value = Canonical {
+ variables: Slice::empty(),
+ value: out_value,
+ };
+ let values = CanonicalVarValues {
+ var_values: IndexVec::default(),
+ };
+ return (canon_value, values);
+ }
+
+ let mut canonicalizer = Canonicalizer {
+ infcx,
+ tcx,
+ canonicalize_region_mode,
+ needs_canonical_flags,
+ variables: IndexVec::default(),
+ indices: FxHashMap::default(),
+ var_values: IndexVec::default(),
+ };
+ let out_value = value.fold_with(&mut canonicalizer);
+
+ // Once we have canonicalized `out_value`, it should not
+ // contain anything that ties it to this inference context
+ // anymore, so it should live in the global arena.
+ let out_value = gcx.lift(&out_value).unwrap_or_else(|| {
+ bug!(
+ "failed to lift `{:?}`, canonicalized from `{:?}`",
+ out_value,
+ value
+ )
+ });
+
+ let canonical_variables = tcx.intern_canonical_var_infos(&canonicalizer.variables.raw);
+
+ let canonical_value = Canonical {
+ variables: canonical_variables,
+ value: out_value,
+ };
+ let canonical_var_values = CanonicalVarValues {
+ var_values: canonicalizer.var_values,
+ };
+ (canonical_value, canonical_var_values)
+ }
+
+ /// Creates a canonical variable replacing `kind` from the input,
+ /// or returns an existing variable if `kind` has already been
+ /// seen. `kind` is expected to be an unbound variable (or
+ /// potentially a free region).
+ fn canonical_var(&mut self, info: CanonicalVarInfo, kind: Kind<'tcx>) -> CanonicalVar {
+ let Canonicalizer {
+ indices,
+ variables,
+ var_values,
+ ..
+ } = self;
+
+ indices
+ .entry(kind)
+ .or_insert_with(|| {
+ let cvar1 = variables.push(info);
+ let cvar2 = var_values.push(kind);
+ assert_eq!(cvar1, cvar2);
+ cvar1
+ })
+ .clone()
+ }
+
+ /// Given a type variable `ty_var` of the given kind, first check
+ /// if `ty_var` is bound to anything; if so, canonicalize
+ /// *that*. Otherwise, create a new canonical variable for
+ /// `ty_var`.
+ fn canonicalize_ty_var(&mut self, ty_kind: CanonicalTyVarKind, ty_var: Ty<'tcx>) -> Ty<'tcx> {
+ let infcx = self.infcx.expect("encountered ty-var without infcx");
+ let bound_to = infcx.shallow_resolve(ty_var);
+ if bound_to != ty_var {
+ self.fold_ty(bound_to)
+ } else {
+ let info = CanonicalVarInfo {
+ kind: CanonicalVarKind::Ty(ty_kind),
+ };
+ let cvar = self.canonical_var(info, ty_var.into());
+ self.tcx().mk_infer(ty::InferTy::CanonicalTy(cvar))
+ }
+ }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! **Canonicalization** is the key to constructing a query in the
+//! middle of type inference. Ordinarily, it is not possible to store
+//! types from type inference in query keys, because they contain
+//! references to inference variables whose lifetimes are too short
+//! and so forth. Canonicalizing a value T1 using `canonicalize_query`
+//! produces two things:
+//!
+//! - a value T2 where each unbound inference variable has been
+//! replaced with a **canonical variable**;
+//! - a map M (of type `CanonicalVarValues`) from those canonical
+//! variables back to the original.
+//!
+//! We can then do queries using T2. These will give back constraints
+//! on the canonical variables which can be translated, using the map
+//! M, into constraints in our source context. This process of
+//! translating the results back is done by the
+//! `instantiate_query_result` method.
+//!
+//! For a more detailed look at what is happening here, check
+//! out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::{InferCtxt, RegionVariableOrigin, TypeVariableOrigin};
+use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::sync::Lrc;
+use serialize::UseSpecializedDecodable;
+use std::ops::Index;
+use syntax::codemap::Span;
+use ty::fold::TypeFoldable;
+use ty::subst::Kind;
+use ty::{self, CanonicalVar, Lift, Region, Slice, TyCtxt};
+
+mod canonicalizer;
+
+pub mod query_result;
+
+mod substitute;
+
+/// A "canonicalized" type `V` is one where all free inference
+/// variables have been rewritten to "canonical vars". These are
+/// numbered starting from 0 in order of first appearance.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub struct Canonical<'gcx, V> {
+ pub variables: CanonicalVarInfos<'gcx>,
+ pub value: V,
+}
+
+pub type CanonicalVarInfos<'gcx> = &'gcx Slice<CanonicalVarInfo>;
+
+impl<'gcx> UseSpecializedDecodable for CanonicalVarInfos<'gcx> {}
+
+/// A set of values corresponding to the canonical variables from some
+/// `Canonical`. You can give these values to
+/// `canonical_value.substitute` to substitute them into the canonical
+/// value at the right places.
+///
+/// When you canonicalize a value `V`, you get back one of these
+/// vectors with the original values that were replaced by canonical
+/// variables. You will need to supply it later to instantiate the
+/// canonicalized query response.
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub struct CanonicalVarValues<'tcx> {
+ pub var_values: IndexVec<CanonicalVar, Kind<'tcx>>,
+}
+
+/// Information about a canonical variable that is included with the
+/// canonical value. This is sufficient information for code to create
+/// a copy of the canonical value in some other inference context,
+/// with fresh inference variables replacing the canonical values.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub struct CanonicalVarInfo {
+ pub kind: CanonicalVarKind,
+}
+
+/// Describes the "kind" of the canonical variable. This is a "kind"
+/// in the type-theory sense of the term -- i.e., a "meta" type system
+/// that analyzes type-like values.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub enum CanonicalVarKind {
+ /// Some kind of type inference variable.
+ Ty(CanonicalTyVarKind),
+
+ /// Region variable `'?R`.
+ Region,
+}
+
+/// Rust actually has more than one category of type variables;
+/// notably, the type variables we create for literals (e.g., 22 or
+/// 22.) can only be instantiated with integral/float types (e.g.,
+/// usize or f32). In order to faithfully reproduce a type, we need to
+/// know what set of types a given type variable can be unified with.
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcDecodable, RustcEncodable)]
+pub enum CanonicalTyVarKind {
+ /// General type variable `?T` that can be unified with arbitrary types.
+ General,
+
+ /// Integral type variable `?I` (that can only be unified with integral types).
+ Int,
+
+ /// Floating-point type variable `?F` (that can only be unified with float types).
+ Float,
+}
+
+/// After we execute a query with a canonicalized key, we get back a
+/// `Canonical<QueryResult<..>>`. You can use
+/// `instantiate_query_result` to access the data in this result.
+#[derive(Clone, Debug)]
+pub struct QueryResult<'tcx, R> {
+ pub var_values: CanonicalVarValues<'tcx>,
+ pub region_constraints: Vec<QueryRegionConstraint<'tcx>>,
+ pub certainty: Certainty,
+ pub value: R,
+}
+
+pub type Canonicalized<'gcx, V> = Canonical<'gcx, <V as Lift<'gcx>>::Lifted>;
+
+pub type CanonicalizedQueryResult<'gcx, T> =
+ Lrc<Canonical<'gcx, QueryResult<'gcx, <T as Lift<'gcx>>::Lifted>>>;
+
+/// Indicates whether or not we were able to prove the query to be
+/// true.
+#[derive(Copy, Clone, Debug)]
+pub enum Certainty {
+ /// The query is known to be true, presuming that you apply the
+ /// given `var_values` and the region-constraints are satisfied.
+ Proven,
+
+ /// The query is not known to be true, but also not known to be
+ /// false. The `var_values` represent *either* values that must
+ /// hold in order for the query to be true, or helpful tips that
+ /// *might* make it true. Currently rustc's trait solver cannot
+ /// distinguish the two (e.g., due to our preference for where
+ /// clauses over impls).
+ ///
+ /// After some unifications and things have been done, it makes
+ /// sense to try and prove again -- of course, at that point, the
+ /// canonical form will be different, making this a distinct
+ /// query.
+ Ambiguous,
+}
+
+impl Certainty {
+ pub fn is_proven(&self) -> bool {
+ match self {
+ Certainty::Proven => true,
+ Certainty::Ambiguous => false,
+ }
+ }
+
+ pub fn is_ambiguous(&self) -> bool {
+ !self.is_proven()
+ }
+}
+
+impl<'tcx, R> QueryResult<'tcx, R> {
+ pub fn is_proven(&self) -> bool {
+ self.certainty.is_proven()
+ }
+
+ pub fn is_ambiguous(&self) -> bool {
+ !self.is_proven()
+ }
+}
+
+impl<'tcx, R> Canonical<'tcx, QueryResult<'tcx, R>> {
+ pub fn is_proven(&self) -> bool {
+ self.value.is_proven()
+ }
+
+ pub fn is_ambiguous(&self) -> bool {
+ !self.is_proven()
+ }
+}
+
+pub type QueryRegionConstraint<'tcx> = ty::Binder<ty::OutlivesPredicate<Kind<'tcx>, Region<'tcx>>>;
+
+impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+ /// Creates a substitution S for the canonical value with fresh
+ /// inference variables and applies it to the canonical value.
+ /// Returns both the instantiated result *and* the substitution S.
+ ///
+ /// This is useful at the start of a query: it basically brings
+ /// the canonical value "into scope" within your new infcx. At the
+ /// end of processing, the substitution S (once canonicalized)
+ /// then represents the values that you computed for each of the
+ /// canonical inputs to your query.
+ pub fn instantiate_canonical_with_fresh_inference_vars<T>(
+ &self,
+ span: Span,
+ canonical: &Canonical<'tcx, T>,
+ ) -> (T, CanonicalVarValues<'tcx>)
+ where
+ T: TypeFoldable<'tcx>,
+ {
+ let canonical_inference_vars =
+ self.fresh_inference_vars_for_canonical_vars(span, canonical.variables);
+ let result = canonical.substitute(self.tcx, &canonical_inference_vars);
+ (result, canonical_inference_vars)
+ }
+
+ /// Given the "infos" about the canonical variables from some
+ /// canonical, creates fresh inference variables with the same
+ /// characteristics. You can then use `substitute` to instantiate
+ /// the canonical variable with these inference variables.
+ fn fresh_inference_vars_for_canonical_vars(
+ &self,
+ span: Span,
+ variables: &Slice<CanonicalVarInfo>,
+ ) -> CanonicalVarValues<'tcx> {
+ let var_values: IndexVec<CanonicalVar, Kind<'tcx>> = variables
+ .iter()
+ .map(|info| self.fresh_inference_var_for_canonical_var(span, *info))
+ .collect();
+
+ CanonicalVarValues { var_values }
+ }
+
+ /// Given the "info" about a canonical variable, creates a fresh
+ /// inference variable with the same characteristics.
+ fn fresh_inference_var_for_canonical_var(
+ &self,
+ span: Span,
+ cv_info: CanonicalVarInfo,
+ ) -> Kind<'tcx> {
+ match cv_info.kind {
+ CanonicalVarKind::Ty(ty_kind) => {
+ let ty = match ty_kind {
+ CanonicalTyVarKind::General => {
+ self.next_ty_var(TypeVariableOrigin::MiscVariable(span))
+ }
+
+ CanonicalTyVarKind::Int => self.tcx.mk_int_var(self.next_int_var_id()),
+
+ CanonicalTyVarKind::Float => self.tcx.mk_float_var(self.next_float_var_id()),
+ };
+ ty.into()
+ }
+
+ CanonicalVarKind::Region => self
+ .next_region_var(RegionVariableOrigin::MiscVariable(span))
+ .into(),
+ }
+ }
+}
+
+CloneTypeFoldableAndLiftImpls! {
+ ::infer::canonical::Certainty,
+ ::infer::canonical::CanonicalVarInfo,
+ ::infer::canonical::CanonicalVarKind,
+}
+
+CloneTypeFoldableImpls! {
+ for <'tcx> {
+ ::infer::canonical::CanonicalVarInfos<'tcx>,
+ }
+}
+
+BraceStructTypeFoldableImpl! {
+ impl<'tcx, C> TypeFoldable<'tcx> for Canonical<'tcx, C> {
+ variables,
+ value,
+ } where C: TypeFoldable<'tcx>
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx, T> Lift<'tcx> for Canonical<'a, T> {
+ type Lifted = Canonical<'tcx, T::Lifted>;
+ variables, value
+ } where T: Lift<'tcx>
+}
+
+impl<'tcx> CanonicalVarValues<'tcx> {
+ fn iter<'a>(&'a self) -> impl Iterator<Item = Kind<'tcx>> + 'a {
+ self.var_values.iter().cloned()
+ }
+
+ fn len(&self) -> usize {
+ self.var_values.len()
+ }
+}
+
+impl<'a, 'tcx> IntoIterator for &'a CanonicalVarValues<'tcx> {
+ type Item = Kind<'tcx>;
+ type IntoIter = ::std::iter::Cloned<::std::slice::Iter<'a, Kind<'tcx>>>;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.var_values.iter().cloned()
+ }
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx> Lift<'tcx> for CanonicalVarValues<'a> {
+ type Lifted = CanonicalVarValues<'tcx>;
+ var_values,
+ }
+}
+
+BraceStructTypeFoldableImpl! {
+ impl<'tcx> TypeFoldable<'tcx> for CanonicalVarValues<'tcx> {
+ var_values,
+ }
+}
+
+BraceStructTypeFoldableImpl! {
+ impl<'tcx, R> TypeFoldable<'tcx> for QueryResult<'tcx, R> {
+ var_values, region_constraints, certainty, value
+ } where R: TypeFoldable<'tcx>,
+}
+
+BraceStructLiftImpl! {
+ impl<'a, 'tcx, R> Lift<'tcx> for QueryResult<'a, R> {
+ type Lifted = QueryResult<'tcx, R::Lifted>;
+ var_values, region_constraints, certainty, value
+ } where R: Lift<'tcx>
+}
+
+impl<'tcx> Index<CanonicalVar> for CanonicalVarValues<'tcx> {
+ type Output = Kind<'tcx>;
+
+ fn index(&self, value: CanonicalVar) -> &Kind<'tcx> {
+ &self.var_values[value]
+ }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains the code to instantiate a "query result", and
+//! in particular to extract out the resulting region obligations and
+//! encode them therein.
+//!
+//! For an overview of what canonicalization is and how it fits into
+//! rustc, check out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::canonical::substitute::substitute_value;
+use infer::canonical::{Canonical, CanonicalVarKind, CanonicalVarValues, CanonicalizedQueryResult,
+ Certainty, QueryRegionConstraint, QueryResult};
+use infer::region_constraints::{Constraint, RegionConstraintData};
+use infer::InferCtxtBuilder;
+use infer::{InferCtxt, InferOk, InferResult, RegionObligation};
+use rustc_data_structures::indexed_vec::Idx;
+use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::sync::Lrc;
+use std::fmt::Debug;
+use syntax::ast;
+use syntax_pos::DUMMY_SP;
+use traits::query::{Fallible, NoSolution};
+use traits::{FulfillmentContext, TraitEngine};
+use traits::{Obligation, ObligationCause, PredicateObligation};
+use ty::fold::TypeFoldable;
+use ty::subst::{Kind, UnpackedKind};
+use ty::{self, CanonicalVar, Lift, TyCtxt};
+
+impl<'cx, 'gcx, 'tcx> InferCtxtBuilder<'cx, 'gcx, 'tcx> {
+    /// The "main method" for a canonicalized trait query. Given the
+    /// canonical key `canonical_key`, this method will create a new
+    /// inference context, instantiate the key, and run your operation
+    /// `op`. The operation should yield up a result (of type `R`) as
+    /// well as a set of trait obligations that must be fully
+    /// satisfied. These obligations will be processed and the
+    /// canonical result created.
+    ///
+    /// Returns `NoSolution` in the event of any error.
+    ///
+    /// (It might be mildly nicer to implement this on `TyCtxt`, and
+    /// not `InferCtxtBuilder`, but that is a bit tricky right now.
+    /// In part because we would need a `for<'gcx: 'tcx>` sort of
+    /// bound for the closure and in part because it is convenient to
+    /// have `'tcx` be free on this function so that we can talk about
+    /// `K: TypeFoldable<'tcx>`.)
+    pub fn enter_canonical_trait_query<K, R>(
+        &'tcx mut self,
+        canonical_key: &Canonical<'tcx, K>,
+        operation: impl FnOnce(&InferCtxt<'_, 'gcx, 'tcx>, &mut FulfillmentContext<'tcx>, K)
+            -> Fallible<R>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, R>>
+    where
+        K: TypeFoldable<'tcx>,
+        R: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
+    {
+        self.enter(|ref infcx| {
+            // Replace each canonical variable in the key with a fresh
+            // inference variable before handing it to `operation`.
+            let (key, canonical_inference_vars) =
+                infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &canonical_key);
+            let fulfill_cx = &mut FulfillmentContext::new();
+            let value = operation(infcx, fulfill_cx, key)?;
+            // Prove the accumulated obligations and canonicalize the answer.
+            infcx.make_canonicalized_query_result(canonical_inference_vars, value, fulfill_cx)
+        })
+    }
+}
+
+impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
+    /// This method is meant to be invoked as the final step of a canonical query
+    /// implementation. It is given:
+    ///
+    /// - the instantiated variables `inference_vars` created from the query key
+    /// - the result `answer` of the query
+    /// - a fulfillment context `fulfill_cx` that may contain various obligations which
+    ///   have yet to be proven.
+    ///
+    /// Given this, the function will process the obligations pending
+    /// in `fulfill_cx`:
+    ///
+    /// - If all the obligations can be proven successfully, it will
+    ///   package up any resulting region obligations (extracted from
+    ///   `infcx`) along with the fully resolved value `answer` into a
+    ///   query result (which is then itself canonicalized).
+    /// - If some obligations can be neither proven nor disproven, then
+    ///   the same thing happens, but the resulting query is marked as ambiguous.
+    /// - Finally, if any of the obligations result in a hard error,
+    ///   then `Err(NoSolution)` is returned.
+    pub fn make_canonicalized_query_result<T>(
+        &self,
+        inference_vars: CanonicalVarValues<'tcx>,
+        answer: T,
+        fulfill_cx: &mut FulfillmentContext<'tcx>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, T>>
+    where
+        T: Debug + Lift<'gcx> + TypeFoldable<'tcx>,
+    {
+        let query_result = self.make_query_result(inference_vars, answer, fulfill_cx)?;
+        // The second component returned by `canonicalize_response` is not
+        // needed by callers, so it is discarded here.
+        let (canonical_result, _) = self.canonicalize_response(&query_result);
+
+        debug!(
+            "make_canonicalized_query_result: canonical_result = {:#?}",
+            canonical_result
+        );
+
+        Ok(Lrc::new(canonical_result))
+    }
+
+    /// Helper for `make_canonicalized_query_result` that does
+    /// everything up until the final canonicalization.
+    fn make_query_result<T>(
+        &self,
+        inference_vars: CanonicalVarValues<'tcx>,
+        answer: T,
+        fulfill_cx: &mut FulfillmentContext<'tcx>,
+    ) -> Result<QueryResult<'tcx, T>, NoSolution>
+    where
+        T: Debug + TypeFoldable<'tcx> + Lift<'gcx>,
+    {
+        let tcx = self.tcx;
+
+        debug!(
+            "make_query_result(\
+             inference_vars={:?}, \
+             answer={:?})",
+            inference_vars, answer,
+        );
+
+        // Select everything, returning errors.
+        let true_errors = match fulfill_cx.select_where_possible(self) {
+            Ok(()) => vec![],
+            Err(errors) => errors,
+        };
+        debug!("true_errors = {:#?}", true_errors);
+
+        if !true_errors.is_empty() {
+            // FIXME -- we don't indicate *why* we failed to solve
+            debug!("make_query_result: true_errors={:#?}", true_errors);
+            return Err(NoSolution);
+        }
+
+        // Anything left unselected *now* must be an ambiguity.
+        let ambig_errors = match fulfill_cx.select_all_or_error(self) {
+            Ok(()) => vec![],
+            Err(errors) => errors,
+        };
+        debug!("ambig_errors = {:#?}", ambig_errors);
+
+        // Scrape the region obligations/constraints accumulated during
+        // inference and convert them into the query's outlives form.
+        let region_obligations = self.take_registered_region_obligations();
+        let region_constraints = self.with_region_constraints(|region_constraints| {
+            make_query_outlives(tcx, region_obligations, region_constraints)
+        });
+
+        let certainty = if ambig_errors.is_empty() {
+            Certainty::Proven
+        } else {
+            Certainty::Ambiguous
+        };
+
+        Ok(QueryResult {
+            var_values: inference_vars,
+            region_constraints,
+            certainty,
+            value: answer,
+        })
+    }
+
+    /// Given the (canonicalized) result to a canonical query,
+    /// instantiates the result so it can be used, plugging in the
+    /// values from the canonical query. (Note that the result may
+    /// have been ambiguous; you should check the certainty level of
+    /// the query before applying this function.)
+    ///
+    /// To get a good understanding of what is happening here, check
+    /// out the [chapter in the rustc guide][c].
+    ///
+    /// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html#processing-the-canonicalized-query-result
+    pub fn instantiate_query_result_and_region_obligations<R>(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        original_values: &CanonicalVarValues<'tcx>,
+        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+    ) -> InferResult<'tcx, R>
+    where
+        R: Debug + TypeFoldable<'tcx>,
+    {
+        let InferOk {
+            value: result_subst,
+            mut obligations,
+        } = self.query_result_substitution(cause, param_env, original_values, query_result)?;
+
+        // Convert the query's region constraints into obligations to be
+        // proven by the caller's inference context.
+        obligations.extend(self.query_region_constraints_into_obligations(
+            cause,
+            param_env,
+            &query_result.value.region_constraints,
+            &result_subst,
+        ));
+
+        let user_result: R =
+            query_result.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
+
+        Ok(InferOk {
+            value: user_result,
+            obligations,
+        })
+    }
+
+    /// An alternative to
+    /// `instantiate_query_result_and_region_obligations` that is more
+    /// efficient for NLL. NLL is a bit more advanced in the
+    /// "transition to chalk" than the rest of the compiler. During
+    /// the NLL type check, all of the "processing" of types and
+    /// things happens in queries -- the NLL checker itself is only
+    /// interested in the region obligations (`'a: 'b` or `T: 'b`)
+    /// that come out of these queries, which it wants to convert into
+    /// MIR-based constraints and solve. Therefore, it is most
+    /// convenient for the NLL Type Checker to **directly consume**
+    /// the `QueryRegionConstraint` values that arise from doing a
+    /// query. This is in contrast to other parts of the compiler, which
+    /// would prefer for those `QueryRegionConstraint` to be converted
+    /// into the older infcx-style constraints (e.g., calls to
+    /// `sub_regions` or `register_region_obligation`).
+    ///
+    /// Therefore, `instantiate_nll_query_result_and_region_obligations` performs the same
+    /// basic operations as `instantiate_query_result_and_region_obligations` but
+    /// it returns its result differently:
+    ///
+    /// - It creates a substitution `S` that maps from the original
+    ///   query variables to the values computed in the query
+    ///   result. If any errors arise, they are propagated back as an
+    ///   `Err` result.
+    /// - In the case of a successful substitution, we will append
+    ///   `QueryRegionConstraint` values onto the
+    ///   `output_query_region_constraints` vector for the solver to
+    ///   use (if an error arises, some values may also be pushed, but
+    ///   they should be ignored).
+    /// - It **can happen** (though it rarely does currently) that
+    ///   equating types and things will give rise to subobligations
+    ///   that must be processed. In this case, those subobligations
+    ///   are propagated back in the return value.
+    /// - Finally, the query result (of type `R`) is propagated back,
+    ///   after applying the substitution `S`.
+    pub fn instantiate_nll_query_result_and_region_obligations<R>(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        original_values: &CanonicalVarValues<'tcx>,
+        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+        output_query_region_constraints: &mut Vec<QueryRegionConstraint<'tcx>>,
+    ) -> InferResult<'tcx, R>
+    where
+        R: Debug + TypeFoldable<'tcx>,
+    {
+        // In an NLL query, there should be no type variables in the
+        // query, only region variables.
+        debug_assert!(query_result.variables.iter().all(|v| match v.kind {
+            CanonicalVarKind::Ty(_) => false,
+            CanonicalVarKind::Region => true,
+        }));
+
+        let result_subst =
+            self.query_result_substitution_guess(cause, original_values, query_result);
+
+        // Compute `QueryRegionConstraint` values that unify each of
+        // the original values `v_o` that was canonicalized into a
+        // variable...
+        let mut obligations = vec![];
+
+        for (index, original_value) in original_values.var_values.iter_enumerated() {
+            // ...with the value `v_r` of that variable from the query.
+            let result_value = query_result
+                .substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index]);
+            match (original_value.unpack(), result_value.unpack()) {
+                (UnpackedKind::Lifetime(ty::ReErased), UnpackedKind::Lifetime(ty::ReErased)) => {
+                    // no action needed
+                }
+
+                (UnpackedKind::Lifetime(v_o), UnpackedKind::Lifetime(v_r)) => {
+                    // To make `v_o = v_r`, we emit `v_o: v_r` and `v_r: v_o`.
+                    if v_o != v_r {
+                        output_query_region_constraints
+                            .push(ty::Binder::dummy(ty::OutlivesPredicate(v_o.into(), v_r)));
+                        output_query_region_constraints
+                            .push(ty::Binder::dummy(ty::OutlivesPredicate(v_r.into(), v_o)));
+                    }
+                }
+
+                (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
+                    // Equate the two types; any subobligations are passed
+                    // back to the caller in the return value.
+                    let ok = self.at(cause, param_env).eq(v1, v2)?;
+                    obligations.extend(ok.into_obligations());
+                }
+
+                _ => {
+                    bug!(
+                        "kind mismatch, cannot unify {:?} and {:?}",
+                        original_value,
+                        result_value
+                    );
+                }
+            }
+        }
+
+        // ...also include the other query region constraints from the query.
+        output_query_region_constraints.reserve(query_result.value.region_constraints.len());
+        for r_c in query_result.value.region_constraints.iter() {
+            output_query_region_constraints.push(r_c.map_bound(|ty::OutlivesPredicate(k1, r2)| {
+                let k1 = substitute_value(self.tcx, &result_subst, &k1);
+                let r2 = substitute_value(self.tcx, &result_subst, &r2);
+                ty::OutlivesPredicate(k1, r2)
+            }));
+        }
+
+        let user_result: R =
+            query_result.substitute_projected(self.tcx, &result_subst, |q_r| &q_r.value);
+
+        Ok(InferOk {
+            value: user_result,
+            obligations,
+        })
+    }
+
+    /// Given the original values and the (canonicalized) result from
+    /// computing a query, returns a substitution that can be applied
+    /// to the query result to convert the result back into the
+    /// original namespace.
+    ///
+    /// The substitution also comes accompanied with subobligations
+    /// that arose from unification; these might occur if (for
+    /// example) we are doing lazy normalization and the value
+    /// assigned to a type variable is unified with an unnormalized
+    /// projection.
+    fn query_result_substitution<R>(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        original_values: &CanonicalVarValues<'tcx>,
+        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+    ) -> InferResult<'tcx, CanonicalVarValues<'tcx>>
+    where
+        R: Debug + TypeFoldable<'tcx>,
+    {
+        debug!(
+            "query_result_substitution(original_values={:#?}, query_result={:#?})",
+            original_values, query_result,
+        );
+
+        // First guess a substitution cheaply, then unify the guess with
+        // the actual values to make sure it holds (and to collect any
+        // subobligations produced by unification).
+        let result_subst =
+            self.query_result_substitution_guess(cause, original_values, query_result);
+
+        let obligations = self.unify_query_result_substitution_guess(
+            cause,
+            param_env,
+            original_values,
+            &result_subst,
+            query_result,
+        )?
+            .into_obligations();
+
+        Ok(InferOk {
+            value: result_subst,
+            obligations,
+        })
+    }
+
+    /// Given the original values and the (canonicalized) result from
+    /// computing a query, returns a **guess** at a substitution that
+    /// can be applied to the query result to convert the result back
+    /// into the original namespace. This is called a **guess**
+    /// because it uses a quick heuristic to find the values for each
+    /// canonical variable; if that quick heuristic fails, then we
+    /// will instantiate fresh inference variables for each canonical
+    /// variable instead. Therefore, the result of this method must be
+    /// properly unified afterwards (see
+    /// `unify_query_result_substitution_guess`).
+    fn query_result_substitution_guess<R>(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        original_values: &CanonicalVarValues<'tcx>,
+        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+    ) -> CanonicalVarValues<'tcx>
+    where
+        R: Debug + TypeFoldable<'tcx>,
+    {
+        debug!(
+            "query_result_substitution_guess(original_values={:#?}, query_result={:#?})",
+            original_values, query_result,
+        );
+
+        // Every canonical query result includes values for each of
+        // the inputs to the query. Therefore, we begin by unifying
+        // these values with the original inputs that were
+        // canonicalized.
+        let result_values = &query_result.value.var_values;
+        assert_eq!(original_values.len(), result_values.len());
+
+        // Quickly try to find initial values for the canonical
+        // variables in the result in terms of the query. We do this
+        // by iterating down the values that the query gave to each of
+        // the canonical inputs. If we find that one of those values
+        // is directly equal to one of the canonical variables in the
+        // result, then we can type the corresponding value from the
+        // input. See the example above.
+        let mut opt_values: IndexVec<CanonicalVar, Option<Kind<'tcx>>> =
+            IndexVec::from_elem_n(None, query_result.variables.len());
+
+        // In terms of our example above, we are iterating over pairs like:
+        // [(?A, Vec<?0>), ('static, '?1), (?B, ?0)]
+        for (original_value, result_value) in original_values.iter().zip(result_values) {
+            match result_value.unpack() {
+                UnpackedKind::Type(result_value) => {
+                    // e.g., here `result_value` might be `?0` in the example above...
+                    if let ty::TyInfer(ty::InferTy::CanonicalTy(index)) = result_value.sty {
+                        // in which case we would set `canonical_vars[0]` to `Some(?U)`.
+                        opt_values[index] = Some(original_value);
+                    }
+                }
+                UnpackedKind::Lifetime(result_value) => {
+                    // e.g., here `result_value` might be `'?1` in the example above...
+                    if let &ty::RegionKind::ReCanonical(index) = result_value {
+                        // in which case we would set `canonical_vars[0]` to `Some('static)`.
+                        opt_values[index] = Some(original_value);
+                    }
+                }
+            }
+        }
+
+        // Create a result substitution: if we found a value for a
+        // given variable in the loop above, use that. Otherwise, use
+        // a fresh inference variable.
+        let result_subst = CanonicalVarValues {
+            var_values: query_result
+                .variables
+                .iter()
+                .enumerate()
+                .map(|(index, info)| match opt_values[CanonicalVar::new(index)] {
+                    Some(k) => k,
+                    None => self.fresh_inference_var_for_canonical_var(cause.span, *info),
+                })
+                .collect(),
+        };
+
+        result_subst
+    }
+
+    /// Given a "guess" at the values for the canonical variables in
+    /// the input, try to unify with the *actual* values found in the
+    /// query result. Often, but not always, this is a no-op, because
+    /// we already found the mapping in the "guessing" step.
+    ///
+    /// See also: `query_result_substitution_guess`
+    fn unify_query_result_substitution_guess<R>(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        original_values: &CanonicalVarValues<'tcx>,
+        result_subst: &CanonicalVarValues<'tcx>,
+        query_result: &Canonical<'tcx, QueryResult<'tcx, R>>,
+    ) -> InferResult<'tcx, ()>
+    where
+        R: Debug + TypeFoldable<'tcx>,
+    {
+        // A closure that yields the result value for the given
+        // canonical variable; this is taken from
+        // `query_result.var_values` after applying the substitution
+        // `result_subst`.
+        let substituted_query_result = |index: CanonicalVar| -> Kind<'tcx> {
+            query_result.substitute_projected(self.tcx, &result_subst, |v| &v.var_values[index])
+        };
+
+        // Unify the original value for each variable with the value
+        // taken from `query_result` (after applying `result_subst`).
+        Ok(self.unify_canonical_vars(cause, param_env, original_values, substituted_query_result)?)
+    }
+
+    /// Converts the region constraints resulting from a query into an
+    /// iterator of obligations.
+    fn query_region_constraints_into_obligations<'a>(
+        &'a self,
+        cause: &'a ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        unsubstituted_region_constraints: &'a [QueryRegionConstraint<'tcx>],
+        result_subst: &'a CanonicalVarValues<'tcx>,
+    ) -> impl Iterator<Item = PredicateObligation<'tcx>> + 'a {
+        Box::new(
+            unsubstituted_region_constraints
+                .iter()
+                .map(move |constraint| {
+                    let ty::OutlivesPredicate(k1, r2) = constraint.skip_binder(); // restored below
+                    let k1 = substitute_value(self.tcx, result_subst, k1);
+                    let r2 = substitute_value(self.tcx, result_subst, r2);
+                    // A `Kind` is either a region or a type; emit the
+                    // matching flavor of outlives predicate.
+                    match k1.unpack() {
+                        UnpackedKind::Lifetime(r1) => Obligation::new(
+                            cause.clone(),
+                            param_env,
+                            ty::Predicate::RegionOutlives(ty::Binder::dummy(
+                                ty::OutlivesPredicate(r1, r2),
+                            )),
+                        ),
+
+                        UnpackedKind::Type(t1) => Obligation::new(
+                            cause.clone(),
+                            param_env,
+                            ty::Predicate::TypeOutlives(ty::Binder::dummy(ty::OutlivesPredicate(
+                                t1, r2,
+                            ))),
+                        ),
+                    }
+                }),
+        ) as Box<dyn Iterator<Item = _>>
+    }
+
+    /// Given two sets of values for the same set of canonical variables, unify them.
+    /// The second set is produced lazily by supplying indices from the first set.
+    fn unify_canonical_vars(
+        &self,
+        cause: &ObligationCause<'tcx>,
+        param_env: ty::ParamEnv<'tcx>,
+        variables1: &CanonicalVarValues<'tcx>,
+        variables2: impl Fn(CanonicalVar) -> Kind<'tcx>,
+    ) -> InferResult<'tcx, ()> {
+        // NOTE(review): all unifications run inside `commit_if_ok`, which
+        // presumably rolls back inference side effects on `Err` — confirm.
+        self.commit_if_ok(|_| {
+            let mut obligations = vec![];
+            for (index, value1) in variables1.var_values.iter_enumerated() {
+                let value2 = variables2(index);
+
+                match (value1.unpack(), value2.unpack()) {
+                    (UnpackedKind::Type(v1), UnpackedKind::Type(v2)) => {
+                        obligations
+                            .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
+                    }
+                    (
+                        UnpackedKind::Lifetime(ty::ReErased),
+                        UnpackedKind::Lifetime(ty::ReErased),
+                    ) => {
+                        // no action needed
+                    }
+                    (UnpackedKind::Lifetime(v1), UnpackedKind::Lifetime(v2)) => {
+                        obligations
+                            .extend(self.at(cause, param_env).eq(v1, v2)?.into_obligations());
+                    }
+                    _ => {
+                        bug!("kind mismatch, cannot unify {:?} and {:?}", value1, value2,);
+                    }
+                }
+            }
+            Ok(InferOk {
+                value: (),
+                obligations,
+            })
+        })
+    }
+}
+
+/// Given the region obligations and constraints scraped from the infcx,
+/// creates query region constraints.
+pub fn make_query_outlives<'tcx>(
+    tcx: TyCtxt<'_, '_, 'tcx>,
+    region_obligations: Vec<(ast::NodeId, RegionObligation<'tcx>)>,
+    region_constraints: &RegionConstraintData<'tcx>,
+) -> Vec<QueryRegionConstraint<'tcx>> {
+    let RegionConstraintData {
+        constraints,
+        verifys,
+        givens,
+    } = region_constraints;
+
+    // These constraint kinds are not expected here; if they ever show
+    // up, this conversion needs to be extended to handle them.
+    assert!(verifys.is_empty());
+    assert!(givens.is_empty());
+
+    let mut outlives: Vec<_> = constraints
+        .into_iter()
+        .map(|(k, _)| match *k {
+            // Swap regions because we are going from sub (<=) to outlives
+            // (>=).
+            Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(
+                tcx.mk_region(ty::ReVar(v2)).into(),
+                tcx.mk_region(ty::ReVar(v1)),
+            ),
+            Constraint::VarSubReg(v1, r2) => {
+                ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))
+            }
+            Constraint::RegSubVar(r1, v2) => {
+                ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)
+            }
+            Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),
+        })
+        .map(ty::Binder::dummy) // no bound regions in the code above
+        .collect();
+
+    // Add one `sup_type: sub_region` constraint per registered region
+    // obligation; the `NodeId` component is not needed here.
+    outlives.extend(
+        region_obligations
+            .into_iter()
+            .map(|(_, r_o)| ty::OutlivesPredicate(r_o.sup_type.into(), r_o.sub_region))
+            .map(ty::Binder::dummy), // no bound regions in the code above
+    );
+
+    outlives
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! This module contains code to substitute new values into a
+//! `Canonical<'tcx, T>`.
+//!
+//! For an overview of what canonicalization is and how it fits into
+//! rustc, check out the [chapter in the rustc guide][c].
+//!
+//! [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+
+use infer::canonical::{Canonical, CanonicalVarValues};
+use ty::fold::{TypeFoldable, TypeFolder};
+use ty::subst::UnpackedKind;
+use ty::{self, Ty, TyCtxt, TypeFlags};
+
+impl<'tcx, V> Canonical<'tcx, V> {
+    /// Instantiate the wrapped value, replacing each canonical value
+    /// with the value given in `var_values`.
+    pub fn substitute(&self, tcx: TyCtxt<'_, '_, 'tcx>, var_values: &CanonicalVarValues<'tcx>) -> V
+    where
+        V: TypeFoldable<'tcx>,
+    {
+        // Substituting the whole value is just projecting the identity.
+        self.substitute_projected(tcx, var_values, |value| value)
+    }
+
+    /// Allows one to apply a substitute to some subset of
+    /// `self.value`. Invoke `projection_fn` with `self.value` to get
+    /// a value V that is expressed in terms of the same canonical
+    /// variables bound in `self` (usually this extracts from subset
+    /// of `self`). Apply the substitution `var_values` to this value
+    /// V, replacing each of the canonical variables.
+    pub fn substitute_projected<T>(
+        &self,
+        tcx: TyCtxt<'_, '_, 'tcx>,
+        var_values: &CanonicalVarValues<'tcx>,
+        projection_fn: impl FnOnce(&V) -> &T,
+    ) -> T
+    where
+        T: TypeFoldable<'tcx>,
+    {
+        // The substitution must supply exactly one value per canonical variable.
+        assert_eq!(self.variables.len(), var_values.var_values.len());
+        let value = projection_fn(&self.value);
+        substitute_value(tcx, var_values, value)
+    }
+}
+
+/// Substitute the values from `var_values` into `value`. `var_values`
+/// must be values for the set of canonical variables that appear in
+/// `value`.
+pub(super) fn substitute_value<'a, 'tcx, T>(
+    tcx: TyCtxt<'_, '_, 'tcx>,
+    var_values: &CanonicalVarValues<'tcx>,
+    value: &'a T,
+) -> T
+where
+    T: TypeFoldable<'tcx>,
+{
+    if var_values.var_values.is_empty() {
+        // No variables at all: sanity-check that `value` really contains
+        // no canonical variables, then return a plain clone.
+        debug_assert!(!value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS));
+        value.clone()
+    } else if !value.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
+        // Fast path: nothing to substitute, so skip the fold entirely.
+        value.clone()
+    } else {
+        value.fold_with(&mut CanonicalVarValuesSubst { tcx, var_values })
+    }
+}
+
+/// Folder that replaces each canonical type/region variable with the
+/// corresponding entry of `var_values`.
+struct CanonicalVarValuesSubst<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+    tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+    var_values: &'cx CanonicalVarValues<'tcx>,
+}
+
+impl<'cx, 'gcx, 'tcx> TypeFolder<'gcx, 'tcx> for CanonicalVarValuesSubst<'cx, 'gcx, 'tcx> {
+    fn tcx(&self) -> TyCtxt<'_, 'gcx, 'tcx> {
+        self.tcx
+    }
+
+    fn fold_ty(&mut self, t: Ty<'tcx>) -> Ty<'tcx> {
+        match t.sty {
+            ty::TyInfer(ty::InferTy::CanonicalTy(c)) => {
+                // A canonical type variable: its assigned value must
+                // itself be a type, or the substitution is malformed.
+                match self.var_values.var_values[c].unpack() {
+                    UnpackedKind::Type(ty) => ty,
+                    r => bug!("{:?} is a type but value is {:?}", c, r),
+                }
+            }
+            _ => {
+                if !t.has_type_flags(TypeFlags::HAS_CANONICAL_VARS) {
+                    // Fast path: no canonical variables below this point.
+                    t
+                } else {
+                    t.super_fold_with(self)
+                }
+            }
+        }
+    }
+
+    fn fold_region(&mut self, r: ty::Region<'tcx>) -> ty::Region<'tcx> {
+        match r {
+            // A canonical region variable: its assigned value must be a
+            // lifetime, or the substitution is malformed.
+            ty::RegionKind::ReCanonical(c) => match self.var_values.var_values[*c].unpack() {
+                UnpackedKind::Lifetime(l) => l,
+                r => bug!("{:?} is a region but value is {:?}", c, r),
+            },
+            _ => r.super_fold_with(self),
+        }
+    }
+}
infer::LateBoundRegion(_, br, infer::AssocTypeProjection(def_id)) => format!(
" for lifetime parameter {}in trait containing associated type `{}`",
br_string(br),
- self.tcx.associated_item(def_id).name
+ self.tcx.associated_item(def_id).ident
),
infer::EarlyBoundRegion(_, name) => format!(" for lifetime parameter `{}`", name),
infer::BoundRegionInCoherence(name) => {
labels.clear();
labels.push((pattern.span, format!("consider giving this closure parameter a type")));
} else if let Some(pattern) = local_visitor.found_local_pattern {
- if let Some(simple_name) = pattern.simple_name() {
- labels.push((pattern.span, format!("consider giving `{}` a type", simple_name)));
+ if let Some(simple_ident) = pattern.simple_ident() {
+ labels.push((pattern.span, format!("consider giving `{}` a type", simple_ident)));
} else {
labels.push((pattern.span, format!("consider giving the pattern a type")));
}
let sub_is_ret_type =
self.is_return_type_anon(scope_def_id_sub, bregion_sub, ty_fndecl_sub);
- let span_label_var1 = if let Some(simple_name) = anon_arg_sup.pat.simple_name() {
- format!(" from `{}`", simple_name)
+ let span_label_var1 = if let Some(simple_ident) = anon_arg_sup.pat.simple_ident() {
+ format!(" from `{}`", simple_ident)
} else {
format!("")
};
- let span_label_var2 = if let Some(simple_name) = anon_arg_sub.pat.simple_name() {
- format!(" into `{}`", simple_name)
+ let span_label_var2 = if let Some(simple_ident) = anon_arg_sub.pat.simple_ident() {
+ format!(" into `{}`", simple_ident)
} else {
format!("")
};
mod find_anon_type;
mod named_anon_conflict;
mod outlives_closure;
+mod static_impl_trait;
mod util;
impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
self.try_report_named_anon_conflict()
.or_else(|| self.try_report_anon_anon_conflict())
.or_else(|| self.try_report_outlives_closure())
+ .or_else(|| self.try_report_static_impl_trait())
}
pub fn get_regions(&self) -> (Span, ty::Region<'tcx>, ty::Region<'tcx>) {
}
}
- let (error_var, span_label_var) = if let Some(simple_name) = arg.pat.simple_name() {
+ let (error_var, span_label_var) = if let Some(simple_ident) = arg.pat.simple_ident() {
(
- format!("the type of `{}`", simple_name),
- format!("the type of `{}`", simple_name),
+ format!("the type of `{}`", simple_ident),
+ format!("the type of `{}`", simple_ident),
)
} else {
("parameter type".to_owned(), "type".to_owned())
--- /dev/null
+// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+//! Error Reporting for static impl Traits.
+
+use infer::error_reporting::nice_region_error::NiceRegionError;
+use infer::lexical_region_resolve::RegionResolutionError;
+use ty::{BoundRegion, FreeRegion, RegionKind};
+use util::common::ErrorReported;
+
+impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
+    /// Print the error message for lifetime errors when the return type is a static impl Trait.
+    pub(super) fn try_report_static_impl_trait(&self) -> Option<ErrorReported> {
+        if let Some(ref error) = self.error {
+            match error.clone() {
+                RegionResolutionError::SubSupConflict(
+                    var_origin,
+                    sub_origin,
+                    sub_r,
+                    sup_origin,
+                    sup_r,
+                ) => {
+                    // This diagnostic only applies when the "sub" region is
+                    // `'static` and the function returns `impl Trait`.
+                    let anon_reg_sup = self.is_suitable_region(sup_r)?;
+                    if sub_r == &RegionKind::ReStatic &&
+                        self.is_return_type_impl_trait(anon_reg_sup.def_id)
+                    {
+                        let sp = var_origin.span();
+                        let return_sp = sub_origin.span();
+                        let mut err = self.tcx.sess.struct_span_err(
+                            sp,
+                            "cannot infer an appropriate lifetime",
+                        );
+                        err.span_label(
+                            return_sp,
+                            "this return type evaluates to the `'static` lifetime...",
+                        );
+                        err.span_label(
+                            sup_origin.span(),
+                            "...but this borrow...",
+                        );
+
+                        let (lifetime, lt_sp_opt) = self.tcx.msg_span_from_free_region(sup_r);
+                        if let Some(lifetime_sp) = lt_sp_opt {
+                            err.span_note(
+                                lifetime_sp,
+                                &format!("...can't outlive {}", lifetime),
+                            );
+                        }
+
+                        // Use the named lifetime if the region has one;
+                        // otherwise fall back to the anonymous `'_`.
+                        let lifetime_name = match sup_r {
+                            RegionKind::ReFree(FreeRegion {
+                                bound_region: BoundRegion::BrNamed(_, ref name), ..
+                            }) => format!("{}", name),
+                            _ => "'_".to_owned(),
+                        };
+                        // Suggest `<snippet> + 'lifetime` only when we can
+                        // actually recover the return-type source text.
+                        if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(return_sp) {
+                            err.span_suggestion(
+                                return_sp,
+                                &format!(
+                                    "you can add a constraint to the return type to make it last \
+                                     less than `'static` and match {}",
+                                    lifetime,
+                                ),
+                                format!("{} + {}", snippet, lifetime_name),
+                            );
+                        }
+                        err.emit();
+                        return Some(ErrorReported);
+                    }
+                }
+                _ => {}
+            }
+        }
+        None
+    }
+}
}
None
}
+
+    /// Returns `true` if `scope_def_id` refers to a function (`TyFnDef`)
+    /// whose return type is an `impl Trait`.
+    pub(super) fn is_return_type_impl_trait(
+        &self,
+        scope_def_id: DefId,
+    ) -> bool {
+        let ret_ty = self.tcx.type_of(scope_def_id);
+        match ret_ty.sty {
+            ty::TyFnDef(_, _) => {
+                let sig = ret_ty.fn_sig(self.tcx);
+                // Erase late-bound regions so we can inspect the output type.
+                let output = self.tcx.erase_late_bound_regions(&sig.output());
+                return output.is_impl_trait();
+            }
+            _ => {}
+        }
+        // Anything that is not a `TyFnDef` cannot have an `impl Trait` return.
+        false
+    }
+
// Here we check for the case where anonymous region
// corresponds to self and if yes, we display E0312.
// FIXME(#42700) - Need to format self properly to
/// For more information about how skolemization for HRTBs works, see
/// the [rustc guide].
///
- /// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/trait-hrtb.html
+ /// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/traits/hrtb.html
pub fn skolemize_late_bound_regions<T>(&self,
binder: &ty::Binder<T>)
-> (T, SkolemizationMap<'tcx>)
> WARNING: This README is obsolete and will be removed soon! For
> more info on how the current borrowck works, see the [rustc guide].
-[rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/mir-borrowck.html
+[rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/mir/borrowck.html
## Terminology
use ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric};
use ty::fold::TypeFoldable;
use ty::relate::RelateResult;
-use traits::{self, ObligationCause, PredicateObligations};
+use traits::{self, ObligationCause, PredicateObligations, TraitEngine};
use rustc_data_structures::unify as ut;
use std::cell::{Cell, RefCell, Ref, RefMut};
use std::collections::BTreeMap;
pub fn unit(self) -> InferOk<'tcx, ()> {
InferOk { value: (), obligations: self.obligations }
}
+
+    /// Extract `value`, registering any obligations into `fulfill_cx`
+    pub fn into_value_registering_obligations(
+        self,
+        infcx: &InferCtxt<'_, '_, 'tcx>,
+        fulfill_cx: &mut impl TraitEngine<'tcx>,
+    ) -> T {
+        let InferOk { value, obligations } = self;
+        // Hand each pending obligation to the fulfillment engine; the
+        // caller is responsible for driving `fulfill_cx` to completion.
+        for obligation in obligations {
+            fulfill_cx.register_predicate_obligation(infcx, obligation);
+        }
+        value
+    }
}
impl<'tcx> InferOk<'tcx, ()> {
use infer::InferCtxt;
use syntax::ast;
use syntax::codemap::Span;
-use traits::{FulfillmentContext, TraitEngine};
+use traits::{FulfillmentContext, TraitEngine, TraitEngineExt};
use ty::{self, Ty, TypeFoldable};
use ty::outlives::Component;
use ty::wf;
pub mod env;
pub mod free_region_map;
pub mod bounds;
-mod obligations;
+pub mod obligations;
use hir::def_id::DefId;
use infer::{self, GenericKind, InferCtxt, RegionObligation, SubregionOrigin, VerifyBound};
+use syntax::ast;
use traits;
-use ty::{self, Ty, TyCtxt, TypeFoldable};
-use ty::subst::{Subst, Substs};
use ty::outlives::Component;
-use syntax::ast;
+use ty::subst::{Subst, Substs};
+use ty::{self, Ty, TyCtxt, TypeFoldable};
impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
/// Registers that the given region obligation must be resolved
) {
debug!(
"register_region_obligation(body_id={:?}, obligation={:?})",
- body_id,
- obligation
+ body_id, obligation
);
self.region_obligations
}
/// Trait queries just want to pass back type obligations "as is"
- pub fn take_registered_region_obligations(
- &self,
- ) -> Vec<(ast::NodeId, RegionObligation<'tcx>)> {
- ::std::mem::replace(
- &mut *self.region_obligations.borrow_mut(),
- vec![],
- )
+ pub fn take_registered_region_obligations(&self) -> Vec<(ast::NodeId, RegionObligation<'tcx>)> {
+ ::std::mem::replace(&mut *self.region_obligations.borrow_mut(), vec![])
}
/// Process the region obligations that must be proven (during
}
}
- let outlives =
- TypeOutlives::new(self, region_bound_pairs, implicit_region_bound, param_env);
+ let outlives = &mut TypeOutlives::new(
+ self,
+ self.tcx,
+ region_bound_pairs,
+ implicit_region_bound,
+ param_env,
+ );
for RegionObligation {
sup_type,
{
debug!(
"process_registered_region_obligations: sup_type={:?} sub_region={:?} cause={:?}",
- sup_type,
- sub_region,
- cause
+ sup_type, sub_region, cause
);
- let origin = SubregionOrigin::from_obligation_cause(
- &cause,
- || infer::RelateParamBound(cause.span, sup_type),
- );
+ let origin = SubregionOrigin::from_obligation_cause(&cause, || {
+ infer::RelateParamBound(cause.span, sup_type)
+ });
+ let sup_type = self.resolve_type_vars_if_possible(&sup_type);
outlives.type_must_outlive(origin, sup_type, sub_region);
}
}
ty: Ty<'tcx>,
region: ty::Region<'tcx>,
) {
- let outlives =
- TypeOutlives::new(self, region_bound_pairs, implicit_region_bound, param_env);
+ let outlives = &mut TypeOutlives::new(
+ self,
+ self.tcx,
+ region_bound_pairs,
+ implicit_region_bound,
+ param_env,
+ );
+ let ty = self.resolve_type_vars_if_possible(&ty);
outlives.type_must_outlive(origin, ty, region);
}
}
-#[must_use] // you ought to invoke `into_accrued_obligations` when you are done =)
-struct TypeOutlives<'cx, 'gcx: 'tcx, 'tcx: 'cx> {
+/// The `TypeOutlives` struct has the job of "lowering" a `T: 'a`
+/// obligation into a series of `'a: 'b` constraints and "verifys", as
+/// described on the module comment. The final constraints are emitted
+/// via a "delegate" of type `D` -- this is usually the `infcx`, which
+/// accrues them into the `region_obligations` code, but for NLL we
+/// use something else.
+pub struct TypeOutlives<'cx, 'gcx: 'tcx, 'tcx: 'cx, D>
+where
+ D: TypeOutlivesDelegate<'tcx>,
+{
// See the comments on `process_registered_region_obligations` for the meaning
// of these fields.
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+ delegate: D,
+ tcx: TyCtxt<'cx, 'gcx, 'tcx>,
region_bound_pairs: &'cx [(ty::Region<'tcx>, GenericKind<'tcx>)],
implicit_region_bound: Option<ty::Region<'tcx>>,
param_env: ty::ParamEnv<'tcx>,
}
-impl<'cx, 'gcx, 'tcx> TypeOutlives<'cx, 'gcx, 'tcx> {
- fn new(
- infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
+pub trait TypeOutlivesDelegate<'tcx> {
+ fn push_sub_region_constraint(
+ &mut self,
+ origin: SubregionOrigin<'tcx>,
+ a: ty::Region<'tcx>,
+ b: ty::Region<'tcx>,
+ );
+
+ fn push_verify(
+ &mut self,
+ origin: SubregionOrigin<'tcx>,
+ kind: GenericKind<'tcx>,
+ a: ty::Region<'tcx>,
+ bound: VerifyBound<'tcx>,
+ );
+}
+
+impl<'cx, 'gcx, 'tcx, D> TypeOutlives<'cx, 'gcx, 'tcx, D>
+where
+ D: TypeOutlivesDelegate<'tcx>,
+{
+ pub fn new(
+ delegate: D,
+ tcx: TyCtxt<'cx, 'gcx, 'tcx>,
region_bound_pairs: &'cx [(ty::Region<'tcx>, GenericKind<'tcx>)],
implicit_region_bound: Option<ty::Region<'tcx>>,
param_env: ty::ParamEnv<'tcx>,
) -> Self {
Self {
- infcx,
+ delegate,
+ tcx,
region_bound_pairs,
implicit_region_bound,
param_env,
/// - `origin`, the reason we need this constraint
/// - `ty`, the type `T`
/// - `region`, the region `'a`
- fn type_must_outlive(
- &self,
+ pub fn type_must_outlive(
+ &mut self,
origin: infer::SubregionOrigin<'tcx>,
ty: Ty<'tcx>,
region: ty::Region<'tcx>,
) {
- let ty = self.infcx.resolve_type_vars_if_possible(&ty);
-
debug!(
"type_must_outlive(ty={:?}, region={:?}, origin={:?})",
- ty,
- region,
- origin
+ ty, region, origin
);
assert!(!ty.has_escaping_regions());
- let components = self.tcx().outlives_components(ty);
+ let components = self.tcx.outlives_components(ty);
self.components_must_outlive(origin, components, region);
}
- fn tcx(&self) -> TyCtxt<'cx, 'gcx, 'tcx> {
- self.infcx.tcx
- }
-
fn components_must_outlive(
- &self,
+ &mut self,
origin: infer::SubregionOrigin<'tcx>,
components: Vec<Component<'tcx>>,
region: ty::Region<'tcx>,
let origin = origin.clone();
match component {
Component::Region(region1) => {
- self.infcx.sub_regions(origin, region, region1);
+ self.delegate.push_sub_region_constraint(origin, region, region1);
}
Component::Param(param_ty) => {
self.param_ty_must_outlive(origin, region, param_ty);
// ignore this, we presume it will yield an error
// later, since if a type variable is not resolved by
// this point it never will be
- self.infcx.tcx.sess.delay_span_bug(
+ self.tcx.sess.delay_span_bug(
origin.span(),
&format!("unresolved inference variable in outlives: {:?}", v),
);
}
fn param_ty_must_outlive(
- &self,
+ &mut self,
origin: infer::SubregionOrigin<'tcx>,
region: ty::Region<'tcx>,
param_ty: ty::ParamTy,
) {
debug!(
"param_ty_must_outlive(region={:?}, param_ty={:?}, origin={:?})",
- region,
- param_ty,
- origin
+ region, param_ty, origin
);
let verify_bound = self.param_bound(param_ty);
let generic = GenericKind::Param(param_ty);
- self.infcx
- .verify_generic_bound(origin, generic, region, verify_bound);
+ self.delegate
+ .push_verify(origin, generic, region, verify_bound);
}
fn projection_must_outlive(
- &self,
+ &mut self,
origin: infer::SubregionOrigin<'tcx>,
region: ty::Region<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
) {
debug!(
"projection_must_outlive(region={:?}, projection_ty={:?}, origin={:?})",
- region,
- projection_ty,
- origin
+ region, projection_ty, origin
);
// This case is thorny for inference. The fundamental problem is
}
for r in projection_ty.substs.regions() {
- self.infcx.sub_regions(origin.clone(), region, r);
+ self.delegate.push_sub_region_constraint(origin.clone(), region, r);
}
return;
.any(|r| env_bounds.contains(&r))
{
debug!("projection_must_outlive: unique declared bound appears in trait ref");
- self.infcx.sub_regions(origin.clone(), region, unique_bound);
+ self.delegate
+ .push_sub_region_constraint(origin.clone(), region, unique_bound);
return;
}
}
// even though a satisfactory solution exists.
let verify_bound = self.projection_bound(env_bounds, projection_ty);
let generic = GenericKind::Projection(projection_ty);
- self.infcx
- .verify_generic_bound(origin, generic.clone(), region, verify_bound);
+ self.delegate
+ .push_verify(origin, generic.clone(), region, verify_bound);
}
fn type_bound(&self, ty: Ty<'tcx>) -> VerifyBound<'tcx> {
) -> VerifyBound<'tcx> {
debug!(
"projection_bound(declared_bounds={:?}, projection_ty={:?})",
- declared_bounds,
- projection_ty
+ declared_bounds, projection_ty
);
// see the extensive comment in projection_must_outlive
- let ty = self.infcx
+ let ty = self
.tcx
.mk_projection(projection_ty.item_def_id, projection_ty.substs);
let recursive_bound = self.recursive_type_bound(ty);
&self,
generic: GenericKind<'tcx>,
) -> Vec<ty::Region<'tcx>> {
- let tcx = self.tcx();
+ let tcx = self.tcx;
// To start, collect bounds from user environment. Note that
// parameter environments are already elaborated, so we don't
debug!("projection_bounds(projection_ty={:?})", projection_ty);
let mut bounds = self.region_bounds_declared_on_associated_item(projection_ty.item_def_id);
for r in &mut bounds {
- *r = r.subst(self.tcx(), projection_ty.substs);
+ *r = r.subst(self.tcx, projection_ty.substs);
}
bounds
}
&self,
assoc_item_def_id: DefId,
) -> Vec<ty::Region<'tcx>> {
- let tcx = self.tcx();
+ let tcx = self.tcx;
let assoc_item = tcx.associated_item(assoc_item_def_id);
let trait_def_id = assoc_item.container.assert_trait();
let trait_predicates = tcx.predicates_of(trait_def_id);
.collect()
}
}
+
+impl<'cx, 'gcx, 'tcx> TypeOutlivesDelegate<'tcx> for &'cx InferCtxt<'cx, 'gcx, 'tcx> {
+ fn push_sub_region_constraint(
+ &mut self,
+ origin: SubregionOrigin<'tcx>,
+ a: ty::Region<'tcx>,
+ b: ty::Region<'tcx>,
+ ) {
+ self.sub_regions(origin, a, b)
+ }
+
+ fn push_verify(
+ &mut self,
+ origin: SubregionOrigin<'tcx>,
+ kind: GenericKind<'tcx>,
+ a: ty::Region<'tcx>,
+ bound: VerifyBound<'tcx>,
+ ) {
+ self.verify_generic_bound(origin, kind, a, bound)
+ }
+}
+
> WARNING: This README is obsolete and will be removed soon! For
> more info on how the current borrowck works, see the [rustc guide].
-[rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/mir-borrowck.html
+[rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/mir/borrowck.html
## Terminology
pub mod allocator;
pub mod borrowck;
pub mod expr_use_visitor;
- pub mod const_val;
pub mod cstore;
pub mod dataflow;
pub mod dead;
"detects name collision with an existing but unstable method"
}
+declare_lint! {
+ pub IRREFUTABLE_LET_PATTERNS,
+ Deny,
+ "detects irrefutable patterns in if-let and while-let statements"
+}
+
declare_lint! {
pub UNUSED_LABELS,
Allow,
"warn about documentation intra links resolution failure"
}
+declare_lint! {
+ pub WHERE_CLAUSES_OBJECT_SAFETY,
+ Warn,
+ "checks the object safety of where clauses"
+}
+
/// Does nothing as a lint pass, but registers some `Lint`s
/// which are used by other parts of the compiler.
#[derive(Copy, Clone)]
BARE_TRAIT_OBJECTS,
ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE,
UNSTABLE_NAME_COLLISIONS,
+ IRREFUTABLE_LET_PATTERNS,
DUPLICATE_ASSOCIATED_TYPE_BINDINGS,
DUPLICATE_MACRO_EXPORTS,
INTRA_DOC_LINK_RESOLUTION_FAILURE,
+ WHERE_CLAUSES_OBJECT_SAFETY,
)
}
}
sess: Option<&Session>,
from_plugin: bool,
pass: &Box<P>) {
- for &lint in pass.get_lints() {
- self.lints.push((*lint, from_plugin));
+ for lint in pass.get_lints() {
+ self.lints.push((lint, from_plugin));
- let id = LintId::of(*lint);
+ let id = LintId::of(lint);
if self.by_name.insert(lint.name_lower(), Id(id)).is_some() {
let msg = format!("duplicate specification of lint {}", lint.name_lower());
match (sess, from_plugin) {
use errors::{DiagnosticBuilder, DiagnosticId};
use hir::def_id::{CrateNum, LOCAL_CRATE};
-use hir::intravisit::{self, FnKind};
+use hir::intravisit;
use hir;
use lint::builtin::BuiltinLintDiagnostics;
use session::{Session, DiagnosticMessageId};
#[macro_export]
macro_rules! lint_array {
($( $lint:expr ),* $(,)?) => {{
- static ARRAY: LintArray = &[ $( &$lint ),* ];
- ARRAY
+ vec![$($lint),*]
}}
}
-pub type LintArray = &'static [&'static &'static Lint];
+pub type LintArray = Vec<&'static Lint>;
pub trait LintPass {
/// Get descriptions of the lints this `LintPass` object can emit.
fn get_lints(&self) -> LintArray;
}
+#[macro_export]
+macro_rules! late_lint_methods {
+ ($macro:path, $args:tt, [$hir:tt]) => (
+ $macro!($args, [$hir], [
+ fn check_body(a: &$hir hir::Body);
+ fn check_body_post(a: &$hir hir::Body);
+ fn check_name(a: Span, b: ast::Name);
+ fn check_crate(a: &$hir hir::Crate);
+ fn check_crate_post(a: &$hir hir::Crate);
+ fn check_mod(a: &$hir hir::Mod, b: Span, c: ast::NodeId);
+ fn check_mod_post(a: &$hir hir::Mod, b: Span, c: ast::NodeId);
+ fn check_foreign_item(a: &$hir hir::ForeignItem);
+ fn check_foreign_item_post(a: &$hir hir::ForeignItem);
+ fn check_item(a: &$hir hir::Item);
+ fn check_item_post(a: &$hir hir::Item);
+ fn check_local(a: &$hir hir::Local);
+ fn check_block(a: &$hir hir::Block);
+ fn check_block_post(a: &$hir hir::Block);
+ fn check_stmt(a: &$hir hir::Stmt);
+ fn check_arm(a: &$hir hir::Arm);
+ fn check_pat(a: &$hir hir::Pat);
+ fn check_decl(a: &$hir hir::Decl);
+ fn check_expr(a: &$hir hir::Expr);
+ fn check_expr_post(a: &$hir hir::Expr);
+ fn check_ty(a: &$hir hir::Ty);
+ fn check_generic_param(a: &$hir hir::GenericParam);
+ fn check_generics(a: &$hir hir::Generics);
+ fn check_where_predicate(a: &$hir hir::WherePredicate);
+ fn check_poly_trait_ref(a: &$hir hir::PolyTraitRef, b: hir::TraitBoundModifier);
+ fn check_fn(
+ a: hir::intravisit::FnKind<$hir>,
+ b: &$hir hir::FnDecl,
+ c: &$hir hir::Body,
+ d: Span,
+ e: ast::NodeId);
+ fn check_fn_post(
+ a: hir::intravisit::FnKind<$hir>,
+ b: &$hir hir::FnDecl,
+ c: &$hir hir::Body,
+ d: Span,
+ e: ast::NodeId
+ );
+ fn check_trait_item(a: &$hir hir::TraitItem);
+ fn check_trait_item_post(a: &$hir hir::TraitItem);
+ fn check_impl_item(a: &$hir hir::ImplItem);
+ fn check_impl_item_post(a: &$hir hir::ImplItem);
+ fn check_struct_def(
+ a: &$hir hir::VariantData,
+ b: ast::Name,
+ c: &$hir hir::Generics,
+ d: ast::NodeId
+ );
+ fn check_struct_def_post(
+ a: &$hir hir::VariantData,
+ b: ast::Name,
+ c: &$hir hir::Generics,
+ d: ast::NodeId
+ );
+ fn check_struct_field(a: &$hir hir::StructField);
+ fn check_variant(a: &$hir hir::Variant, b: &$hir hir::Generics);
+ fn check_variant_post(a: &$hir hir::Variant, b: &$hir hir::Generics);
+ fn check_lifetime(a: &$hir hir::Lifetime);
+ fn check_path(a: &$hir hir::Path, b: ast::NodeId);
+ fn check_attribute(a: &$hir ast::Attribute);
+
+ /// Called when entering a syntax node that can have lint attributes such
+ /// as `#[allow(...)]`. Called with *all* the attributes of that node.
+ fn enter_lint_attrs(a: &$hir [ast::Attribute]);
+
+ /// Counterpart to `enter_lint_attrs`.
+ fn exit_lint_attrs(a: &$hir [ast::Attribute]);
+ ]);
+ )
+}
/// Trait for types providing lint checks.
///
//
// FIXME: eliminate the duplication with `Visitor`. But this also
// contains a few lint-specific methods with no equivalent in `Visitor`.
-pub trait LateLintPass<'a, 'tcx>: LintPass {
- fn check_body(&mut self, _: &LateContext, _: &'tcx hir::Body) { }
- fn check_body_post(&mut self, _: &LateContext, _: &'tcx hir::Body) { }
- fn check_name(&mut self, _: &LateContext, _: Span, _: ast::Name) { }
- fn check_crate(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Crate) { }
- fn check_crate_post(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Crate) { }
- fn check_mod(&mut self,
- _: &LateContext<'a, 'tcx>,
- _: &'tcx hir::Mod,
- _: Span,
- _: ast::NodeId) { }
- fn check_mod_post(&mut self,
- _: &LateContext<'a, 'tcx>,
- _: &'tcx hir::Mod,
- _: Span,
- _: ast::NodeId) { }
- fn check_foreign_item(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::ForeignItem) { }
- fn check_foreign_item_post(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::ForeignItem) { }
- fn check_item(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Item) { }
- fn check_item_post(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Item) { }
- fn check_local(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Local) { }
- fn check_block(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Block) { }
- fn check_block_post(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Block) { }
- fn check_stmt(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Stmt) { }
- fn check_arm(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Arm) { }
- fn check_pat(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Pat) { }
- fn check_decl(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Decl) { }
- fn check_expr(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Expr) { }
- fn check_expr_post(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Expr) { }
- fn check_ty(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Ty) { }
- fn check_generic_param(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::GenericParam) { }
- fn check_generics(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Generics) { }
- fn check_where_predicate(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::WherePredicate) { }
- fn check_poly_trait_ref(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::PolyTraitRef,
- _: hir::TraitBoundModifier) { }
- fn check_fn(&mut self,
- _: &LateContext<'a, 'tcx>,
- _: FnKind<'tcx>,
- _: &'tcx hir::FnDecl,
- _: &'tcx hir::Body,
- _: Span,
- _: ast::NodeId) { }
- fn check_fn_post(&mut self,
- _: &LateContext<'a, 'tcx>,
- _: FnKind<'tcx>,
- _: &'tcx hir::FnDecl,
- _: &'tcx hir::Body,
- _: Span,
- _: ast::NodeId) { }
- fn check_trait_item(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::TraitItem) { }
- fn check_trait_item_post(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::TraitItem) { }
- fn check_impl_item(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::ImplItem) { }
- fn check_impl_item_post(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::ImplItem) { }
- fn check_struct_def(&mut self,
- _: &LateContext<'a, 'tcx>,
- _: &'tcx hir::VariantData,
- _: ast::Name,
- _: &'tcx hir::Generics,
- _: ast::NodeId) { }
- fn check_struct_def_post(&mut self,
- _: &LateContext<'a, 'tcx>,
- _: &'tcx hir::VariantData,
- _: ast::Name,
- _: &'tcx hir::Generics,
- _: ast::NodeId) { }
- fn check_struct_field(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::StructField) { }
- fn check_variant(&mut self,
- _: &LateContext<'a, 'tcx>,
- _: &'tcx hir::Variant,
- _: &'tcx hir::Generics) { }
- fn check_variant_post(&mut self,
- _: &LateContext<'a, 'tcx>,
- _: &'tcx hir::Variant,
- _: &'tcx hir::Generics) { }
- fn check_lifetime(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Lifetime) { }
- fn check_path(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx hir::Path, _: ast::NodeId) { }
- fn check_attribute(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx ast::Attribute) { }
- /// Called when entering a syntax node that can have lint attributes such
- /// as `#[allow(...)]`. Called with *all* the attributes of that node.
- fn enter_lint_attrs(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx [ast::Attribute]) { }
+macro_rules! expand_lint_pass_methods {
+ ($context:ty, [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => (
+ $(#[inline(always)] fn $name(&mut self, $context, $(_: $arg),*) {})*
+ )
+}
- /// Counterpart to `enter_lint_attrs`.
- fn exit_lint_attrs(&mut self, _: &LateContext<'a, 'tcx>, _: &'tcx [ast::Attribute]) { }
+macro_rules! declare_late_lint_pass {
+ ([], [$hir:tt], [$($methods:tt)*]) => (
+ pub trait LateLintPass<'a, $hir>: LintPass {
+ expand_lint_pass_methods!(&LateContext<'a, $hir>, [$($methods)*]);
+ }
+ )
+}
+
+late_lint_methods!(declare_late_lint_pass, [], ['tcx]);
+
+#[macro_export]
+macro_rules! expand_combined_late_lint_pass_method {
+ ([$($passes:ident),*], $self: ident, $name: ident, $params:tt) => ({
+ $($self.$passes.$name $params;)*
+ })
+}
+
+#[macro_export]
+macro_rules! expand_combined_late_lint_pass_methods {
+ ($passes:tt, [$($(#[$attr:meta])* fn $name:ident($($param:ident: $arg:ty),*);)*]) => (
+ $(fn $name(&mut self, context: &LateContext<'a, 'tcx>, $($param: $arg),*) {
+ expand_combined_late_lint_pass_method!($passes, self, $name, (context, $($param),*));
+ })*
+ )
+}
+
+#[macro_export]
+macro_rules! declare_combined_late_lint_pass {
+ ([$name:ident, [$($passes:ident: $constructor:expr,)*]], [$hir:tt], $methods:tt) => (
+ #[allow(non_snake_case)]
+ struct $name {
+ $($passes: $passes,)*
+ }
+
+ impl $name {
+ fn new() -> Self {
+ Self {
+ $($passes: $constructor,)*
+ }
+ }
+ }
+
+ impl<'a, 'tcx> LateLintPass<'a, 'tcx> for $name {
+ expand_combined_late_lint_pass_methods!([$($passes),*], $methods);
+ }
+
+ impl LintPass for $name {
+ fn get_lints(&self) -> LintArray {
+ let mut lints = Vec::new();
+ $(lints.extend_from_slice(&self.$passes.get_lints());)*
+ lints
+ }
+ }
+ )
}
pub trait EarlyLintPass: LintPass {
#[macro_export]
macro_rules! __impl_stable_hash_field {
- (DECL IGNORED) => (_);
- (DECL $name:ident) => (ref $name);
- (USE IGNORED $ctx:expr, $hasher:expr) => ({});
- (USE $name:ident, $ctx:expr, $hasher:expr) => ($name.hash_stable($ctx, $hasher));
+ ($field:ident, $ctx:expr, $hasher:expr) => ($field.hash_stable($ctx, $hasher));
+ ($field:ident, $ctx:expr, $hasher:expr, _) => ({ let _ = $field; });
+ ($field:ident, $ctx:expr, $hasher:expr, $delegate:expr) => ($delegate.hash_stable($ctx, $hasher));
}
#[macro_export]
macro_rules! impl_stable_hash_for {
- (enum $enum_name:path { $( $variant:ident $( ( $($arg:ident),* ) )* ),* $(,)* }) => {
+ (enum $enum_name:path { $( $variant:ident $( ( $($field:ident $(-> $delegate:tt)?),* ) )* ),* $(,)? }) => {
impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $enum_name {
#[inline]
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
match *self {
$(
- $variant $( ( $( __impl_stable_hash_field!(DECL $arg) ),* ) )* => {
- $($( __impl_stable_hash_field!(USE $arg, __ctx, __hasher) );*)*
+ $variant $( ( $(ref $field),* ) )* => {
+ $($( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*)*
}
)*
}
}
}
};
- (struct $struct_name:path { $($field:ident),* }) => {
+ (struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name {
#[inline]
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
$(ref $field),*
} = *self;
- $( $field.hash_stable(__ctx, __hasher));*
+ $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
}
}
};
- (tuple_struct $struct_name:path { $($field:ident),* }) => {
+ (tuple_struct $struct_name:path { $($field:ident $(-> $delegate:tt)?),* $(,)? }) => {
impl<'a, 'tcx> ::rustc_data_structures::stable_hasher::HashStable<$crate::ich::StableHashingContext<'a>> for $struct_name {
#[inline]
fn hash_stable<W: ::rustc_data_structures::stable_hasher::StableHasherResult>(&self,
$(ref $field),*
) = *self;
- $( $field.hash_stable(__ctx, __hasher));*
+ $( __impl_stable_hash_field!($field, __ctx, __hasher $(, $delegate)?) );*
}
}
};
+++ /dev/null
-// Copyright 2012-2016 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use hir::def_id::DefId;
-use ty;
-use ty::subst::Substs;
-use ty::query::TyCtxtAt;
-use mir::interpret::ConstValue;
-use errors::DiagnosticBuilder;
-
-use graphviz::IntoCow;
-use syntax_pos::Span;
-use syntax::ast;
-
-use std::borrow::Cow;
-use rustc_data_structures::sync::Lrc;
-
-pub type EvalResult<'tcx> = Result<&'tcx ty::Const<'tcx>, ConstEvalErr<'tcx>>;
-
-#[derive(Copy, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Eq, PartialEq, Ord, PartialOrd)]
-pub enum ConstVal<'tcx> {
- Unevaluated(DefId, &'tcx Substs<'tcx>),
- Value(ConstValue<'tcx>),
-}
-
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub struct ConstEvalErr<'tcx> {
- pub span: Span,
- pub kind: Lrc<ErrKind<'tcx>>,
-}
-
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub enum ErrKind<'tcx> {
-
- CouldNotResolve,
- TypeckError,
- CheckMatchError,
- Miri(::mir::interpret::EvalError<'tcx>, Vec<FrameInfo>),
-}
-
-#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
-pub struct FrameInfo {
- pub span: Span,
- pub location: String,
- pub lint_root: Option<ast::NodeId>,
-}
-
-#[derive(Clone, Debug)]
-pub enum ConstEvalErrDescription<'a, 'tcx: 'a> {
- Simple(Cow<'a, str>),
- Backtrace(&'a ::mir::interpret::EvalError<'tcx>, &'a [FrameInfo]),
-}
-
-impl<'a, 'tcx> ConstEvalErrDescription<'a, 'tcx> {
- /// Return a one-line description of the error, for lints and such
- pub fn into_oneline(self) -> Cow<'a, str> {
- match self {
- ConstEvalErrDescription::Simple(simple) => simple,
- ConstEvalErrDescription::Backtrace(miri, _) => format!("{}", miri).into_cow(),
- }
- }
-}
-
-impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> {
- pub fn description(&'a self) -> ConstEvalErrDescription<'a, 'tcx> {
- use self::ErrKind::*;
- use self::ConstEvalErrDescription::*;
-
- macro_rules! simple {
- ($msg:expr) => ({ Simple($msg.into_cow()) });
- ($fmt:expr, $($arg:tt)+) => ({
- Simple(format!($fmt, $($arg)+).into_cow())
- })
- }
-
- match *self.kind {
- CouldNotResolve => simple!("could not resolve"),
- TypeckError => simple!("type-checking failed"),
- CheckMatchError => simple!("match-checking failed"),
- Miri(ref err, ref trace) => Backtrace(err, trace),
- }
- }
-
- pub fn struct_error(&self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- message: &str)
- -> Option<DiagnosticBuilder<'tcx>>
- {
- self.struct_generic(tcx, message, None, true)
- }
-
- pub fn report_as_error(&self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- message: &str
- ) {
- let err = self.struct_generic(tcx, message, None, true);
- if let Some(mut err) = err {
- err.emit();
- }
- }
-
- pub fn report_as_lint(&self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- message: &str,
- lint_root: ast::NodeId,
- ) {
- let lint = self.struct_generic(
- tcx,
- message,
- Some(lint_root),
- false,
- );
- if let Some(mut lint) = lint {
- lint.emit();
- }
- }
-
- fn struct_generic(
- &self,
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- message: &str,
- lint_root: Option<ast::NodeId>,
- as_err: bool,
- ) -> Option<DiagnosticBuilder<'tcx>> {
- let (msg, frames): (_, &[_]) = match *self.kind {
- ErrKind::TypeckError | ErrKind::CheckMatchError => return None,
- ErrKind::Miri(ref miri, ref frames) => {
- match miri.kind {
- ::mir::interpret::EvalErrorKind::TypeckError |
- ::mir::interpret::EvalErrorKind::Layout(_) => return None,
- ::mir::interpret::EvalErrorKind::ReferencedConstant(ref inner) => {
- inner.struct_generic(tcx, "referenced constant", lint_root, as_err)?.emit();
- (miri.to_string(), frames)
- },
- _ => (miri.to_string(), frames),
- }
- }
- _ => (self.description().into_oneline().to_string(), &[]),
- };
- trace!("reporting const eval failure at {:?}", self.span);
- let mut err = if as_err {
- struct_error(tcx, message)
- } else {
- let node_id = frames
- .iter()
- .rev()
- .filter_map(|frame| frame.lint_root)
- .next()
- .or(lint_root)
- .expect("some part of a failing const eval must be local");
- tcx.struct_span_lint_node(
- ::rustc::lint::builtin::CONST_ERR,
- node_id,
- tcx.span,
- message,
- )
- };
- err.span_label(self.span, msg);
- for FrameInfo { span, location, .. } in frames {
- err.span_label(*span, format!("inside call to `{}`", location));
- }
- Some(err)
- }
-}
-
-pub fn struct_error<'a, 'gcx, 'tcx>(
- tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
- msg: &str,
-) -> DiagnosticBuilder<'tcx> {
- struct_span_err!(tcx.sess, tcx.span, E0080, "{}", msg)
-}
if !self.symbol_is_live(impl_item.id, None) {
self.warn_dead_code(impl_item.id,
impl_item.span,
- impl_item.name,
+ impl_item.ident.name,
"associated const",
"used");
}
hir::ImplItemKind::Method(_, body_id) => {
if !self.symbol_is_live(impl_item.id, None) {
let span = self.tcx.sess.codemap().def_span(impl_item.span);
- self.warn_dead_code(impl_item.id, span, impl_item.name, "method", "used");
+ self.warn_dead_code(impl_item.id, span, impl_item.ident.name, "method", "used");
}
self.visit_nested_body(body_id)
}
let body = ir.tcx.hir.body(body_id);
for arg in &body.arguments {
- arg.pat.each_binding(|_bm, hir_id, _x, path1| {
+ arg.pat.each_binding(|_bm, hir_id, _x, ident| {
debug!("adding argument {:?}", hir_id);
- let name = path1.node;
- fn_maps.add_variable(Arg(hir_id, name));
+ fn_maps.add_variable(Arg(hir_id, ident.name));
})
};
}
}
- pat.each_binding(|_bm, hir_id, _sp, path1| {
- let name = path1.node;
- ir.add_live_node_for_node(hir_id, VarDefNode(path1.span));
+ pat.each_binding(|_bm, hir_id, _sp, ident| {
+ ir.add_live_node_for_node(hir_id, VarDefNode(ident.span));
ir.add_variable(Local(LocalInfo {
id: hir_id,
- name,
+ name: ident.name,
is_shorthand: shorthand_field_ids.contains(&hir_id)
}));
});
},
None => {
this.pat_bindings(&local.pat, |this, ln, var, sp, id| {
- let span = local.pat.simple_span().unwrap_or(sp);
+ let span = local.pat.simple_ident().map_or(sp, |ident| ident.span);
this.warn_about_unused(span, id, ln, var);
})
}
fn warn_about_unused_args(&self, body: &hir::Body, entry_ln: LiveNode) {
for arg in &body.arguments {
- arg.pat.each_binding(|_bm, hir_id, _, path1| {
- let sp = path1.span;
+ arg.pat.each_binding(|_bm, hir_id, _, ident| {
+ let sp = ident.span;
let var = self.variable(hir_id, sp);
// Ignore unused self.
- let name = path1.node;
- if name != keywords::SelfValue.name() {
+ if ident.name != keywords::SelfValue.name() {
if !self.warn_about_unused(sp, hir_id, entry_ln, var) {
if self.live_on_entry(entry_ln, var).is_none() {
self.report_dead_assign(hir_id, sp, var, true);
// and how it is located, as well as the mutability of the memory in
// which the value is stored.
//
-// *WARNING* The field `cmt.type` is NOT necessarily the same as the
+// *WARNING* The field `cmt.ty` is NOT necessarily the same as the
// result of `node_id_to_type(cmt.id)`. This is because the `id` is
// always the `id` of the node producing the type; in an expression
// like `*x`, the type of this deref node is the deref'd type (`T`),
//! For more information about how MIR-based region-checking works,
//! see the [rustc guide].
//!
-//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/mir-borrowck.html
+//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/mir/borrowck.html
use ich::{StableHashingContext, NodeIdHashingMode};
use util::nodemap::{FxHashMap, FxHashSet};
let def_id = hir_map.local_def_id(param.id);
let origin = LifetimeDefOrigin::from_param(param);
debug!("Region::early: index={} def_id={:?}", i, def_id);
- (param.name, Region::EarlyBound(i, def_id, origin))
+ (param.name.modern(), Region::EarlyBound(i, def_id, origin))
}
fn late(hir_map: &Map, param: &GenericParam) -> (ParamName, Region) {
def_id,
origin,
);
- (param.name, Region::LateBound(depth, def_id, origin))
+ (param.name.modern(), Region::LateBound(depth, def_id, origin))
}
fn late_anon(index: &Cell<u32>) -> Region {
is_in_fn_syntax: bool,
/// List of labels in the function/method currently under analysis.
- labels_in_fn: Vec<(ast::Name, Span)>,
+ labels_in_fn: Vec<ast::Ident>,
/// Cache for cross-crate per-definition object lifetime defaults.
xcrate_object_lifetime_defaults: DefIdMap<Vec<ObjectLifetimeDefault>>,
GenericParamKind::Lifetime { .. } => {
let (name, reg) = Region::early(&self.tcx.hir, &mut index, ¶m);
if let hir::ParamName::Plain(param_name) = name {
- if param_name == keywords::UnderscoreLifetime.name() {
+ if param_name.name == keywords::UnderscoreLifetime.name() {
// Pick the elided lifetime "definition" if one exists
// and use it to make an elision scope.
elision = Some(reg);
struct GatherLabels<'a, 'tcx: 'a> {
tcx: TyCtxt<'a, 'tcx, 'tcx>,
scope: ScopeRef<'a>,
- labels_in_fn: &'a mut Vec<(ast::Name, Span)>,
+ labels_in_fn: &'a mut Vec<ast::Ident>,
}
let mut gather = GatherLabels {
}
fn visit_expr(&mut self, ex: &hir::Expr) {
- if let Some((label, label_span)) = expression_label(ex) {
- for &(prior, prior_span) in &self.labels_in_fn[..] {
+ if let Some(label) = expression_label(ex) {
+ for prior_label in &self.labels_in_fn[..] {
// FIXME (#24278): non-hygienic comparison
- if label == prior {
+ if label.name == prior_label.name {
signal_shadowing_problem(
self.tcx,
- label,
- original_label(prior_span),
- shadower_label(label_span),
+ label.name,
+ original_label(prior_label.span),
+ shadower_label(label.span),
);
}
}
- check_if_label_shadows_lifetime(self.tcx, self.scope, label, label_span);
+ check_if_label_shadows_lifetime(self.tcx, self.scope, label);
- self.labels_in_fn.push((label, label_span));
+ self.labels_in_fn.push(label);
}
intravisit::walk_expr(self, ex)
}
}
- fn expression_label(ex: &hir::Expr) -> Option<(ast::Name, Span)> {
+ fn expression_label(ex: &hir::Expr) -> Option<ast::Ident> {
match ex.node {
- hir::ExprWhile(.., Some(label)) | hir::ExprLoop(_, Some(label), _) => {
- Some((label.name, label.span))
- }
+ hir::ExprWhile(.., Some(label)) |
+ hir::ExprLoop(_, Some(label), _) => Some(label.ident),
_ => None,
}
}
fn check_if_label_shadows_lifetime(
tcx: TyCtxt<'_, '_, '_>,
mut scope: ScopeRef<'_>,
- label: ast::Name,
- label_span: Span,
+ label: ast::Ident,
) {
loop {
match *scope {
ref lifetimes, s, ..
} => {
// FIXME (#24278): non-hygienic comparison
- let param_name = hir::ParamName::Plain(label);
- if let Some(def) = lifetimes.get(&param_name) {
+ if let Some(def) = lifetimes.get(&hir::ParamName::Plain(label.modern())) {
let node_id = tcx.hir.as_local_node_id(def.id().unwrap()).unwrap();
signal_shadowing_problem(
tcx,
- label,
+ label.name,
original_lifetime(tcx.hir.span(node_id)),
- shadower_label(label_span),
+ shadower_label(label.span),
);
return;
}
generics.params.iter().find_map(|param| match param.kind {
GenericParamKind::Lifetime { .. } => {
if i == 0 {
- return Some(param.name.name().to_string());
+ return Some(param.name.ident().to_string());
}
i -= 1;
None
fn add_bounds(set: &mut Set1<hir::LifetimeName>, bounds: &[hir::GenericBound]) {
for bound in bounds {
if let hir::GenericBound::Outlives(ref lifetime) = *bound {
- set.insert(lifetime.name);
+ set.insert(lifetime.name.modern());
}
}
}
debug!("node id first={:?}", node_id);
if let Some((id, span, name)) = match self.tcx.hir.get(node_id) {
hir::map::NodeLifetime(hir_lifetime) => {
- Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.name()))
+ Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.ident()))
}
hir::map::NodeGenericParam(param) => {
- Some((param.id, param.span, param.name.name()))
+ Some((param.id, param.span, param.name.ident()))
}
_ => None,
} {
let node_id = self.tcx.hir.as_local_node_id(def_id).unwrap();
if let Some((id, span, name)) = match self.tcx.hir.get(node_id) {
hir::map::NodeLifetime(hir_lifetime) => {
- Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.name()))
+ Some((hir_lifetime.id, hir_lifetime.span, hir_lifetime.name.ident()))
}
hir::map::NodeGenericParam(param) => {
- Some((param.id, param.span, param.name.name()))
+ Some((param.id, param.span, param.name.ident()))
}
_ => None,
} {
LifetimeName::Param(param_name) => param_name,
_ => bug!("expected LifetimeName::Param"),
};
- if let Some(&def) = lifetimes.get(&name) {
+ if let Some(&def) = lifetimes.get(&name.modern()) {
break Some(def.shifted(late_depth));
} else {
late_depth += 1;
lifetime_ref.span,
E0261,
"use of undeclared lifetime name `{}`",
- lifetime_ref.name.name()
+ lifetime_ref
).span_label(lifetime_ref.span, "undeclared lifetime")
.emit();
}
fn visit_fn_like_elision(
&mut self,
- inputs: &'tcx [P<hir::Ty>],
+ inputs: &'tcx [hir::Ty],
output: Option<&'tcx P<hir::Ty>>,
) {
debug!("visit_fn_like_elision: enter");
}).collect();
for (i, (lifetime_i, lifetime_i_name)) in lifetimes.iter().enumerate() {
if let hir::ParamName::Plain(_) = lifetime_i_name {
- let name = lifetime_i_name.name();
+ let name = lifetime_i_name.ident().name;
if name == keywords::UnderscoreLifetime.name() ||
name == keywords::StaticLifetime.name() {
let mut err = struct_span_err!(
lifetime_i.span,
E0262,
"invalid lifetime parameter name: `{}`",
- name
+ lifetime_i.name.ident(),
);
err.span_label(
lifetime_i.span,
lifetime_j.span,
E0263,
"lifetime name `{}` declared twice in the same scope",
- lifetime_j.name.name()
+ lifetime_j.name.ident()
).span_label(lifetime_j.span, "declared twice")
.span_label(lifetime_i.span, "previous declaration here")
.emit();
lifetime_i.span.to(lt.span),
&format!(
"unnecessary lifetime parameter `{}`",
- lifetime_i.name.name(),
+ lifetime_i.name.ident(),
),
).help(&format!(
"you can use the `'static` lifetime directly, in place \
of `{}`",
- lifetime_i.name.name(),
+ lifetime_i.name.ident(),
)).emit();
}
hir::LifetimeName::Param(_)
mut old_scope: ScopeRef,
param: &'tcx hir::GenericParam,
) {
- for &(label, label_span) in &self.labels_in_fn {
+ for label in &self.labels_in_fn {
// FIXME (#24278): non-hygienic comparison
- if param.name.name() == label {
+ if param.name.ident().name == label.name {
signal_shadowing_problem(
self.tcx,
- label,
- original_label(label_span),
+ label.name,
+ original_label(label.span),
shadower_lifetime(&param),
);
return;
Scope::Binder {
ref lifetimes, s, ..
} => {
- if let Some(&def) = lifetimes.get(&param.name) {
+ if let Some(&def) = lifetimes.get(&param.name.modern()) {
let node_id = self.tcx.hir.as_local_node_id(def.id().unwrap()).unwrap();
signal_shadowing_problem(
self.tcx,
- param.name.name(),
+ param.name.ident().name,
original_lifetime(self.tcx.hir.span(node_id)),
shadower_lifetime(&param),
);
hir::GenericParamKind::Lifetime { .. } => {
if !param.bounds.is_empty() {
// `'a: 'b` means both `'a` and `'b` are referenced
- appears_in_where_clause.regions.insert(hir::LifetimeName::Param(param.name));
+ appears_in_where_clause
+ .regions.insert(hir::LifetimeName::Param(param.name.modern()));
}
}
hir::GenericParamKind::Type { .. } => {}
// - do not appear in the where-clauses
// - are not implicitly captured by `impl Trait`
for param in &generics.params {
- let lt_name = hir::LifetimeName::Param(param.name);
+ let lt_name = hir::LifetimeName::Param(param.name.modern());
// appears in the where clauses? early-bound.
if appears_in_where_clause.regions.contains(&lt_name) {
continue;
}
debug!("insert_late_bound_lifetimes: lifetime {:?} with id {:?} is late-bound",
- param.name.name(),
+ param.name.ident(),
param.id);
let inserted = map.late_bound.insert(param.id);
}
fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
- self.regions.insert(lifetime_ref.name);
+ self.regions.insert(lifetime_ref.name.modern());
}
}
}
fn visit_lifetime(&mut self, lifetime_ref: &'v hir::Lifetime) {
- self.regions.insert(lifetime_ref.name);
+ self.regions.insert(lifetime_ref.name.modern());
}
}
}
for impl_item_ref in impl_item_refs {
let impl_item = self.tcx.hir.impl_item(impl_item_ref.id);
let trait_item_def_id = self.tcx.associated_items(trait_did)
- .find(|item| item.name == impl_item.name).map(|item| item.def_id);
+ .find(|item| item.ident.name == impl_item.ident.name)
+ .map(|item| item.def_id);
if let Some(def_id) = trait_item_def_id {
// Pass `None` to skip deprecation warnings.
self.tcx.check_stability(def_id, None, impl_item.span);
use syntax::ast;
use syntax::symbol::Symbol;
use syntax_pos::Span;
+use hir::def_id::DefId;
use hir::intravisit::{Visitor, NestedVisitorMap};
use hir::intravisit;
use hir;
}
}
+impl<'a, 'tcx, 'gcx> TyCtxt<'a, 'tcx, 'gcx> {
+ pub fn is_weak_lang_item(&self, item_def_id: DefId) -> bool {
+ let lang_items = self.lang_items();
+ let did = Some(item_def_id);
+
+ $(lang_items.$name() == did)||+
+ }
+}
+
) }
weak_lang_items! {
use std::{fmt, env};
use mir;
-use middle::const_val::ConstEvalErr;
use ty::{FnSig, Ty, layout};
use ty::layout::{Size, Align};
+use rustc_data_structures::sync::Lrc;
use super::{
Pointer, Lock, AccessKind
use backtrace::Backtrace;
+use ty;
+use ty::query::TyCtxtAt;
+use errors::DiagnosticBuilder;
+
+use syntax_pos::Span;
+use syntax::ast;
+
+pub type ConstEvalResult<'tcx> = Result<&'tcx ty::Const<'tcx>, Lrc<ConstEvalErr<'tcx>>>;
+
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
+pub struct ConstEvalErr<'tcx> {
+ pub span: Span,
+ pub error: ::mir::interpret::EvalError<'tcx>,
+ pub stacktrace: Vec<FrameInfo>,
+}
+
+#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
+pub struct FrameInfo {
+ pub span: Span,
+ pub location: String,
+ pub lint_root: Option<ast::NodeId>,
+}
+
+impl<'a, 'gcx, 'tcx> ConstEvalErr<'tcx> {
+ pub fn struct_error(&self,
+ tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ message: &str)
+ -> Option<DiagnosticBuilder<'tcx>>
+ {
+ self.struct_generic(tcx, message, None)
+ }
+
+ pub fn report_as_error(&self,
+ tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ message: &str
+ ) {
+ let err = self.struct_generic(tcx, message, None);
+ if let Some(mut err) = err {
+ err.emit();
+ }
+ }
+
+ pub fn report_as_lint(&self,
+ tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ message: &str,
+ lint_root: ast::NodeId,
+ ) {
+ let lint = self.struct_generic(
+ tcx,
+ message,
+ Some(lint_root),
+ );
+ if let Some(mut lint) = lint {
+ lint.emit();
+ }
+ }
+
+ fn struct_generic(
+ &self,
+ tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ message: &str,
+ lint_root: Option<ast::NodeId>,
+ ) -> Option<DiagnosticBuilder<'tcx>> {
+ match self.error.kind {
+ ::mir::interpret::EvalErrorKind::TypeckError |
+ ::mir::interpret::EvalErrorKind::TooGeneric |
+ ::mir::interpret::EvalErrorKind::CheckMatchError |
+ ::mir::interpret::EvalErrorKind::Layout(_) => return None,
+ ::mir::interpret::EvalErrorKind::ReferencedConstant(ref inner) => {
+ inner.struct_generic(tcx, "referenced constant has errors", lint_root)?.emit();
+ },
+ _ => {},
+ }
+ trace!("reporting const eval failure at {:?}", self.span);
+ let mut err = if let Some(lint_root) = lint_root {
+ let node_id = self.stacktrace
+ .iter()
+ .rev()
+ .filter_map(|frame| frame.lint_root)
+ .next()
+ .unwrap_or(lint_root);
+ tcx.struct_span_lint_node(
+ ::rustc::lint::builtin::CONST_ERR,
+ node_id,
+ tcx.span,
+ message,
+ )
+ } else {
+ struct_error(tcx, message)
+ };
+ err.span_label(self.span, self.error.to_string());
+ for FrameInfo { span, location, .. } in &self.stacktrace {
+ err.span_label(*span, format!("inside call to `{}`", location));
+ }
+ Some(err)
+ }
+}
+
+pub fn struct_error<'a, 'gcx, 'tcx>(
+ tcx: TyCtxtAt<'a, 'gcx, 'tcx>,
+ msg: &str,
+) -> DiagnosticBuilder<'tcx> {
+ struct_span_err!(tcx.sess, tcx.span, E0080, "{}", msg)
+}
+
#[derive(Debug, Clone, RustcEncodable, RustcDecodable)]
pub struct EvalError<'tcx> {
pub kind: EvalErrorKind<'tcx, u64>,
UnimplementedTraitSelection,
/// Abort in case type errors are reached
TypeckError,
+ /// Resolution can fail if we are in a too generic context
+ TooGeneric,
+ CheckMatchError,
/// Cannot compute this constant because it depends on another one
/// which already produced an error
- ReferencedConstant(ConstEvalErr<'tcx>),
+ ReferencedConstant(Lrc<ConstEvalErr<'tcx>>),
GeneratorResumedAfterReturn,
GeneratorResumedAfterPanic,
}
"there were unresolved type arguments during trait selection",
TypeckError =>
"encountered constants with type errors, stopping evaluation",
+ TooGeneric =>
+ "encountered overly generic constant",
+ CheckMatchError =>
+ "match checking failed",
ReferencedConstant(_) =>
"referenced constant has errors",
Overflow(mir::BinOp::Add) => "attempt to add with overflow",
mod error;
mod value;
-pub use self::error::{EvalError, EvalResult, EvalErrorKind, AssertMessage};
+pub use self::error::{
+ EvalError, EvalResult, EvalErrorKind, AssertMessage, ConstEvalErr, struct_error,
+ FrameInfo, ConstEvalResult,
+};
pub use self::value::{Scalar, Value, ConstValue};
use ty::layout::{Align, HasDataLayout, Size};
use ty;
+use ty::subst::Substs;
+use hir::def_id::DefId;
use super::{EvalResult, Pointer, PointerArithmetic, Allocation};
/// Represents a constant value in Rust. ByVal and ScalarPair are optimizations which
/// matches Value's optimizations for easy conversions between these two types
-#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
pub enum ConstValue<'tcx> {
+ /// Never returned from the `const_eval` query, but the HIR contains these frequently in order
+ /// to allow HIR creation to happen for everything before needing to be able to run constant
+ /// evaluation
+ Unevaluated(DefId, &'tcx Substs<'tcx>),
/// Used only for types with layout::abi::Scalar ABI and ZSTs which use Scalar::undef()
Scalar(Scalar),
/// Used only for types with layout::abi::ScalarPair
#[inline]
pub fn to_byval_value(&self) -> Option<Value> {
match *self {
+ ConstValue::Unevaluated(..) |
ConstValue::ByRef(..) => None,
ConstValue::ScalarPair(a, b) => Some(Value::ScalarPair(a, b)),
ConstValue::Scalar(val) => Some(Value::Scalar(val)),
#[inline]
pub fn to_scalar(&self) -> Option<Scalar> {
match *self {
- ConstValue::ByRef(..) => None,
+ ConstValue::Unevaluated(..) |
+ ConstValue::ByRef(..) |
ConstValue::ScalarPair(..) => None,
ConstValue::Scalar(val) => Some(val),
}
//! MIR datatypes and passes. See the [rustc guide] for more info.
//!
-//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/mir.html
+//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/mir/index.html
use graphviz::IntoCow;
use hir::def::CtorKind;
}
}
-/// Write a `ConstVal` in a way closer to the original source code than the `Debug` output.
+/// Write a `ConstValue` in a way closer to the original source code than the `Debug` output.
pub fn fmt_const_val<W: Write>(fmt: &mut W, const_val: &ty::Const) -> fmt::Result {
- use middle::const_val::ConstVal;
- match const_val.val {
- ConstVal::Unevaluated(..) => write!(fmt, "{:?}", const_val),
- ConstVal::Value(val) => {
- if let Some(value) = val.to_byval_value() {
- print_miri_value(value, const_val.ty, fmt)
- } else {
- write!(fmt, "{:?}:{}", val, const_val.ty)
- }
- }
+ if let Some(value) = const_val.to_byval_value() {
+ print_miri_value(value, const_val.ty, fmt)
+ } else {
+ write!(fmt, "{:?}:{}", const_val.val, const_val.ty)
}
}
//! See rustc guide chapters on [trait-resolution] and [trait-specialization] for more info on how
//! this works.
//!
-//! [trait-resolution]: https://rust-lang-nursery.github.io/rustc-guide/trait-resolution.html
-//! [trait-specialization]: https://rust-lang-nursery.github.io/rustc-guide/trait-specialization.html
+//! [trait-resolution]: https://rust-lang-nursery.github.io/rustc-guide/traits/resolution.html
+//! [trait-specialization]: https://rust-lang-nursery.github.io/rustc-guide/traits/specialization.html
use hir::def_id::{DefId, LOCAL_CRATE};
use syntax_pos::DUMMY_SP;
use super::{ObligationCause, PredicateObligation};
pub trait TraitEngine<'tcx>: 'tcx {
- fn normalize_projection_type<'a, 'gcx>(
+ fn normalize_projection_type(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
projection_ty: ty::ProjectionTy<'tcx>,
cause: ObligationCause<'tcx>,
) -> Ty<'tcx>;
- fn register_bound<'a, 'gcx>(
+ fn register_bound(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
ty: Ty<'tcx>,
def_id: DefId,
cause: ObligationCause<'tcx>,
);
- fn register_predicate_obligation<'a, 'gcx>(
+ fn register_predicate_obligation(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
obligation: PredicateObligation<'tcx>,
);
- fn select_all_or_error<'a, 'gcx>(
+ fn select_all_or_error(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
) -> Result<(), Vec<FulfillmentError<'tcx>>>;
- fn select_where_possible<'a, 'gcx>(
+ fn select_where_possible(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
) -> Result<(), Vec<FulfillmentError<'tcx>>>;
fn pending_obligations(&self) -> Vec<PredicateObligation<'tcx>>;
}
-impl<'a, 'gcx, 'tcx> dyn TraitEngine<'tcx> {
- pub fn new(_tcx: TyCtxt<'_, '_, 'tcx>) -> Box<Self> {
- Box::new(FulfillmentContext::new())
- }
+pub trait TraitEngineExt<'tcx> {
+ fn register_predicate_obligations(
+ &mut self,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ obligations: impl IntoIterator<Item = PredicateObligation<'tcx>>,
+ );
+}
- pub fn register_predicate_obligations<I>(
+impl<T: ?Sized + TraitEngine<'tcx>> TraitEngineExt<'tcx> for T {
+ fn register_predicate_obligations(
&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- obligations: I,
- ) where
- I: IntoIterator<Item = PredicateObligation<'tcx>>,
- {
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ obligations: impl IntoIterator<Item = PredicateObligation<'tcx>>,
+ ) {
for obligation in obligations {
self.register_predicate_obligation(infcx, obligation);
}
}
}
+
+impl dyn TraitEngine<'tcx> {
+ pub fn new(_tcx: TyCtxt<'_, '_, 'tcx>) -> Box<Self> {
+ Box::new(FulfillmentContext::new())
+ }
+}
..
}) => {
(self.tcx.sess.codemap().def_span(span), decl.inputs.iter()
- .map(|arg| match arg.clone().into_inner().node {
+ .map(|arg| match arg.clone().node {
hir::TyTup(ref tys) => ArgKind::Tuple(
Some(arg.span),
tys.iter()
use rustc_data_structures::obligation_forest::{ObligationProcessor, ProcessResult};
use std::marker::PhantomData;
use hir::def_id::DefId;
-use middle::const_val::{ConstEvalErr, ErrKind};
+use mir::interpret::ConstEvalErr;
+use mir::interpret::EvalErrorKind;
use super::CodeAmbiguity;
use super::CodeProjectionError;
use super::CodeSelectionError;
-use super::engine::TraitEngine;
+use super::engine::{TraitEngine, TraitEngineExt};
use super::{FulfillmentError, FulfillmentErrorCode};
use super::{ObligationCause, PredicateObligation, Obligation};
use super::project;
}
}
- pub fn register_predicate_obligations<I>(&mut self,
- infcx: &InferCtxt<'a, 'gcx, 'tcx>,
- obligations: I)
- where I: IntoIterator<Item = PredicateObligation<'tcx>>
- {
- for obligation in obligations {
- self.register_predicate_obligation(infcx, obligation);
- }
- }
-
/// Attempts to select obligations using `selcx`. If `only_new_obligations` is true, then it
/// only attempts to select obligations that haven't been seen before.
fn select(&mut self, selcx: &mut SelectionContext<'a, 'gcx, 'tcx>)
ProcessResult::Error(
CodeSelectionError(ConstEvalFailure(ConstEvalErr {
span: obligation.cause.span,
- kind: ErrKind::CouldNotResolve.into(),
- }))
+ error: EvalErrorKind::TooGeneric.into(),
+ stacktrace: vec![],
+ }.into()))
)
}
},
//! Trait Resolution. See [rustc guide] for more info on how this works.
//!
-//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/trait-resolution.html
+//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/traits/resolution.html
pub use self::SelectionError::*;
pub use self::FulfillmentErrorCode::*;
use hir::def_id::DefId;
use infer::outlives::env::OutlivesEnvironment;
use middle::region;
-use middle::const_val::ConstEvalErr;
+use mir::interpret::ConstEvalErr;
use ty::subst::Substs;
use ty::{self, AdtKind, Slice, Ty, TyCtxt, GenericParamDefKind, ToPredicate};
use ty::error::{ExpectedFound, TypeError};
use ty::fold::{TypeFolder, TypeFoldable, TypeVisitor};
-use infer::canonical::{Canonical, Canonicalize};
use infer::{InferCtxt};
use rustc_data_structures::sync::Lrc;
pub use self::select::{EvaluationResult, IntercrateAmbiguityCause, OverflowError};
pub use self::specialize::{OverlapError, specialization_graph, translate_substs};
pub use self::specialize::{SpecializesCache, find_associated_item};
-pub use self::engine::TraitEngine;
+pub use self::engine::{TraitEngine, TraitEngineExt};
pub use self::util::elaborate_predicates;
pub use self::util::supertraits;
pub use self::util::Supertraits;
ty::PolyTraitRef<'tcx>,
ty::error::TypeError<'tcx>),
TraitNotObjectSafe(DefId),
- ConstEvalFailure(ConstEvalErr<'tcx>),
+ ConstEvalFailure(Lrc<ConstEvalErr<'tcx>>),
Overflow,
}
};
}
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ty::ParamEnvAnd<'tcx, Goal<'tcx>> {
- // we ought to intern this, but I'm too lazy just now
- type Canonicalized = Canonical<'gcx, ty::ParamEnvAnd<'gcx, Goal<'gcx>>>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- value
- }
-}
-
pub trait ExClauseFold<'tcx>
where
Self: chalk_engine::context::Context + Clone,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
) -> Option<Self::LiftedExClause>;
}
-
-impl<'gcx: 'tcx, 'tcx, C> Canonicalize<'gcx, 'tcx> for chalk_engine::ExClause<C>
-where
- C: chalk_engine::context::Context + Clone,
- C: ExClauseLift<'gcx> + ExClauseFold<'tcx>,
- C::Substitution: Clone,
- C::RegionConstraint: Clone,
-{
- type Canonicalized = Canonical<'gcx, C::LiftedExClause>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- value
- }
-}
use super::elaborate_predicates;
use hir::def_id::DefId;
+use lint;
use traits;
use ty::{self, Ty, TyCtxt, TypeFoldable};
use ty::subst::Substs;
use ty::util::ExplicitSelf;
use std::borrow::Cow;
use syntax::ast;
+use syntax_pos::Span;
-#[derive(Clone, Debug, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum ObjectSafetyViolation {
/// Self : Sized declared on the trait
SizedSelf,
ObjectSafetyViolation::Method(name, MethodViolationCode::ReferencesSelf) =>
format!("method `{}` references the `Self` type \
in its arguments or return type", name).into(),
+ ObjectSafetyViolation::Method(name,
+ MethodViolationCode::WhereClauseReferencesSelf(_)) =>
+ format!("method `{}` references the `Self` type in where clauses", name).into(),
ObjectSafetyViolation::Method(name, MethodViolationCode::Generic) =>
format!("method `{}` has generic type parameters", name).into(),
ObjectSafetyViolation::Method(name, MethodViolationCode::NonStandardSelfType) =>
/// e.g., `fn foo(&self, x: Self)` or `fn foo(&self) -> Self`
ReferencesSelf,
+ /// e.g. `fn foo(&self) where Self: Clone`
+ WhereClauseReferencesSelf(Span),
+
/// e.g., `fn foo<A>()`
Generic,
.filter(|item| item.kind == ty::AssociatedKind::Method)
.filter_map(|item| {
self.object_safety_violation_for_method(trait_def_id, &item)
- .map(|code| ObjectSafetyViolation::Method(item.name, code))
+ .map(|code| ObjectSafetyViolation::Method(item.ident.name, code))
+ }).filter(|violation| {
+ if let ObjectSafetyViolation::Method(_,
+ MethodViolationCode::WhereClauseReferencesSelf(span)) = violation {
+ // Using `CRATE_NODE_ID` is wrong, but it's hard to get a more precise id.
+ // It's also hard to get a use site span, so we use the method definition span.
+ self.lint_node_note(
+ lint::builtin::WHERE_CLAUSES_OBJECT_SAFETY,
+ ast::CRATE_NODE_ID,
+ *span,
+ &format!("the trait `{}` cannot be made into an object",
+ self.item_path_str(trait_def_id)),
+ &violation.error_msg());
+ false
+ } else {
+ true
+ }
}).collect();
// Check the trait itself.
violations.extend(self.associated_items(trait_def_id)
.filter(|item| item.kind == ty::AssociatedKind::Const)
- .map(|item| ObjectSafetyViolation::AssociatedConst(item.name)));
+ .map(|item| ObjectSafetyViolation::AssociatedConst(item.ident.name)));
debug!("object_safety_violations_for_trait(trait_def_id={:?}) = {:?}",
trait_def_id,
return false;
}
- self.virtual_call_violation_for_method(trait_def_id, method).is_none()
+ match self.virtual_call_violation_for_method(trait_def_id, method) {
+ None | Some(MethodViolationCode::WhereClauseReferencesSelf(_)) => true,
+ Some(_) => false,
+ }
}
/// Returns `Some(_)` if this method cannot be called on a trait
return Some(MethodViolationCode::Generic);
}
+ if self.predicates_of(method.def_id).predicates.into_iter()
+ // A trait object can't claim to live more than the concrete type,
+ // so outlives predicates will always hold.
+ .filter(|p| p.to_opt_type_outlives().is_none())
+ .collect::<Vec<_>>()
+ // Do a shallow visit so that `contains_illegal_self_type_reference`
+ // may apply its custom visiting.
+ .visit_tys_shallow(|t| self.contains_illegal_self_type_reference(trait_def_id, t)) {
+ let span = self.def_span(method.def_id);
+ return Some(MethodViolationCode::WhereClauseReferencesSelf(span));
+ }
+
None
}
use hir::def_id::DefId;
use infer::{InferCtxt, InferOk};
use infer::type_variable::TypeVariableOrigin;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use mir::interpret::{GlobalId};
use rustc_data_structures::snapshot_map::{Snapshot, SnapshotMap};
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
use ty::subst::{Subst, Substs};
use ty::{self, ToPredicate, ToPolyTraitRef, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder};
}
fn fold_const(&mut self, constant: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
- if let ConstVal::Unevaluated(def_id, substs) = constant.val {
+ if let ConstValue::Unevaluated(def_id, substs) = constant.val {
let tcx = self.selcx.tcx().global_tcx();
if let Some(param_env) = self.tcx().lift_to_global(&self.param_env) {
if substs.needs_infer() || substs.has_skol() {
obligation.predicate.self_ty(),
gen_sig)
.map_bound(|(trait_ref, yield_ty, return_ty)| {
- let name = tcx.associated_item(obligation.predicate.item_def_id).name;
- let ty = if name == Symbol::intern("Return") {
+ let name = tcx.associated_item(obligation.predicate.item_def_id).ident.name;
+ let ty = if name == "Return" {
return_ty
- } else if name == Symbol::intern("Yield") {
+ } else if name == "Yield" {
yield_ty
} else {
bug!()
projection_ty: ty::ProjectionTy::from_ref_and_name(
tcx,
trait_ref,
- Symbol::intern(FN_OUTPUT_NAME),
+ Ident::from_str(FN_OUTPUT_NAME),
),
ty: ret_type
}
// checker method `check_impl_items_against_trait`, so here we
// just return TyError.
debug!("confirm_impl_candidate: no associated type {:?} for {:?}",
- assoc_ty.item.name,
+ assoc_ty.item.ident,
obligation.predicate);
tcx.types.err
} else {
-> specialization_graph::NodeItem<ty::AssociatedItem>
{
let tcx = selcx.tcx();
- let assoc_ty_name = tcx.associated_item(assoc_ty_def_id).name;
+ let assoc_ty_name = tcx.associated_item(assoc_ty_def_id).ident;
let trait_def_id = tcx.impl_trait_ref(impl_def_id).unwrap().def_id;
let trait_def = tcx.trait_def(trait_def_id);
let impl_node = specialization_graph::Node::Impl(impl_def_id);
for item in impl_node.items(tcx) {
if item.kind == ty::AssociatedKind::Type &&
- tcx.hygienic_eq(item.name, assoc_ty_name, trait_def_id) {
+ tcx.hygienic_eq(item.ident, assoc_ty_name, trait_def_id) {
return specialization_graph::NodeItem {
node: specialization_graph::Node::Impl(impl_def_id),
item,
// except according to those terms.
use infer::at::At;
-use infer::canonical::{Canonical, Canonicalize, QueryResult};
use infer::InferOk;
use std::iter::FromIterator;
-use traits::query::CanonicalTyGoal;
-use ty::{self, Ty, TyCtxt};
+use syntax::codemap::Span;
use ty::subst::Kind;
-use rustc_data_structures::sync::Lrc;
+use ty::{self, Ty, TyCtxt};
impl<'cx, 'gcx, 'tcx> At<'cx, 'gcx, 'tcx> {
/// Given a type `ty` of some value being dropped, computes a set
// any destructor.
let tcx = self.infcx.tcx;
if trivial_dropck_outlives(tcx, ty) {
- return InferOk { value: vec![], obligations: vec![] };
+ return InferOk {
+ value: vec![],
+ obligations: vec![],
+ };
}
let gcx = tcx.global_tcx();
debug!("c_ty = {:?}", c_ty);
match &gcx.dropck_outlives(c_ty) {
Ok(result) if result.is_proven() => {
- match self.infcx.instantiate_query_result(
+ match self.infcx.instantiate_query_result_and_region_obligations(
self.cause,
self.param_env,
&orig_values,
result,
) {
- Ok(InferOk {
- value: DropckOutlivesResult { kinds, overflows },
- obligations,
- }) => {
- for overflow_ty in overflows.into_iter().take(1) {
- let mut err = struct_span_err!(
- tcx.sess,
- span,
- E0320,
- "overflow while adding drop-check rules for {}",
- self.infcx.resolve_type_vars_if_possible(&ty),
- );
- err.note(&format!("overflowed on {}", overflow_ty));
- err.emit();
- }
-
+ Ok(InferOk { value, obligations }) => {
+ let ty = self.infcx.resolve_type_vars_if_possible(&ty);
+ let kinds = value.into_kinds_reporting_overflows(tcx, span, ty);
return InferOk {
value: kinds,
obligations,
}
}
-#[derive(Clone, Debug)]
+#[derive(Clone, Debug, Default)]
pub struct DropckOutlivesResult<'tcx> {
pub kinds: Vec<Kind<'tcx>>,
pub overflows: Vec<Ty<'tcx>>,
}
+impl<'tcx> DropckOutlivesResult<'tcx> {
+ pub fn report_overflows(
+ &self,
+ tcx: TyCtxt<'_, '_, 'tcx>,
+ span: Span,
+ ty: Ty<'tcx>,
+ ) {
+ for overflow_ty in self.overflows.iter().take(1) {
+ let mut err = struct_span_err!(
+ tcx.sess,
+ span,
+ E0320,
+ "overflow while adding drop-check rules for {}",
+ ty,
+ );
+ err.note(&format!("overflowed on {}", overflow_ty));
+ err.emit();
+ }
+ }
+
+ pub fn into_kinds_reporting_overflows(
+ self,
+ tcx: TyCtxt<'_, '_, 'tcx>,
+ span: Span,
+ ty: Ty<'tcx>,
+ ) -> Vec<Kind<'tcx>> {
+ self.report_overflows(tcx, span, ty);
+ let DropckOutlivesResult { kinds, overflows: _ } = self;
+ kinds
+ }
+}
+
/// A set of constraints that need to be satisfied in order for
/// a type to be valid for destruction.
#[derive(Clone, Debug)]
result
}
}
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ty::ParamEnvAnd<'tcx, Ty<'tcx>> {
- type Canonicalized = CanonicalTyGoal<'gcx>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- value
- }
-}
-
BraceStructTypeFoldableImpl! {
impl<'tcx> TypeFoldable<'tcx> for DropckOutlivesResult<'tcx> {
kinds, overflows
kinds, overflows
});
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for QueryResult<'tcx, DropckOutlivesResult<'tcx>> {
- // we ought to intern this, but I'm too lazy just now
- type Canonicalized = Lrc<Canonical<'gcx, QueryResult<'gcx, DropckOutlivesResult<'gcx>>>>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- Lrc::new(value)
- }
-}
-
impl_stable_hash_for!(struct DtorckConstraint<'tcx> {
outlives,
dtorck_types,
///
/// Note also that `needs_drop` requires a "global" type (i.e., one
/// with erased regions), but this function does not.
-fn trivial_dropck_outlives<'cx, 'tcx>(tcx: TyCtxt<'cx, '_, 'tcx>, ty: Ty<'tcx>) -> bool {
+pub fn trivial_dropck_outlives<'tcx>(tcx: TyCtxt<'_, '_, 'tcx>, ty: Ty<'tcx>) -> bool {
match ty.sty {
// None of these types have a destructor and hence they do not
// require anything in particular to outlive the dtor's
// except according to those terms.
use infer::InferCtxt;
-use infer::canonical::{Canonical, Canonicalize};
use traits::{EvaluationResult, PredicateObligation, SelectionContext,
TraitQueryMode, OverflowError};
-use traits::query::CanonicalPredicateGoal;
-use ty::{ParamEnvAnd, Predicate, TyCtxt};
impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
/// Evaluates whether the predicate can be satisfied (by any means)
}
}
}
-
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ParamEnvAnd<'tcx, Predicate<'tcx>> {
- type Canonicalized = CanonicalPredicateGoal<'gcx>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- value
- }
-}
//! `librustc_traits`.
use infer::canonical::Canonical;
+use ty::error::TypeError;
use ty::{self, Ty};
pub mod dropck_outlives;
pub mod evaluate_obligation;
pub mod normalize;
pub mod normalize_erasing_regions;
+pub mod type_op;
pub type CanonicalProjectionGoal<'tcx> =
Canonical<'tcx, ty::ParamEnvAnd<'tcx, ty::ProjectionTy<'tcx>>>;
pub type CanonicalPredicateGoal<'tcx> =
Canonical<'tcx, ty::ParamEnvAnd<'tcx, ty::Predicate<'tcx>>>;
+pub type CanonicalTypeOpEqGoal<'tcx> =
+ Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::eq::Eq<'tcx>>>;
+
+pub type CanonicalTypeOpSubtypeGoal<'tcx> =
+ Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::subtype::Subtype<'tcx>>>;
+
+pub type CanonicalTypeOpProvePredicateGoal<'tcx> =
+ Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::prove_predicate::ProvePredicate<'tcx>>>;
+
+pub type CanonicalTypeOpNormalizeGoal<'tcx, T> =
+ Canonical<'tcx, ty::ParamEnvAnd<'tcx, type_op::normalize::Normalize<T>>>;
+
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub struct NoSolution;
pub type Fallible<T> = Result<T, NoSolution>;
+impl<'tcx> From<TypeError<'tcx>> for NoSolution {
+ fn from(_: TypeError<'tcx>) -> NoSolution {
+ NoSolution
+ }
+}
+
impl_stable_hash_for!(struct NoSolution { });
use infer::{InferCtxt, InferOk};
use infer::at::At;
-use infer::canonical::{Canonical, Canonicalize, QueryResult};
-use middle::const_val::ConstVal;
-use mir::interpret::GlobalId;
-use rustc_data_structures::sync::Lrc;
+use mir::interpret::{GlobalId, ConstValue};
use traits::{Obligation, ObligationCause, PredicateObligation, Reveal};
-use traits::query::CanonicalProjectionGoal;
use traits::project::Normalized;
use ty::{self, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder};
let concrete_ty = generic_ty.subst(self.tcx(), substs);
self.anon_depth += 1;
if concrete_ty == ty {
- println!("generic_ty: {:#?}", generic_ty);
- println!("substs {:#?}", substs);
+ bug!("infinite recursion generic_ty: {:#?}, substs: {:#?}, \
+ concrete_ty: {:#?}, ty: {:#?}", generic_ty, substs, concrete_ty,
+ ty);
}
- assert_ne!(concrete_ty, ty, "infinite recursion");
let folded_ty = self.fold_ty(concrete_ty);
self.anon_depth -= 1;
folded_ty
return ty;
}
- match self.infcx.instantiate_query_result(
+ match self.infcx.instantiate_query_result_and_region_obligations(
self.cause,
self.param_env,
&orig_values,
}
fn fold_const(&mut self, constant: &'tcx ty::Const<'tcx>) -> &'tcx ty::Const<'tcx> {
- if let ConstVal::Unevaluated(def_id, substs) = constant.val {
+ if let ConstValue::Unevaluated(def_id, substs) = constant.val {
let tcx = self.infcx.tcx.global_tcx();
if let Some(param_env) = self.tcx().lift_to_global(&self.param_env) {
if substs.needs_infer() || substs.has_skol() {
}
}
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ty::ParamEnvAnd<'tcx, ty::ProjectionTy<'tcx>> {
- type Canonicalized = CanonicalProjectionGoal<'gcx>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- value
- }
-}
-
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for QueryResult<'tcx, NormalizationResult<'tcx>> {
- // we ought to intern this, but I'm too lazy just now
- type Canonicalized = Lrc<Canonical<'gcx, QueryResult<'gcx, NormalizationResult<'gcx>>>>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>,
- ) -> Self::Canonicalized {
- Lrc::new(value)
- }
-}
-
impl_stable_hash_for!(struct NormalizationResult<'tcx> {
normalized_ty
});
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::{InferCtxt, InferOk};
+use std::fmt;
+use traits::query::Fallible;
+
+use infer::canonical::query_result;
+use infer::canonical::QueryRegionConstraint;
+use std::rc::Rc;
+use syntax::codemap::DUMMY_SP;
+use traits::{ObligationCause, TraitEngine, TraitEngineExt};
+
+/// A `TypeOp` whose behavior is supplied by an arbitrary closure
+/// instead of a canonicalized query. Useful for operations that have
+/// no query form.
+pub struct CustomTypeOp<F, G> {
+    closure: F,
+    // Produces the human-readable text used for `Debug` output.
+    description: G,
+}
+
+impl<F, G> CustomTypeOp<F, G> {
+    /// Wraps `closure` as a type op; `description` is only invoked
+    /// when the op is formatted for debugging/logging.
+    pub fn new<'gcx, 'tcx, R>(closure: F, description: G) -> Self
+    where
+        F: FnOnce(&InferCtxt<'_, 'gcx, 'tcx>) -> Fallible<InferOk<'tcx, R>>,
+        G: Fn() -> String,
+    {
+        CustomTypeOp {
+            closure,
+            description,
+        }
+    }
+}
+
+impl<'gcx, 'tcx, F, R, G> super::TypeOp<'gcx, 'tcx> for CustomTypeOp<F, G>
+where
+    F: for<'a, 'cx> FnOnce(&'a InferCtxt<'cx, 'gcx, 'tcx>) -> Fallible<InferOk<'tcx, R>>,
+    G: Fn() -> String,
+{
+    /// The closure's success value.
+    type Output = R;
+
+    /// Processes the operation and all resulting obligations,
+    /// returning the final result along with any region constraints
+    /// (they will be given over to the NLL region solver).
+    fn fully_perform(
+        self,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+    ) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
+        // Formatting `self` runs the `description` closure (see the
+        // `Debug` impl), so only pay that cost in debug builds.
+        if cfg!(debug_assertions) {
+            info!("fully_perform({:?})", self);
+        }
+
+        scrape_region_constraints(infcx, || Ok((self.closure)(infcx)?))
+    }
+}
+
+impl<F, G> fmt::Debug for CustomTypeOp<F, G>
+where
+    G: Fn() -> String,
+{
+    /// Debug output delegates entirely to the user-supplied
+    /// `description` closure.
+    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+        let text = (self.description)();
+        f.write_str(&text)
+    }
+}
+
+/// Executes `op` and then scrapes out all the "old style" region
+/// constraints that result, creating query-region-constraints.
+///
+/// Selection errors encountered while fulfilling the resulting
+/// obligations are reported as delayed bugs rather than hard errors.
+fn scrape_region_constraints<'gcx, 'tcx, R>(
+    infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+    op: impl FnOnce() -> Fallible<InferOk<'tcx, R>>,
+) -> Fallible<(R, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
+    let mut fulfill_cx = TraitEngine::new(infcx.tcx);
+    let dummy_body_id = ObligationCause::dummy().body_id;
+    let InferOk { value, obligations } = infcx.commit_if_ok(|_| op())?;
+    debug_assert!(obligations.iter().all(|o| o.cause.body_id == dummy_body_id));
+    fulfill_cx.register_predicate_obligations(infcx, obligations);
+    if let Err(e) = fulfill_cx.select_all_or_error(infcx) {
+        infcx.tcx.sess.diagnostic().delay_span_bug(
+            DUMMY_SP,
+            &format!("errors selecting obligation during MIR typeck: {:?}", e),
+        );
+    }
+
+    let region_obligations = infcx.take_registered_region_obligations();
+
+    let region_constraint_data = infcx.take_and_reset_region_constraints();
+
+    // NOTE: fixed mojibake here — `&region_constraint_data` had been
+    // corrupted to `®ion_constraint_data` (`&reg` -> `®`).
+    let outlives =
+        query_result::make_query_outlives(infcx.tcx, region_obligations, &region_constraint_data);
+
+    if outlives.is_empty() {
+        Ok((value, None))
+    } else {
+        Ok((value, Some(Rc::new(outlives))))
+    }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Ty, TyCtxt};
+
+/// Query kernel asking that types `a` and `b` be equal (unifiable)
+/// under the accompanying `ParamEnv`.
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Eq<'tcx> {
+    pub a: Ty<'tcx>,
+    pub b: Ty<'tcx>,
+}
+
+impl<'tcx> Eq<'tcx> {
+    /// Builds the kernel; pair it with a `ParamEnv` to obtain a runnable type op.
+    pub fn new(a: Ty<'tcx>, b: Ty<'tcx>) -> Self {
+        Self { a, b }
+    }
+}
+
+impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for Eq<'tcx> {
+    type QueryResult = ();
+
+    fn try_fast_path(
+        _tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        key: &ParamEnvAnd<'tcx, Eq<'tcx>>,
+    ) -> Option<Self::QueryResult> {
+        // Structurally identical types are equal without running the query.
+        if key.value.a == key.value.b {
+            Some(())
+        } else {
+            None
+        }
+    }
+
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+        tcx.type_op_eq(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, ()>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+        // Identity cast; see `QueryTypeOp::shrink_to_tcx_lifetime` docs.
+        v
+    }
+}
+
+// Boilerplate: fold, lift, and stable-hash `Eq` field-by-field.
+BraceStructTypeFoldableImpl! {
+    impl<'tcx> TypeFoldable<'tcx> for Eq<'tcx> {
+        a,
+        b,
+    }
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx> Lift<'tcx> for Eq<'a> {
+        type Lifted = Eq<'tcx>;
+        a,
+        b,
+    }
+}
+
+impl_stable_hash_for! {
+    struct Eq<'tcx> { a, b }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryRegionConstraint,
+ QueryResult};
+use infer::{InferCtxt, InferOk};
+use std::fmt;
+use std::rc::Rc;
+use traits::query::Fallible;
+use traits::ObligationCause;
+use ty::fold::TypeFoldable;
+use ty::{Lift, ParamEnvAnd, TyCtxt};
+
+pub mod custom;
+pub mod eq;
+pub mod normalize;
+pub mod outlives;
+pub mod prove_predicate;
+use self::prove_predicate::ProvePredicate;
+pub mod subtype;
+
+/// "Type ops" are used in NLL to perform some particular action and
+/// extract out the resulting region constraints (or an error if it
+/// cannot be completed).
+pub trait TypeOp<'gcx, 'tcx>: Sized + fmt::Debug {
+    /// Value produced when the operation succeeds.
+    type Output;
+
+    /// Processes the operation and all resulting obligations,
+    /// returning the final result along with any region constraints
+    /// (they will be given over to the NLL region solver).
+    fn fully_perform(
+        self,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+    ) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)>;
+}
+
+/// "Query type ops" are type ops that are implemented using a
+/// [canonical query][c]. The `Self` type here contains the kernel of
+/// information needed to do the operation -- `TypeOp` is actually
+/// implemented for `ParamEnvAnd<Self>`, since we always need to bring
+/// along a parameter environment as well. For query type-ops, we will
+/// first canonicalize the key and then invoke the query on the tcx,
+/// which produces the resulting query region constraints.
+///
+/// [c]: https://rust-lang-nursery.github.io/rustc-guide/traits/canonicalization.html
+pub trait QueryTypeOp<'gcx: 'tcx, 'tcx>:
+    fmt::Debug + Sized + TypeFoldable<'tcx> + Lift<'gcx>
+{
+    type QueryResult: TypeFoldable<'tcx> + Lift<'gcx>;
+
+    /// Give query the option for a simple fast path that never
+    /// actually hits the tcx cache lookup etc. Return `Some(r)` with
+    /// a final result or `None` to do the full path.
+    fn try_fast_path(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        key: &ParamEnvAnd<'tcx, Self>,
+    ) -> Option<Self::QueryResult>;
+
+    /// Performs the actual query with the canonicalized key -- the
+    /// real work happens here. This method is not given an `infcx`
+    /// because it shouldn't need one -- and if it had access to one,
+    /// it might do things like invoke `sub_regions`, which would be
+    /// bad, because it would create subregion relationships that are
+    /// not captured in the return value.
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>>;
+
+    /// Casts a lifted query result (which is in the gcx lifetime)
+    /// into the tcx lifetime. This is always just an identity cast,
+    /// but the generic code doesn't realize it -- put another way, in
+    /// the generic code, we have a `Lifted<'gcx, Self::QueryResult>`
+    /// and we want to convert that to a `Self::QueryResult`. This is
+    /// not a priori valid, so we can't do it -- but in practice, it
+    /// is always a no-op (e.g., the lifted form of a type,
+    /// `Ty<'gcx>`, is a subtype of `Ty<'tcx>`). So we have to push
+    /// the operation into the impls that know more specifically what
+    /// `QueryResult` is. This operation would (maybe) be nicer with
+    /// something like HKTs or GATs, since then we could make
+    /// `QueryResult` parametric in `'gcx` and `'tcx` etc.
+    fn shrink_to_tcx_lifetime(
+        lifted_query_result: &'a CanonicalizedQueryResult<'gcx, Self::QueryResult>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self::QueryResult>>;
+
+    /// Performs the query: tries the fast path, otherwise
+    /// canonicalizes the key, runs the query, and instantiates the
+    /// result; region constraints produced along the way are
+    /// appended to `output_query_region_constraints`.
+    fn fully_perform_into(
+        query_key: ParamEnvAnd<'tcx, Self>,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+        output_query_region_constraints: &mut Vec<QueryRegionConstraint<'tcx>>,
+    ) -> Fallible<Self::QueryResult> {
+        if let Some(result) = QueryTypeOp::try_fast_path(infcx.tcx, &query_key) {
+            return Ok(result);
+        }
+
+        // FIXME(#33684) -- We need to use
+        // `canonicalize_hr_query_hack` here because of things
+        // like the subtype query, which go awry around
+        // `'static` otherwise.
+        let (canonical_self, canonical_var_values) = infcx.canonicalize_hr_query_hack(&query_key);
+        let canonical_result = Self::perform_query(infcx.tcx, canonical_self)?;
+        let canonical_result = Self::shrink_to_tcx_lifetime(&canonical_result);
+
+        let param_env = query_key.param_env;
+
+        let InferOk { value, obligations } = infcx
+            .instantiate_nll_query_result_and_region_obligations(
+                &ObligationCause::dummy(),
+                param_env,
+                &canonical_var_values,
+                canonical_result,
+                output_query_region_constraints,
+            )?;
+
+        // Typically, instantiating NLL query results does not
+        // create obligations. However, in some cases there
+        // are unresolved type variables, and unifying them *can*
+        // create obligations. In that case, we have to go
+        // fulfill them. We do this via a (recursive) query.
+        for obligation in obligations {
+            let () = ProvePredicate::fully_perform_into(
+                obligation
+                    .param_env
+                    .and(ProvePredicate::new(obligation.predicate)),
+                infcx,
+                output_query_region_constraints,
+            )?;
+        }
+
+        Ok(value)
+    }
+}
+
+impl<'gcx: 'tcx, 'tcx, Q> TypeOp<'gcx, 'tcx> for ParamEnvAnd<'tcx, Q>
+where
+    Q: QueryTypeOp<'gcx, 'tcx>,
+{
+    type Output = Q::QueryResult;
+
+    /// Runs the query-based type op, collecting any region
+    /// constraints it produces for hand-off to the NLL solver.
+    fn fully_perform(
+        self,
+        infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+    ) -> Fallible<(Self::Output, Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>)> {
+        let mut region_constraints = vec![];
+        let output = Q::fully_perform_into(self, infcx, &mut region_constraints)?;
+
+        // Hand back the accumulated constraints ref-counted, or
+        // `None` when the operation produced none at all.
+        if region_constraints.is_empty() {
+            Ok((output, None))
+        } else {
+            Ok((output, Some(Rc::new(region_constraints))))
+        }
+    }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use std::fmt;
+use traits::query::Fallible;
+use ty::fold::TypeFoldable;
+use ty::{self, Lift, ParamEnvAnd, Ty, TyCtxt};
+
+/// Query kernel requesting normalization of `value` (resolving any
+/// projections it contains) within some `ParamEnv`.
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Normalize<T> {
+    pub value: T,
+}
+
+impl<'tcx, T> Normalize<T>
+where
+    T: fmt::Debug + TypeFoldable<'tcx>,
+{
+    /// Builds the kernel; pair it with a `ParamEnv` to obtain a runnable type op.
+    pub fn new(value: T) -> Self {
+        Self { value }
+    }
+}
+
+impl<'gcx: 'tcx, 'tcx, T> super::QueryTypeOp<'gcx, 'tcx> for Normalize<T>
+where
+    T: Normalizable<'gcx, 'tcx>,
+{
+    type QueryResult = T;
+
+    fn try_fast_path(_tcx: TyCtxt<'_, 'gcx, 'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<T> {
+        // A value containing no projections is already in normal form.
+        if !key.value.value.has_projections() {
+            Some(key.value.value)
+        } else {
+            None
+        }
+    }
+
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>> {
+        // Each normalizable kind of value routes to its own
+        // `type_op_normalize_*` query via `Normalizable`.
+        T::type_op_method(tcx, canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, T>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, T>> {
+        T::shrink_to_tcx_lifetime(v)
+    }
+}
+
+/// Values for which there is a dedicated `type_op_normalize_*` tcx
+/// query; `Normalize<T>` dispatches through this trait.
+pub trait Normalizable<'gcx, 'tcx>: fmt::Debug + TypeFoldable<'tcx> + Lift<'gcx> + Copy {
+    /// Invokes the tcx query that normalizes this kind of value.
+    fn type_op_method(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>>;
+
+    /// Convert from the `'gcx` (lifted) form of `Self` into the `tcx`
+    /// form of `Self`.
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, Self>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>>;
+}
+
+// The impls below each route to the corresponding
+// `type_op_normalize_*` query for their kind of value; every
+// `shrink_to_tcx_lifetime` is the identity cast described on
+// `QueryTypeOp`.
+impl Normalizable<'gcx, 'tcx> for Ty<'tcx>
+where
+    'gcx: 'tcx,
+{
+    fn type_op_method(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+        tcx.type_op_normalize_ty(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, Self>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+        v
+    }
+}
+
+impl Normalizable<'gcx, 'tcx> for ty::Predicate<'tcx>
+where
+    'gcx: 'tcx,
+{
+    fn type_op_method(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+        tcx.type_op_normalize_predicate(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, Self>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+        v
+    }
+}
+
+impl Normalizable<'gcx, 'tcx> for ty::PolyFnSig<'tcx>
+where
+    'gcx: 'tcx,
+{
+    fn type_op_method(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+        tcx.type_op_normalize_poly_fn_sig(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, Self>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+        v
+    }
+}
+
+impl Normalizable<'gcx, 'tcx> for ty::FnSig<'tcx>
+where
+    'gcx: 'tcx,
+{
+    fn type_op_method(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self>> {
+        tcx.type_op_normalize_fn_sig(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, Self>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self>> {
+        v
+    }
+}
+
+// Boilerplate: fold, lift, and stable-hash `Normalize<T>` through its
+// single `value` field.
+BraceStructTypeFoldableImpl! {
+    impl<'tcx, T> TypeFoldable<'tcx> for Normalize<T> {
+        value,
+    } where T: TypeFoldable<'tcx>,
+}
+
+BraceStructLiftImpl! {
+    impl<'tcx, T> Lift<'tcx> for Normalize<T> {
+        type Lifted = Normalize<T::Lifted>;
+        value,
+    } where T: Lift<'tcx>,
+}
+
+impl_stable_hash_for! {
+    impl<'tcx, T> for struct Normalize<T> {
+        value
+    }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::dropck_outlives::trivial_dropck_outlives;
+use traits::query::dropck_outlives::DropckOutlivesResult;
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Ty, TyCtxt};
+
+/// Query kernel computing the dropck-outlives requirements for
+/// `dropped_ty` (what must outlive a value of that type being dropped).
+#[derive(Copy, Clone, Debug)]
+pub struct DropckOutlives<'tcx> {
+    dropped_ty: Ty<'tcx>,
+}
+
+impl<'tcx> DropckOutlives<'tcx> {
+    /// Builds the kernel; pair it with a `ParamEnv` to obtain a runnable type op.
+    pub fn new(dropped_ty: Ty<'tcx>) -> Self {
+        DropckOutlives { dropped_ty }
+    }
+}
+
+impl super::QueryTypeOp<'gcx, 'tcx> for DropckOutlives<'tcx>
+where
+    'gcx: 'tcx,
+{
+    type QueryResult = DropckOutlivesResult<'tcx>;
+
+    fn try_fast_path(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        key: &ParamEnvAnd<'tcx, Self>,
+    ) -> Option<Self::QueryResult> {
+        // When dropping the type trivially imposes no outlives
+        // requirements, skip the query entirely.
+        if trivial_dropck_outlives(tcx, key.value.dropped_ty) {
+            Some(DropckOutlivesResult::default())
+        } else {
+            None
+        }
+    }
+
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, Self::QueryResult>> {
+        // Subtle: note that we are not invoking
+        // `infcx.at(...).dropck_outlives(...)` here, but rather the
+        // underlying `dropck_outlives` query. This same underlying
+        // query is also used by the
+        // `infcx.at(...).dropck_outlives(...)` fn. Avoiding the
+        // wrapper means we don't need an infcx in this code, which is
+        // good because the interface doesn't give us one (so that we
+        // know we are not registering any subregion relations or
+        // other things).
+
+        // FIXME convert to the type expected by the `dropck_outlives`
+        // query. This should eventually be fixed by changing the
+        // *underlying query*.
+        let Canonical {
+            variables,
+            value:
+                ParamEnvAnd {
+                    param_env,
+                    value: DropckOutlives { dropped_ty },
+                },
+        } = canonicalized;
+        let canonicalized = Canonical {
+            variables,
+            value: param_env.and(dropped_ty),
+        };
+
+        tcx.dropck_outlives(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        lifted_query_result: &'a CanonicalizedQueryResult<'gcx, Self::QueryResult>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, Self::QueryResult>> {
+        // Identity cast; see `QueryTypeOp::shrink_to_tcx_lifetime` docs.
+        lifted_query_result
+    }
+}
+
+// Boilerplate: fold, lift, and stable-hash `DropckOutlives`
+// through its single `dropped_ty` field.
+BraceStructTypeFoldableImpl! {
+    impl<'tcx> TypeFoldable<'tcx> for DropckOutlives<'tcx> {
+        dropped_ty
+    }
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx> Lift<'tcx> for DropckOutlives<'a> {
+        type Lifted = DropckOutlives<'tcx>;
+        dropped_ty
+    }
+}
+
+impl_stable_hash_for! {
+    struct DropckOutlives<'tcx> { dropped_ty }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Predicate, TyCtxt};
+
+/// Query kernel asking that `predicate` hold under the accompanying
+/// `ParamEnv`.
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct ProvePredicate<'tcx> {
+    pub predicate: Predicate<'tcx>,
+}
+
+impl<'tcx> ProvePredicate<'tcx> {
+    /// Builds the kernel; pair it with a `ParamEnv` to obtain a runnable type op.
+    pub fn new(predicate: Predicate<'tcx>) -> Self {
+        ProvePredicate { predicate }
+    }
+}
+
+impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for ProvePredicate<'tcx> {
+    type QueryResult = ();
+
+    fn try_fast_path(
+        _tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        _key: &ParamEnvAnd<'tcx, Self>,
+    ) -> Option<Self::QueryResult> {
+        // No shortcut: proving a predicate always goes through the query.
+        None
+    }
+
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+        tcx.type_op_prove_predicate(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, ()>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+        // Identity cast; see `QueryTypeOp::shrink_to_tcx_lifetime` docs.
+        v
+    }
+}
+
+// Boilerplate: fold, lift, and stable-hash `ProvePredicate`
+// through its single `predicate` field.
+BraceStructTypeFoldableImpl! {
+    impl<'tcx> TypeFoldable<'tcx> for ProvePredicate<'tcx> {
+        predicate,
+    }
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx> Lift<'tcx> for ProvePredicate<'a> {
+        type Lifted = ProvePredicate<'tcx>;
+        predicate,
+    }
+}
+
+impl_stable_hash_for! {
+    struct ProvePredicate<'tcx> { predicate }
+}
--- /dev/null
+// Copyright 2016 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use infer::canonical::{Canonical, Canonicalized, CanonicalizedQueryResult, QueryResult};
+use traits::query::Fallible;
+use ty::{ParamEnvAnd, Ty, TyCtxt};
+
+/// Query kernel asking that `sub` be a subtype of `sup` under the
+/// accompanying `ParamEnv`.
+#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
+pub struct Subtype<'tcx> {
+    pub sub: Ty<'tcx>,
+    pub sup: Ty<'tcx>,
+}
+
+impl<'tcx> Subtype<'tcx> {
+    /// Builds the kernel; pair it with a `ParamEnv` to obtain a runnable type op.
+    pub fn new(sub: Ty<'tcx>, sup: Ty<'tcx>) -> Self {
+        Self {
+            sub,
+            sup,
+        }
+    }
+}
+
+impl<'gcx: 'tcx, 'tcx> super::QueryTypeOp<'gcx, 'tcx> for Subtype<'tcx> {
+    type QueryResult = ();
+
+    fn try_fast_path(_tcx: TyCtxt<'_, 'gcx, 'tcx>, key: &ParamEnvAnd<'tcx, Self>) -> Option<()> {
+        // Every type is trivially a subtype of itself.
+        if key.value.sub == key.value.sup {
+            Some(())
+        } else {
+            None
+        }
+    }
+
+    fn perform_query(
+        tcx: TyCtxt<'_, 'gcx, 'tcx>,
+        canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Self>>,
+    ) -> Fallible<CanonicalizedQueryResult<'gcx, ()>> {
+        tcx.type_op_subtype(canonicalized)
+    }
+
+    fn shrink_to_tcx_lifetime(
+        v: &'a CanonicalizedQueryResult<'gcx, ()>,
+    ) -> &'a Canonical<'tcx, QueryResult<'tcx, ()>> {
+        // Identity cast; see `QueryTypeOp::shrink_to_tcx_lifetime` docs.
+        v
+    }
+}
+
+// Boilerplate: fold, lift, and stable-hash `Subtype` field-by-field.
+BraceStructTypeFoldableImpl! {
+    impl<'tcx> TypeFoldable<'tcx> for Subtype<'tcx> {
+        sub,
+        sup,
+    }
+}
+
+BraceStructLiftImpl! {
+    impl<'a, 'tcx> Lift<'tcx> for Subtype<'a> {
+        type Lifted = Subtype<'tcx>;
+        sub,
+        sup,
+    }
+}
+
+impl_stable_hash_for! {
+    struct Subtype<'tcx> { sub, sup }
+}
//! See [rustc guide] for more info on how this works.
//!
-//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/trait-resolution.html#selection
+//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/traits/resolution.html#selection
use self::SelectionCandidate::*;
use self::EvaluationResult::*;
// candidates. See [rustc guide] for more details.
//
// [rustc guide]:
- // https://rust-lang-nursery.github.io/rustc-guide/trait-resolution.html#candidate-assembly
+ // https://rust-lang-nursery.github.io/rustc-guide/traits/resolution.html#candidate-assembly
fn candidate_from_obligation<'o>(&mut self,
stack: &TraitObligationStack<'o, 'tcx>)
// type error. See [rustc guide] for more details.
//
// [rustc guide]:
- // https://rust-lang-nursery.github.io/rustc-guide/trait-resolution.html#confirmation
+ // https://rust-lang-nursery.github.io/rustc-guide/traits/resolution.html#confirmation
fn confirm_candidate(&mut self,
obligation: &TraitObligation<'tcx>,
//! See the [rustc guide] for a bit more detail on how specialization
//! fits together with the rest of the trait machinery.
//!
-//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/trait-specialization.html
+//! [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/traits/specialization.html
use super::{SelectionContext, FulfillmentContext};
use super::util::impl_trait_ref_and_oblig;
let trait_def = tcx.trait_def(trait_def_id);
let ancestors = trait_def.ancestors(tcx, impl_data.impl_def_id);
- match ancestors.defs(tcx, item.name, item.kind, trait_def_id).next() {
+ match ancestors.defs(tcx, item.ident, item.kind, trait_def_id).next() {
Some(node_item) => {
let substs = tcx.infer_ctxt().enter(|infcx| {
let param_env = ty::ParamEnv::reveal_all();
use ty::{self, TyCtxt, TypeFoldable};
use ty::fast_reject::{self, SimplifiedType};
use rustc_data_structures::sync::Lrc;
-use syntax::ast::Name;
+use syntax::ast::Ident;
use util::captures::Captures;
use util::nodemap::{DefIdMap, FxHashMap};
pub fn defs(
self,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
- trait_item_name: Name,
+ trait_item_name: Ident,
trait_item_kind: ty::AssociatedKind,
trait_def_id: DefId,
) -> impl Iterator<Item = NodeItem<ty::AssociatedItem>> + Captures<'gcx> + Captures<'tcx> + 'a {
self.flat_map(move |node| {
node.items(tcx).filter(move |impl_item| {
impl_item.kind == trait_item_kind &&
- tcx.hygienic_eq(impl_item.name, trait_item_name, trait_def_id)
+ tcx.hygienic_eq(impl_item.ident, trait_item_name, trait_def_id)
}).map(move |item| NodeItem { node: node, item: item })
})
}
})
}
super::TraitNotObjectSafe(def_id) => Some(super::TraitNotObjectSafe(def_id)),
- super::ConstEvalFailure(ref err) => tcx.lift(err).map(super::ConstEvalFailure),
+ super::ConstEvalFailure(ref err) => tcx.lift(&**err).map(|err| super::ConstEvalFailure(
+ err.into(),
+ )),
super::Overflow => bug!(), // FIXME: ape ConstEvalFailure?
}
}
fn position(&self) -> usize;
}
-impl<'buf> TyEncoder for opaque::Encoder<'buf> {
+impl TyEncoder for opaque::Encoder {
#[inline]
fn position(&self) -> usize {
self.position()
use std::cmp::Ordering;
use std::collections::hash_map::{self, Entry};
use std::hash::{Hash, Hasher};
+use std::fmt;
use std::mem;
use std::ops::Deref;
use std::iter;
/// contain the TypeVariants key or if the address of the interned
/// pointer differs. The latter case is possible if a primitive type,
/// e.g. `()` or `u8`, was interned in a different context.
-pub trait Lift<'tcx> {
- type Lifted: 'tcx;
+pub trait Lift<'tcx>: fmt::Debug {
+ type Lifted: fmt::Debug + 'tcx;
fn lift_to_tcx<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Option<Self::Lifted>;
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use ty::subst::Substs;
use ty::{self, Ty, TypeFlags, TypeFoldable};
fn add_const(&mut self, constant: &ty::Const) {
self.add_ty(constant.ty);
- match constant.val {
- ConstVal::Value(_) => {}
- ConstVal::Unevaluated(_, substs) => {
- self.add_flags(TypeFlags::HAS_PROJECTION);
- self.add_substs(substs);
- }
+ if let ConstValue::Unevaluated(_, substs) = constant.val {
+ self.add_flags(TypeFlags::HAS_PROJECTION);
+ self.add_substs(substs);
}
}
//! These methods return true to indicate that the visitor has found what it is looking for
//! and does not need to visit anything else.
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use hir::def_id::DefId;
use ty::{self, Binder, Ty, TyCtxt, TypeFlags};
fn has_late_bound_regions(&self) -> bool {
self.has_type_flags(TypeFlags::HAS_RE_LATE_BOUND)
}
+
+ /// A visitor that does not recurse into types, works like `fn walk_shallow` in `Ty`.
+ fn visit_tys_shallow(&self, visit: impl FnMut(Ty<'tcx>) -> bool) -> bool {
+
+ pub struct Visitor<F>(F);
+
+ impl<'tcx, F: FnMut(Ty<'tcx>) -> bool> TypeVisitor<'tcx> for Visitor<F> {
+ fn visit_ty(&mut self, ty: Ty<'tcx>) -> bool {
+ self.0(ty)
+ }
+ }
+
+ self.visit_with(&mut Visitor(visit))
+ }
}
/// The TypeFolder trait defines the actual *folding*. There is a
}
fn visit_const(&mut self, c: &'tcx ty::Const<'tcx>) -> bool {
- if let ConstVal::Unevaluated(..) = c.val {
+ if let ConstValue::Unevaluated(..) = c.val {
let projection_flags = TypeFlags::HAS_NORMALIZABLE_PROJECTION |
TypeFlags::HAS_PROJECTION;
if projection_flags.intersects(self.flags) {
use hir::svh::Svh;
use ich::Fingerprint;
use ich::StableHashingContext;
-use infer::canonical::{Canonical, Canonicalize};
+use infer::canonical::Canonical;
use middle::lang_items::{FnTraitLangItem, FnMutTraitLangItem, FnOnceTraitLangItem};
use middle::privacy::AccessLevels;
use middle::resolve_lifetime::ObjectLifetimeDefault;
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
pub struct AssociatedItem {
pub def_id: DefId,
- pub name: Name,
+ pub ident: Ident,
pub kind: AssociatedKind,
pub vis: Visibility,
pub defaultness: hir::Defaultness,
// regions just fine, showing `fn(&MyType)`.
format!("{}", tcx.fn_sig(self.def_id).skip_binder())
}
- ty::AssociatedKind::Type => format!("type {};", self.name.to_string()),
+ ty::AssociatedKind::Type => format!("type {};", self.ident),
ty::AssociatedKind::Const => {
- format!("const {}: {:?};", self.name.to_string(), tcx.type_of(self.def_id))
+ format!("const {}: {:?};", self.ident, tcx.type_of(self.def_id))
}
}
}
pub type CanonicalTy<'gcx> = Canonical<'gcx, Ty<'gcx>>;
-impl <'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for Ty<'tcx> {
- type Canonicalized = CanonicalTy<'gcx>;
-
- fn intern(_gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, Self::Lifted>) -> Self::Canonicalized {
- value
- }
-}
-
extern {
/// A dummy type used to force Slice to by unsized without requiring fat pointers
type OpaqueSliceContents;
})
} else {
info!("invalid enum discriminant: {:#?}", val);
- ::middle::const_val::struct_error(
+ ::mir::interpret::struct_error(
tcx.at(tcx.def_span(expr_did)),
"constant evaluation of enum discriminant resulted in non-integer",
).emit();
};
AssociatedItem {
- name: trait_item_ref.name,
+ ident: trait_item_ref.ident,
kind,
// Visibility of trait items is inherited from their traits.
vis: Visibility::from_hir(parent_vis, trait_item_ref.id.node_id, self),
hir::AssociatedItemKind::Type => (ty::AssociatedKind::Type, false),
};
- ty::AssociatedItem {
- name: impl_item_ref.name,
+ AssociatedItem {
+ ident: impl_item_ref.ident,
kind,
// Visibility of trait impl items doesn't matter.
vis: ty::Visibility::from_hir(&impl_item_ref.vis, impl_item_ref.id.node_id, self),
pub fn associated_items(
self,
def_id: DefId,
- ) -> impl Iterator<Item = ty::AssociatedItem> + 'a {
+ ) -> impl Iterator<Item = AssociatedItem> + 'a {
let def_ids = self.associated_item_def_ids(def_id);
Box::new((0..def_ids.len()).map(move |i| self.associated_item(def_ids[i])))
- as Box<dyn Iterator<Item = ty::AssociatedItem> + 'a>
+ as Box<dyn Iterator<Item = AssociatedItem> + 'a>
}
/// Returns true if the impls are the same polarity and are implementing
// Hygienically compare a use-site name (`use_name`) for a field or an associated item with its
// supposed definition name (`def_name`). The method also needs `DefId` of the supposed
// definition's parent/scope to perform comparison.
- pub fn hygienic_eq(self, use_name: Name, def_name: Name, def_parent_def_id: DefId) -> bool {
- let (use_ident, def_ident) = (use_name.to_ident(), def_name.to_ident());
- self.adjust_ident(use_ident, def_parent_def_id, DUMMY_NODE_ID).0 == def_ident
+ pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool {
+ self.adjust_ident(use_name, def_parent_def_id, DUMMY_NODE_ID).0 == def_name.modern()
}
pub fn adjust_ident(self, mut ident: Ident, scope: DefId, block: NodeId) -> (Ident, DefId) {
use dep_graph::SerializedDepNodeIndex;
use dep_graph::DepNode;
use hir::def_id::{CrateNum, DefId, DefIndex};
-use mir::interpret::{GlobalId, ConstValue};
-use traits::query::{CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal};
+use mir::interpret::GlobalId;
+use traits::query::{
+ CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal, CanonicalTypeOpEqGoal,
+ CanonicalTypeOpNormalizeGoal, CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpSubtypeGoal,
+};
use ty::{self, ParamEnvAnd, Ty, TyCtxt};
use ty::subst::Substs;
use ty::query::queries;
}
}
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_eq<'tcx> {
+ fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpEqGoal<'tcx>) -> String {
+ format!("evaluating `type_op_eq` `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_subtype<'tcx> {
+ fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpSubtypeGoal<'tcx>) -> String {
+ format!("evaluating `type_op_subtype` `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_prove_predicate<'tcx> {
+ fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpProvePredicateGoal<'tcx>) -> String {
+ format!("evaluating `type_op_prove_predicate` `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_ty<'tcx> {
+ fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>) -> String {
+ format!("normalizing `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_predicate<'tcx> {
+ fn describe(
+ _tcx: TyCtxt,
+ goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::Predicate<'tcx>>,
+ ) -> String {
+ format!("normalizing `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_poly_fn_sig<'tcx> {
+ fn describe(
+ _tcx: TyCtxt,
+ goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::PolyFnSig<'tcx>>,
+ ) -> String {
+ format!("normalizing `{:?}`", goal)
+ }
+}
+
+impl<'tcx> QueryDescription<'tcx> for queries::type_op_normalize_fn_sig<'tcx> {
+ fn describe(_tcx: TyCtxt, goal: CanonicalTypeOpNormalizeGoal<'tcx, ty::FnSig<'tcx>>) -> String {
+ format!("normalizing `{:?}`", goal)
+ }
+}
+
impl<'tcx> QueryDescription<'tcx> for queries::is_copy_raw<'tcx> {
fn describe(_tcx: TyCtxt, env: ty::ParamEnvAnd<'tcx, Ty<'tcx>>) -> String {
format!("computing whether `{}` is `Copy`", env.value)
}
impl<'tcx> QueryDescription<'tcx> for queries::const_value_to_allocation<'tcx> {
- fn describe(_tcx: TyCtxt, (val, ty): (ConstValue<'tcx>, Ty<'tcx>)) -> String {
- format!("converting value `{:?}` ({}) to an allocation", val, ty)
+ fn describe(_tcx: TyCtxt, val: &'tcx ty::Const<'tcx>) -> String {
+ format!("converting value `{:?}` to an allocation", val)
}
}
//! Defines the set of legal keys that can be used in queries.
+use infer::canonical::Canonical;
use hir::def_id::{CrateNum, DefId, LOCAL_CRATE, DefIndex};
-use traits::query::{CanonicalPredicateGoal, CanonicalProjectionGoal, CanonicalTyGoal};
use ty::{self, Ty, TyCtxt};
use ty::subst::Substs;
use ty::fast_reject::SimplifiedType;
}
}
-impl<'tcx> Key for (mir::interpret::ConstValue<'tcx>, Ty<'tcx>) {
+impl<'tcx> Key for &'tcx ty::Const<'tcx> {
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
}
}
-impl<'tcx> Key for CanonicalProjectionGoal<'tcx> {
- fn query_crate(&self) -> CrateNum {
- LOCAL_CRATE
- }
-
- fn default_span(&self, _tcx: TyCtxt) -> Span {
- DUMMY_SP
- }
-}
-
-impl<'tcx> Key for CanonicalTyGoal<'tcx> {
- fn query_crate(&self) -> CrateNum {
- LOCAL_CRATE
- }
-
- fn default_span(&self, _tcx: TyCtxt) -> Span {
- DUMMY_SP
- }
-}
-
-impl<'tcx> Key for CanonicalPredicateGoal<'tcx> {
+/// Canonical query goals correspond to abstract trait operations that
+/// are not tied to any crate in particular.
+impl<'tcx, T> Key for Canonical<'tcx, T>
+where
+ T: Debug + Hash + Clone + Eq,
+{
fn query_crate(&self) -> CrateNum {
LOCAL_CRATE
}
use middle::stability::{self, DeprecationEntry};
use middle::lang_items::{LanguageItems, LangItem};
use middle::exported_symbols::{SymbolExportLevel, ExportedSymbol};
-use middle::const_val::EvalResult;
+use mir::interpret::ConstEvalResult;
use mir::mono::{CodegenUnit, Stats};
use mir;
-use mir::interpret::{GlobalId, Allocation, ConstValue};
+use mir::interpret::{GlobalId, Allocation};
use session::{CompileResult, CrateDisambiguator};
use session::config::OutputFilenames;
use traits::{self, Vtable};
use traits::query::{CanonicalPredicateGoal, CanonicalProjectionGoal,
- CanonicalTyGoal, NoSolution};
+ CanonicalTyGoal, CanonicalTypeOpEqGoal, CanonicalTypeOpSubtypeGoal,
+ CanonicalTypeOpProvePredicateGoal, CanonicalTypeOpNormalizeGoal, NoSolution};
use traits::query::dropck_outlives::{DtorckConstraint, DropckOutlivesResult};
use traits::query::normalize::NormalizationResult;
use traits::specialization_graph;
/// Results of evaluating const items or constants embedded in
/// other items (such as enum variant explicit discriminants).
[] fn const_eval: const_eval_dep_node(ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>)
- -> EvalResult<'tcx>,
+ -> ConstEvalResult<'tcx>,
    /// Converts a constant value to a constant allocation
[] fn const_value_to_allocation: const_value_to_allocation(
- (ConstValue<'tcx>, Ty<'tcx>)
+ &'tcx ty::Const<'tcx>
) -> &'tcx Allocation,
[] fn check_match: CheckMatch(DefId)
CanonicalPredicateGoal<'tcx>
) -> Result<traits::EvaluationResult, traits::OverflowError>,
+ /// Do not call this query directly: part of the `Eq` type-op
+ [] fn type_op_eq: TypeOpEq(
+ CanonicalTypeOpEqGoal<'tcx>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `Subtype` type-op
+ [] fn type_op_subtype: TypeOpSubtype(
+ CanonicalTypeOpSubtypeGoal<'tcx>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `ProvePredicate` type-op
+ [] fn type_op_prove_predicate: TypeOpProvePredicate(
+ CanonicalTypeOpProvePredicateGoal<'tcx>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ()>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `Normalize` type-op
+ [] fn type_op_normalize_ty: TypeOpNormalizeTy(
+ CanonicalTypeOpNormalizeGoal<'tcx, Ty<'tcx>>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, Ty<'tcx>>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `Normalize` type-op
+ [] fn type_op_normalize_predicate: TypeOpNormalizePredicate(
+ CanonicalTypeOpNormalizeGoal<'tcx, ty::Predicate<'tcx>>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::Predicate<'tcx>>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `Normalize` type-op
+ [] fn type_op_normalize_poly_fn_sig: TypeOpNormalizePolyFnSig(
+ CanonicalTypeOpNormalizeGoal<'tcx, ty::PolyFnSig<'tcx>>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::PolyFnSig<'tcx>>>>,
+ NoSolution,
+ >,
+
+ /// Do not call this query directly: part of the `Normalize` type-op
+ [] fn type_op_normalize_fn_sig: TypeOpNormalizeFnSig(
+ CanonicalTypeOpNormalizeGoal<'tcx, ty::FnSig<'tcx>>
+ ) -> Result<
+ Lrc<Canonical<'tcx, canonical::QueryResult<'tcx, ty::FnSig<'tcx>>>>,
+ NoSolution,
+ >,
+
[] fn substitute_normalize_and_test_predicates:
substitute_normalize_and_test_predicates_node((DefId, &'tcx Substs<'tcx>)) -> bool,
}
fn const_value_to_allocation<'tcx>(
- (val, ty): (ConstValue<'tcx>, Ty<'tcx>)
+ val: &'tcx ty::Const<'tcx>,
) -> DepConstructor<'tcx> {
- DepConstructor::ConstValueToAllocation { val, ty }
+ DepConstructor::ConstValueToAllocation { val }
}
fn type_param_predicates<'tcx>((item_id, param_id): (DefId, DefId)) -> DepConstructor<'tcx> {
let len = BytePos::decode(self)?;
let file_lo = self.file_index_to_file(file_lo_index);
- let lo = file_lo.lines.borrow()[line_lo - 1] + col_lo;
+ let lo = file_lo.lines[line_lo - 1] + col_lo;
let hi = lo + len;
let expn_info_tag = u8::decode(self)?;
}
impl<'enc, 'a, 'tcx> SpecializedEncoder<Fingerprint>
-for CacheEncoder<'enc, 'a, 'tcx, opaque::Encoder<'enc>>
+for CacheEncoder<'enc, 'a, 'tcx, opaque::Encoder>
{
fn specialized_encode(&mut self, f: &Fingerprint) -> Result<(), Self::Error> {
f.encode_opaque(&mut self.encoder)
impl UseSpecializedEncodable for IntEncodedWithFixedSize {}
impl UseSpecializedDecodable for IntEncodedWithFixedSize {}
-impl<'enc> SpecializedEncoder<IntEncodedWithFixedSize> for opaque::Encoder<'enc> {
+impl SpecializedEncoder<IntEncodedWithFixedSize> for opaque::Encoder {
fn specialized_encode(&mut self, x: &IntEncodedWithFixedSize) -> Result<(), Self::Error> {
let start_pos = self.position();
for i in 0 .. IntEncodedWithFixedSize::ENCODED_SIZE {
DepKind::NormalizeTyAfterErasingRegions |
DepKind::DropckOutlives |
DepKind::EvaluateObligation |
+ DepKind::TypeOpEq |
+ DepKind::TypeOpSubtype |
+ DepKind::TypeOpProvePredicate |
+ DepKind::TypeOpNormalizeTy |
+ DepKind::TypeOpNormalizePredicate |
+ DepKind::TypeOpNormalizePolyFnSig |
+ DepKind::TypeOpNormalizeFnSig |
DepKind::SubstituteNormalizeAndTestPredicates |
DepKind::InstanceDefSizeEstimate |
DepKind::ProgramClausesForEnv |
//! type equality, etc.
use hir::def_id::DefId;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use ty::subst::{Kind, UnpackedKind, Substs};
use ty::{self, Ty, TyCtxt, TypeFoldable};
use ty::error::{ExpectedFound, TypeError};
return Ok(s);
}
match x.val {
- ConstVal::Unevaluated(def_id, substs) => {
+ ConstValue::Unevaluated(def_id, substs) => {
// FIXME(eddyb) get the right param_env.
let param_env = ty::ParamEnv::empty();
match tcx.lift_to_global(&substs) {
//! hand, though we've recently added some macros (e.g.,
//! `BraceStructLiftImpl!`) to help with the tedium.
-use middle::const_val::{self, ConstVal, ConstEvalErr};
+use mir::interpret::{ConstValue, ConstEvalErr};
use ty::{self, Lift, Ty, TyCtxt};
use ty::fold::{TypeFoldable, TypeFolder, TypeVisitor};
use rustc_data_structures::accumulate_vec::AccumulateVec;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
-use rustc_data_structures::sync::Lrc;
use mir::interpret;
use std::rc::Rc;
impl<'a, 'tcx> Lift<'tcx> for ConstEvalErr<'a> {
type Lifted = ConstEvalErr<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
- tcx.lift(&*self.kind).map(|kind| {
+ tcx.lift(&self.error).map(|error| {
ConstEvalErr {
span: self.span,
- kind: Lrc::new(kind),
+ stacktrace: self.stacktrace.clone(),
+ error,
}
})
}
PathNotFound(ref v) => PathNotFound(v.clone()),
UnimplementedTraitSelection => UnimplementedTraitSelection,
TypeckError => TypeckError,
- ReferencedConstant(ref err) => ReferencedConstant(tcx.lift(err)?),
+ TooGeneric => TooGeneric,
+ CheckMatchError => CheckMatchError,
+ ReferencedConstant(ref err) => ReferencedConstant(tcx.lift(&**err)?.into()),
OverflowNeg => OverflowNeg,
Overflow(op) => Overflow(op),
DivisionByZero => DivisionByZero,
}
}
-impl<'a, 'tcx> Lift<'tcx> for const_val::ErrKind<'a> {
- type Lifted = const_val::ErrKind<'tcx>;
- fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
- use middle::const_val::ErrKind::*;
-
- Some(match *self {
- CouldNotResolve => CouldNotResolve,
- TypeckError => TypeckError,
- CheckMatchError => CheckMatchError,
- Miri(ref e, ref frames) => return tcx.lift(e).map(|e| Miri(e, frames.clone())),
- })
- }
-}
-
impl<'a, 'tcx> Lift<'tcx> for ty::layout::LayoutError<'a> {
type Lifted = ty::layout::LayoutError<'tcx>;
fn lift_to_tcx<'b, 'gcx>(&self, tcx: TyCtxt<'b, 'gcx, 'tcx>) -> Option<Self::Lifted> {
}
}
-impl<'tcx> TypeFoldable<'tcx> for ConstVal<'tcx> {
+impl<'tcx> TypeFoldable<'tcx> for ConstValue<'tcx> {
fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F) -> Self {
match *self {
- ConstVal::Value(v) => ConstVal::Value(v),
- ConstVal::Unevaluated(def_id, substs) => {
- ConstVal::Unevaluated(def_id, substs.fold_with(folder))
+ ConstValue::Scalar(v) => ConstValue::Scalar(v),
+ ConstValue::ScalarPair(a, b) => ConstValue::ScalarPair(a, b),
+ ConstValue::ByRef(alloc, offset) => ConstValue::ByRef(alloc, offset),
+ ConstValue::Unevaluated(def_id, substs) => {
+ ConstValue::Unevaluated(def_id, substs.fold_with(folder))
}
}
}
fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
match *self {
- ConstVal::Value(_) => false,
- ConstVal::Unevaluated(_, substs) => substs.visit_with(visitor),
+ ConstValue::Scalar(_) |
+ ConstValue::ScalarPair(_, _) |
+ ConstValue::ByRef(_, _) => false,
+ ConstValue::Unevaluated(_, substs) => substs.visit_with(visitor),
}
}
}
use hir::def_id::DefId;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use middle::region;
use polonius_engine::Atom;
use rustc_data_structures::indexed_vec::Idx;
use ty::{self, AdtDef, TypeFlags, Ty, TyCtxt, TypeFoldable};
use ty::{Slice, TyS, ParamEnvAnd, ParamEnv};
use util::captures::Captures;
-use mir::interpret::{Scalar, Pointer, Value, ConstValue};
+use mir::interpret::{Scalar, Pointer, Value};
use std::iter;
use std::cmp::Ordering;
use rustc_target::spec::abi;
-use syntax::ast::{self, Name};
+use syntax::ast::{self, Ident};
use syntax::symbol::{keywords, InternedString};
use serialize;
/// Construct a ProjectionTy by searching the trait from trait_ref for the
/// associated item named item_name.
pub fn from_ref_and_name(
- tcx: TyCtxt, trait_ref: ty::TraitRef<'tcx>, item_name: Name
+ tcx: TyCtxt, trait_ref: ty::TraitRef<'tcx>, item_name: Ident
) -> ProjectionTy<'tcx> {
let item_def_id = tcx.associated_items(trait_ref.def_id).find(|item| {
item.kind == ty::AssociatedKind::Type &&
- tcx.hygienic_eq(item_name, item.name, trait_ref.def_id)
+ tcx.hygienic_eq(item_name, item.ident, trait_ref.def_id)
}).unwrap().def_id;
ProjectionTy {
///
/// [1]: http://smallcultfollowing.com/babysteps/blog/2013/10/29/intermingled-parameter-lists/
/// [2]: http://smallcultfollowing.com/babysteps/blog/2013/11/04/intermingled-parameter-lists/
-/// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/trait-hrtb.html
+/// [rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/traits/hrtb.html
#[derive(Clone, PartialEq, Eq, Hash, Copy, RustcEncodable, RustcDecodable, PartialOrd, Ord)]
pub enum RegionKind {
// Region bound in a type or fn declaration which will be
}
}
+ pub fn is_impl_trait(&self) -> bool {
+ match self.sty {
+ TyAnon(..) => true,
+ _ => false,
+ }
+ }
+
pub fn ty_to_def_id(&self) -> Option<DefId> {
match self.sty {
TyDynamic(ref tt, ..) => tt.principal().map(|p| p.def_id()),
pub struct Const<'tcx> {
pub ty: Ty<'tcx>,
- pub val: ConstVal<'tcx>,
+ pub val: ConstValue<'tcx>,
}
impl<'tcx> Const<'tcx> {
ty: Ty<'tcx>,
) -> &'tcx Self {
tcx.mk_const(Const {
- val: ConstVal::Unevaluated(def_id, substs),
+ val: ConstValue::Unevaluated(def_id, substs),
ty,
})
}
#[inline]
- pub fn from_const_val(
+ pub fn from_const_value(
tcx: TyCtxt<'_, '_, 'tcx>,
- val: ConstVal<'tcx>,
+ val: ConstValue<'tcx>,
ty: Ty<'tcx>,
) -> &'tcx Self {
tcx.mk_const(Const {
})
}
- #[inline]
- pub fn from_const_value(
- tcx: TyCtxt<'_, '_, 'tcx>,
- val: ConstValue<'tcx>,
- ty: Ty<'tcx>,
- ) -> &'tcx Self {
- Self::from_const_val(tcx, ConstVal::Value(val), ty)
- }
-
#[inline]
pub fn from_byval_value(
tcx: TyCtxt<'_, '_, 'tcx>,
}
let ty = tcx.lift_to_global(&ty).unwrap();
let size = tcx.layout_of(ty).ok()?.size;
- match self.val {
- ConstVal::Value(val) => val.to_bits(size),
- _ => None,
- }
+ self.val.to_bits(size)
}
#[inline]
pub fn to_ptr(&self) -> Option<Pointer> {
- match self.val {
- ConstVal::Value(val) => val.to_ptr(),
- _ => None,
- }
+ self.val.to_ptr()
}
#[inline]
pub fn to_byval_value(&self) -> Option<Value> {
- match self.val {
- ConstVal::Value(val) => val.to_byval_value(),
- _ => None,
- }
+ self.val.to_byval_value()
}
#[inline]
pub fn to_scalar(&self) -> Option<Scalar> {
- match self.val {
- ConstVal::Value(val) => val.to_scalar(),
- _ => None,
- }
+ self.val.to_scalar()
}
#[inline]
assert_eq!(self.ty, ty.value);
let ty = tcx.lift_to_global(&ty).unwrap();
let size = tcx.layout_of(ty).ok()?.size;
- match self.val {
- ConstVal::Value(val) => val.to_bits(size),
- _ => None,
- }
+ self.val.to_bits(size)
}
#[inline]
self.def_key(def_id).disambiguated_data.data == DefPathData::ClosureExpr
}
+ /// True if this def-id refers to the implicit constructor for
+ /// a tuple struct like `struct Foo(u32)`.
+ pub fn is_struct_constructor(self, def_id: DefId) -> bool {
+ self.def_key(def_id).disambiguated_data.data == DefPathData::StructCtor
+ }
+
/// Given the `DefId` of a fn or closure, returns the `DefId` of
/// the innermost fn item that the closure is contained within.
/// This is a significant def-id because, when we do
//! An iterator over the type substructure.
//! WARNING: this does not keep track of the region depth.
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use ty::{self, Ty};
use rustc_data_structures::small_vec::SmallVec;
use rustc_data_structures::accumulate_vec::IntoIter as AccIntoIter;
}
fn push_const<'tcx>(stack: &mut TypeWalkerStack<'tcx>, constant: &'tcx ty::Const<'tcx>) {
- match constant.val {
- ConstVal::Value(_) => {}
- ConstVal::Unevaluated(_, substs) => {
- stack.extend(substs.types().rev());
- }
+ if let ConstValue::Unevaluated(_, substs) = constant.val {
+ stack.extend(substs.types().rev());
}
stack.push(constant.ty);
}
// except according to those terms.
use hir::def_id::DefId;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use infer::InferCtxt;
use ty::subst::Substs;
use traits;
/// into `self.out`.
fn compute_const(&mut self, constant: &'tcx ty::Const<'tcx>) {
self.require_sized(constant.ty, traits::ConstSized);
- match constant.val {
- ConstVal::Value(_) => {}
- ConstVal::Unevaluated(def_id, substs) => {
- let obligations = self.nominal_obligations(def_id, substs);
- self.out.extend(obligations);
-
- let predicate = ty::Predicate::ConstEvaluatable(def_id, substs);
- let cause = self.cause(traits::MiscObligation);
- self.out.push(traits::Obligation::new(cause,
- self.param_env,
- predicate));
- }
+ if let ConstValue::Unevaluated(def_id, substs) = constant.val {
+ let obligations = self.nominal_obligations(def_id, substs);
+ self.out.extend(obligations);
+
+ let predicate = ty::Predicate::ConstEvaluatable(def_id, substs);
+ let cause = self.cause(traits::MiscObligation);
+ self.out.push(traits::Obligation::new(cause,
+ self.param_env,
+ predicate));
}
}
use hir::def_id::DefId;
use hir::map::definitions::DefPathData;
-use middle::const_val::ConstVal;
+use mir::interpret::ConstValue;
use middle::region::{self, BlockRemainder};
use ty::subst::{self, Subst};
use ty::{BrAnon, BrEnv, BrFresh, BrNamed};
ty::tls::with(|tcx|
print!(f, self,
write("{}=",
- tcx.associated_item(projection.projection_ty.item_def_id).name),
+ tcx.associated_item(projection.projection_ty.item_def_id).ident),
print_display(projection.ty))
)?;
}
TyParam(ref param_ty) => write!(f, "{}", param_ty),
TyAdt(def, substs) => cx.parameterized(f, substs, def.did, &[]),
TyDynamic(data, r) => {
- data.print(f, cx)?;
let r = r.print_to_string(cx);
if !r.is_empty() {
- write!(f, " + {}", r)
+ write!(f, "(")?;
+ }
+ write!(f, "dyn ")?;
+ data.print(f, cx)?;
+ if !r.is_empty() {
+ write!(f, " + {})", r)
} else {
Ok(())
}
TyArray(ty, sz) => {
print!(f, cx, write("["), print(ty), write("; "))?;
match sz.val {
- ConstVal::Value(..) => ty::tls::with(|tcx| {
- write!(f, "{}", sz.unwrap_usize(tcx))
- })?,
- ConstVal::Unevaluated(_def_id, _substs) => {
+ ConstValue::Unevaluated(_def_id, _substs) => {
write!(f, "_")?;
}
+ _ => ty::tls::with(|tcx| {
+ write!(f, "{}", sz.unwrap_usize(tcx))
+ })?,
}
write!(f, "]")
}
// parameterized(f, self.substs, self.item_def_id, &[])
// (which currently ICEs).
let (trait_ref, item_name) = ty::tls::with(|tcx|
- (self.trait_ref(tcx), tcx.associated_item(self.item_def_id).name)
+ (self.trait_ref(tcx), tcx.associated_item(self.item_def_id).ident)
);
print!(f, cx, print_debug(trait_ref), write("::{}", item_name))
}
rustc_target = { path = "../librustc_target" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
+log = "0.4"
use rustc::middle::allocator::AllocatorKind;
use rustc_errors;
-use syntax::ast::{Attribute, Crate, LitKind, StrStyle};
-use syntax::ast::{Arg, FnHeader, Generics, Mac, Mutability, Ty, Unsafety};
-use syntax::ast::{self, Expr, Ident, Item, ItemKind, TyKind, VisibilityKind};
-use syntax::attr;
-use syntax::codemap::respan;
-use syntax::codemap::{ExpnInfo, MacroAttribute};
-use syntax::ext::base::ExtCtxt;
-use syntax::ext::base::Resolver;
-use syntax::ext::build::AstBuilder;
-use syntax::ext::expand::ExpansionConfig;
-use syntax::ext::hygiene::{self, Mark, SyntaxContext};
-use syntax::fold::{self, Folder};
-use syntax::parse::ParseSess;
-use syntax::ptr::P;
-use syntax::symbol::Symbol;
-use syntax::util::small_vector::SmallVector;
-use syntax_pos::{Span, DUMMY_SP};
+use syntax::{
+ ast::{
+ self, Arg, Attribute, Crate, Expr, FnHeader, Generics, Ident, Item, ItemKind,
+ LitKind, Mac, Mod, Mutability, StrStyle, Ty, TyKind, Unsafety, VisibilityKind,
+ },
+ attr,
+ codemap::{
+ respan, ExpnInfo, MacroAttribute,
+ },
+ ext::{
+ base::{ExtCtxt, Resolver},
+ build::AstBuilder,
+ expand::ExpansionConfig,
+ hygiene::{self, Mark, SyntaxContext},
+ },
+ fold::{self, Folder},
+ parse::ParseSess,
+ ptr::P,
+ symbol::Symbol,
+ util::small_vector::SmallVector,
+};
+use syntax_pos::Span;
use {AllocatorMethod, AllocatorTy, ALLOCATOR_METHODS};
sess: &ParseSess,
resolver: &mut Resolver,
krate: Crate,
+ crate_name: String,
handler: &rustc_errors::Handler,
) -> ast::Crate {
ExpandAllocatorDirectives {
sess,
resolver,
found: false,
+ crate_name: Some(crate_name),
+ in_submod: -1, // -1 to account for the "root" module
}.fold_crate(krate)
}
handler: &'a rustc_errors::Handler,
sess: &'a ParseSess,
resolver: &'a mut Resolver,
+ crate_name: Option<String>,
+
+ // For now, we disallow `global_allocator` in submodules because hygiene is hard. Keep track of
+ // whether we are in a submodule or not. If `in_submod > 0` we are in a submodule.
+ in_submod: isize,
}
impl<'a> Folder for ExpandAllocatorDirectives<'a> {
fn fold_item(&mut self, item: P<Item>) -> SmallVector<P<Item>> {
+ debug!("in submodule {}", self.in_submod);
+
let name = if attr::contains_name(&item.attrs, "global_allocator") {
"global_allocator"
} else {
}
}
+ if self.in_submod > 0 {
+ self.handler
+ .span_err(item.span, "`global_allocator` cannot be used in submodules");
+ return SmallVector::one(item);
+ }
+
if self.found {
- self.handler.span_err(
- item.span,
- "cannot define more than one \
- #[global_allocator]",
- );
+ self.handler
+ .span_err(item.span, "cannot define more than one #[global_allocator]");
return SmallVector::one(item);
}
self.found = true;
+ // Create a fresh Mark for the new macro expansion we are about to do
let mark = Mark::fresh(Mark::root());
mark.set_expn_info(ExpnInfo {
- call_site: DUMMY_SP,
+ call_site: item.span, // use the call site of the static
def_site: None,
format: MacroAttribute(Symbol::intern(name)),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
+
+ // Tie the span to the macro expansion info we just created
let span = item.span.with_ctxt(SyntaxContext::empty().apply_mark(mark));
- let ecfg = ExpansionConfig::default(name.to_string());
+
+ // Create an expansion config
+ let ecfg = ExpansionConfig::default(self.crate_name.take().unwrap());
+
+ // Generate a bunch of new items using the AllocFnFactory
let mut f = AllocFnFactory {
span,
kind: AllocatorKind::Global,
core: Ident::from_str("core"),
cx: ExtCtxt::new(self.sess, ecfg, self.resolver),
};
+
+ // We will generate a new submodule. To `use` the static from that module, we need to get
+ // the `super::...` path.
let super_path = f.cx.path(f.span, vec![Ident::from_str("super"), f.global]);
+
+ // Generate the items in the submodule
let mut items = vec![
+ // import `core` to use allocators
f.cx.item_extern_crate(f.span, f.core),
+ // `use` the `global_allocator` in `super`
f.cx.item_use_simple(
f.span,
respan(f.span.shrink_to_lo(), VisibilityKind::Inherited),
super_path,
),
];
- for method in ALLOCATOR_METHODS {
- items.push(f.allocator_fn(method));
- }
+
+ // Add the allocator methods to the submodule
+ items.extend(
+ ALLOCATOR_METHODS
+ .iter()
+ .map(|method| f.allocator_fn(method)),
+ );
+
+ // Generate the submodule itself
let name = f.kind.fn_name("allocator_abi");
let allocator_abi = Ident::with_empty_ctxt(Symbol::gensym(&name));
let module = f.cx.item_mod(span, span, allocator_abi, Vec::new(), items);
let module = f.cx.monotonic_expander().fold_item(module).pop().unwrap();
- let mut ret = SmallVector::new();
+ // Return the item and new submodule
+ let mut ret = SmallVector::with_capacity(2);
ret.push(item);
ret.push(module);
+
return ret;
}
+ // If we enter a submodule, take note.
+ fn fold_mod(&mut self, m: Mod) -> Mod {
+ debug!("enter submodule");
+ self.in_submod += 1;
+ let ret = fold::noop_fold_mod(m, self);
+ self.in_submod -= 1;
+ debug!("exit submodule");
+ ret
+ }
+
+ // `fold_mac` is disabled by default. Enable it here.
fn fold_mac(&mut self, mac: Mac) -> Mac {
fold::noop_fold_mac(mac, self)
}
#![feature(rustc_private)]
+#[macro_use] extern crate log;
extern crate rustc;
extern crate rustc_errors;
extern crate rustc_target;
} else {
loss = Some(match hex_value {
0 => Loss::ExactlyZero,
- 1...7 => Loss::LessThanHalf,
+ 1..=7 => Loss::LessThanHalf,
8 => Loss::ExactlyHalf,
- 9...15 => Loss::MoreThanHalf,
+ 9..=15 => Loss::MoreThanHalf,
_ => unreachable!(),
});
}
> WARNING: This README is more or less obsolete, and will be removed
> soon! The new system is described in the [rustc guide].
-[rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/mir-borrowck.html
+[rustc guide]: https://rust-lang-nursery.github.io/rustc-guide/mir/borrowck.html
This pass has the job of enforcing memory safety. This is a subtle
topic. These docs aim to explain both the practice and the theory
cmt: &'c mc::cmt_<'tcx>) {
let source = get_pattern_source(bccx.tcx,move_pat);
let pat_span_path_opt = match move_pat.node {
- PatKind::Binding(_, _, ref path1, _) => {
+ PatKind::Binding(_, _, ident, _) => {
Some(MovePlace {
span: move_pat.span,
- name: path1.node,
+ name: ident.name,
pat_source: source,
})
}
let mut err = self.cannot_act_on_moved_value(use_span,
verb,
msg,
- &format!("{}", nl),
+ Some(format!("{}", nl)),
Origin::Ast);
let need_note = match lp.ty.sty {
ty::TypeVariants::TyClosure(id, _) => {
let tcx = self.bccx.tcx;
let mut mutables = FxHashMap();
for p in pats {
- p.each_binding(|_, hir_id, span, path1| {
- let name = path1.node;
-
+ p.each_binding(|_, hir_id, span, ident| {
// Skip anything that looks like `_foo`
- if name.as_str().starts_with("_") {
+ if ident.as_str().starts_with("_") {
return;
}
_ => return,
}
- mutables.entry(name).or_insert(Vec::new()).push((hir_id, span));
+ mutables.entry(ident.name).or_insert(Vec::new()).push((hir_id, span));
});
}
}
fn u32(&mut self, val: u32) {
- let at = self.data.len();
- leb128::write_u32_leb128(&mut self.data, at, val);
+ leb128::write_u32_leb128(&mut self.data, val);
}
fn byte(&mut self, val: u8) {
fn fetch_wasm_section(tcx: TyCtxt, id: DefId) -> (String, Vec<u8>) {
use rustc::mir::interpret::GlobalId;
- use rustc::middle::const_val::ConstVal;
info!("loading wasm section {:?}", id);
};
let param_env = ty::ParamEnv::reveal_all();
let val = tcx.const_eval(param_env.and(cid)).unwrap();
-
- let const_val = match val.val {
- ConstVal::Value(val) => val,
- ConstVal::Unevaluated(..) => bug!("should be evaluated"),
- };
-
- let alloc = tcx.const_value_to_allocation((const_val, val.ty));
+ let alloc = tcx.const_value_to_allocation(val);
(section.to_string(), alloc.bytes.clone())
}
"##,
}
-
-
-register_diagnostics! {
- E0558
-}
// except according to those terms.
use llvm::{self, ValueRef};
-use rustc::middle::const_val::{ConstVal, ConstEvalErr};
+use rustc::mir::interpret::ConstEvalErr;
use rustc_mir::interpret::{read_target_uint, const_val_field};
use rustc::hir::def_id::DefId;
use rustc::mir;
use rustc_data_structures::indexed_vec::Idx;
+use rustc_data_structures::sync::Lrc;
use rustc::mir::interpret::{GlobalId, Pointer, Scalar, Allocation, ConstValue, AllocType};
use rustc::ty::{self, Ty};
use rustc::ty::layout::{self, HasDataLayout, LayoutOf, Size};
pub fn codegen_static_initializer<'a, 'tcx>(
cx: &CodegenCx<'a, 'tcx>,
def_id: DefId)
- -> Result<ValueRef, ConstEvalErr<'tcx>>
+ -> Result<ValueRef, Lrc<ConstEvalErr<'tcx>>>
{
let instance = ty::Instance::mono(cx.tcx, def_id);
let cid = GlobalId {
let static_ = cx.tcx.const_eval(param_env.and(cid))?;
let alloc = match static_.val {
- ConstVal::Value(ConstValue::ByRef(alloc, n)) if n.bytes() == 0 => alloc,
+ ConstValue::ByRef(alloc, n) if n.bytes() == 0 => alloc,
_ => bug!("static const eval returned {:#?}", static_),
};
Ok(const_alloc_to_llvm(cx, alloc))
}
impl<'a, 'tcx> FunctionCx<'a, 'tcx> {
- fn const_to_const_value(
+ fn fully_evaluate(
&mut self,
bx: &Builder<'a, 'tcx>,
constant: &'tcx ty::Const<'tcx>,
- ) -> Result<ConstValue<'tcx>, ConstEvalErr<'tcx>> {
+ ) -> Result<&'tcx ty::Const<'tcx>, Lrc<ConstEvalErr<'tcx>>> {
match constant.val {
- ConstVal::Unevaluated(def_id, ref substs) => {
+ ConstValue::Unevaluated(def_id, ref substs) => {
let tcx = bx.tcx();
let param_env = ty::ParamEnv::reveal_all();
let instance = ty::Instance::resolve(tcx, param_env, def_id, substs).unwrap();
instance,
promoted: None,
};
- let c = tcx.const_eval(param_env.and(cid))?;
- self.const_to_const_value(bx, c)
+ tcx.const_eval(param_env.and(cid))
},
- ConstVal::Value(val) => Ok(val),
+ _ => Ok(constant),
}
}
- pub fn mir_constant_to_const_value(
+ pub fn eval_mir_constant(
&mut self,
bx: &Builder<'a, 'tcx>,
constant: &mir::Constant<'tcx>,
- ) -> Result<ConstValue<'tcx>, ConstEvalErr<'tcx>> {
+ ) -> Result<&'tcx ty::Const<'tcx>, Lrc<ConstEvalErr<'tcx>>> {
match constant.literal {
mir::Literal::Promoted { index } => {
let param_env = ty::ParamEnv::reveal_all();
mir::Literal::Value { value } => {
Ok(self.monomorphize(&value))
}
- }.and_then(|c| self.const_to_const_value(bx, c))
+ }.and_then(|c| self.fully_evaluate(bx, c))
}
/// process constant containing SIMD shuffle indices
bx: &Builder<'a, 'tcx>,
constant: &mir::Constant<'tcx>,
) -> (ValueRef, Ty<'tcx>) {
- self.mir_constant_to_const_value(bx, constant)
+ self.eval_mir_constant(bx, constant)
.and_then(|c| {
- let field_ty = constant.ty.builtin_index().unwrap();
- let fields = match constant.ty.sty {
+ let field_ty = c.ty.builtin_index().unwrap();
+ let fields = match c.ty.sty {
ty::TyArray(_, n) => n.unwrap_usize(bx.tcx()),
ref other => bug!("invalid simd shuffle type: {}", other),
};
- let values: Result<Vec<ValueRef>, _> = (0..fields).map(|field| {
+ let values: Result<Vec<ValueRef>, Lrc<_>> = (0..fields).map(|field| {
let field = const_val_field(
bx.tcx(),
ty::ParamEnv::reveal_all(),
None,
mir::Field::new(field as usize),
c,
- constant.ty,
)?;
if let Some(prim) = field.to_scalar() {
let layout = bx.cx.layout_of(field_ty);
}
}).collect();
let llval = C_struct(bx.cx, &values?, false);
- Ok((llval, constant.ty))
+ Ok((llval, c.ty))
})
.unwrap_or_else(|e| {
e.report_as_error(
// except according to those terms.
use llvm::{ValueRef, LLVMConstInBoundsGEP};
-use rustc::middle::const_val::ConstEvalErr;
+use rustc::mir::interpret::ConstEvalErr;
use rustc::mir;
use rustc::mir::interpret::ConstValue;
use rustc::ty;
use rustc::ty::layout::{self, Align, LayoutOf, TyLayout};
use rustc_data_structures::indexed_vec::Idx;
+use rustc_data_structures::sync::Lrc;
use base;
use common::{self, CodegenCx, C_null, C_undef, C_usize};
}
pub fn from_const(bx: &Builder<'a, 'tcx>,
- val: ConstValue<'tcx>,
- ty: ty::Ty<'tcx>)
- -> Result<OperandRef<'tcx>, ConstEvalErr<'tcx>> {
- let layout = bx.cx.layout_of(ty);
+ val: &'tcx ty::Const<'tcx>)
+ -> Result<OperandRef<'tcx>, Lrc<ConstEvalErr<'tcx>>> {
+ let layout = bx.cx.layout_of(val.ty);
if layout.is_zst() {
return Ok(OperandRef::new_zst(bx.cx, layout));
}
- let val = match val {
+ let val = match val.val {
+ ConstValue::Unevaluated(..) => bug!(),
ConstValue::Scalar(x) => {
let scalar = match layout.abi {
layout::Abi::Scalar(ref x) => x,
mir::Operand::Constant(ref constant) => {
let ty = self.monomorphize(&constant.ty);
- self.mir_constant_to_const_value(bx, constant)
- .and_then(|c| OperandRef::from_const(bx, c, ty))
+ self.eval_mir_constant(bx, constant)
+ .and_then(|c| OperandRef::from_const(bx, c))
.unwrap_or_else(|err| {
match constant.literal {
mir::Literal::Promoted { .. } => {
'-' | ':' => result.push('.'),
// These are legal symbols
- 'a'...'z' | 'A'...'Z' | '0'...'9' | '_' | '.' | '$' => result.push(c),
+ 'a'..='z' | 'A'..='Z' | '0'..='9' | '_' | '.' | '$' => result.push(c),
_ => {
result.push('$');
obligation: O,
state: Cell<NodeState>,
- /// Obligations that depend on this obligation for their
- /// completion. They must all be in a non-pending state.
- dependents: Vec<NodeIndex>,
/// The parent of a node - the original obligation of
/// which it is a subobligation. Except for error reporting,
- /// this is just another member of `dependents`.
+ /// it is just like any member of `dependents`.
parent: Option<NodeIndex>,
+
+ /// Obligations that depend on this obligation for their
+ /// completion. They must all be in a non-pending state.
+ dependents: Vec<NodeIndex>,
}
/// The state of one node in some tree within the forest. This
Entry::Occupied(o) => {
debug!("register_obligation_at({:?}, {:?}) - duplicate of {:?}!",
obligation, parent, o.get());
+ let node = &mut self.nodes[o.get().get()];
if let Some(parent) = parent {
- if self.nodes[o.get().get()].dependents.contains(&parent) {
- debug!("register_obligation_at({:?}, {:?}) - duplicate subobligation",
- obligation, parent);
- } else {
- self.nodes[o.get().get()].dependents.push(parent);
+ // If the node is already in `waiting_cache`, it's already
+ // been marked with a parent. (It's possible that parent
+ // has been cleared by `apply_rewrites`, though.) So just
+ // dump `parent` into `node.dependents`... unless it's
+ // already in `node.dependents` or `node.parent`.
+ if !node.dependents.contains(&parent) && Some(parent) != node.parent {
+ node.dependents.push(parent);
}
}
- if let NodeState::Error = self.nodes[o.get().get()].state.get() {
+ if let NodeState::Error = node.state.get() {
Err(())
} else {
Ok(())
NodeState::Success => {
node.state.set(NodeState::OnDfsStack);
stack.push(index);
- if let Some(parent) = node.parent {
- self.find_cycles_from_node(stack, processor, parent.get());
- }
- for dependent in &node.dependents {
+ for dependent in node.parent.iter().chain(node.dependents.iter()) {
self.find_cycles_from_node(stack, processor, dependent.get());
}
stack.pop();
}
error_stack.extend(
- node.dependents.iter().cloned().chain(node.parent).map(|x| x.get())
+ node.parent.iter().chain(node.dependents.iter()).map(|x| x.get())
);
}
#[inline]
fn mark_neighbors_as_waiting_from(&self, node: &Node<O>) {
- if let Some(parent) = node.parent {
- self.mark_as_waiting_from(&self.nodes[parent.get()]);
- }
-
- for dependent in &node.dependents {
+ for dependent in node.parent.iter().chain(node.dependents.iter()) {
self.mark_as_waiting_from(&self.nodes[dependent.get()]);
}
}
fn new(parent: Option<NodeIndex>, obligation: O) -> Node<O> {
Node {
obligation,
- parent,
state: Cell::new(NodeState::Pending),
+ parent,
dependents: vec![],
}
}
});
}
+ // Expand global allocators, which are treated as an in-tree proc macro
krate = time(sess, "creating allocators", || {
- allocator::expand::modify(&sess.parse_sess, &mut resolver, krate, sess.diagnostic())
+ allocator::expand::modify(
+ &sess.parse_sess,
+ &mut resolver,
+ krate,
+ crate_name.to_string(),
+ sess.diagnostic(),
+ )
});
+ // Done with macro expansion!
+
after_expand(&krate)?;
if sess.opts.debugging_opts.input_stats {
let mut cfgs = Vec::new();
for &(name, ref value) in sess.parse_sess.config.iter() {
let gated_cfg = GatedCfg::gate(&ast::MetaItem {
- ident: ast::Path::from_ident(name.to_ident()),
+ ident: ast::Path::from_ident(ast::Ident::with_empty_ctxt(name)),
node: ast::MetaItemKind::Word,
span: DUMMY_SP,
});
use std::env;
use rustc::session::config::nightly_options;
+use rustc_serialize::opaque::Encoder;
/// The first few bytes of files generated by incremental compilation
const FILE_MAGIC: &'static [u8] = b"RSIC";
/// the git commit hash.
const RUSTC_VERSION: Option<&'static str> = option_env!("CFG_VERSION");
-pub fn write_file_header<W: io::Write>(stream: &mut W) -> io::Result<()> {
- stream.write_all(FILE_MAGIC)?;
- stream.write_all(&[(HEADER_FORMAT_VERSION >> 0) as u8,
- (HEADER_FORMAT_VERSION >> 8) as u8])?;
+pub fn write_file_header(stream: &mut Encoder) {
+ stream.emit_raw_bytes(FILE_MAGIC);
+ stream.emit_raw_bytes(&[(HEADER_FORMAT_VERSION >> 0) as u8,
+ (HEADER_FORMAT_VERSION >> 8) as u8]);
let rustc_version = rustc_version();
assert_eq!(rustc_version.len(), (rustc_version.len() as u8) as usize);
- stream.write_all(&[rustc_version.len() as u8])?;
- stream.write_all(rustc_version.as_bytes())?;
-
- Ok(())
+ stream.emit_raw_bytes(&[rustc_version.len() as u8]);
+ stream.emit_raw_bytes(rustc_version.as_bytes());
}
/// Reads the contents of a file with a file header as defined in this module.
use rustc_data_structures::sync::join;
use rustc_serialize::Encodable as RustcEncodable;
use rustc_serialize::opaque::Encoder;
-use std::io::{self, Cursor};
use std::fs;
use std::path::PathBuf;
}
fn save_in<F>(sess: &Session, path_buf: PathBuf, encode: F)
- where F: FnOnce(&mut Encoder) -> io::Result<()>
+ where F: FnOnce(&mut Encoder)
{
debug!("save: storing data in {}", path_buf.display());
}
// generate the data in a memory buffer
- let mut wr = Cursor::new(Vec::new());
- file_format::write_file_header(&mut wr).unwrap();
- match encode(&mut Encoder::new(&mut wr)) {
- Ok(()) => {}
- Err(err) => {
- sess.err(&format!("could not encode dep-graph to `{}`: {}",
- path_buf.display(),
- err));
- return;
- }
- }
+ let mut encoder = Encoder::new(Vec::new());
+ file_format::write_file_header(&mut encoder);
+ encode(&mut encoder);
// write the data out
- let data = wr.into_inner();
+ let data = encoder.into_inner();
match fs::write(&path_buf, data) {
Ok(_) => {
debug!("save: data written to disk successfully");
}
fn encode_dep_graph(tcx: TyCtxt,
- encoder: &mut Encoder)
- -> io::Result<()> {
+ encoder: &mut Encoder) {
// First encode the commandline arguments hash
- tcx.sess.opts.dep_tracking_hash().encode(encoder)?;
+ tcx.sess.opts.dep_tracking_hash().encode(encoder).unwrap();
// Encode the graph data.
let serialized_graph = time(tcx.sess, "getting serialized graph", || {
}
time(tcx.sess, "encoding serialized graph", || {
- serialized_graph.encode(encoder)
- })?;
-
- Ok(())
+ serialized_graph.encode(encoder).unwrap();
+ });
}
fn encode_work_product_index(work_products: &FxHashMap<WorkProductId, WorkProduct>,
- encoder: &mut Encoder) -> io::Result<()> {
+ encoder: &mut Encoder) {
let serialized_products: Vec<_> = work_products
.iter()
.map(|(id, work_product)| {
})
.collect();
- serialized_products.encode(encoder)
+ serialized_products.encode(encoder).unwrap();
}
fn encode_query_cache(tcx: TyCtxt,
- encoder: &mut Encoder)
- -> io::Result<()> {
+ encoder: &mut Encoder) {
time(tcx.sess, "serialize query result cache", || {
- tcx.serialize_query_result_cache(encoder)
+ tcx.serialize_query_result_cache(encoder).unwrap();
})
}
GenericParamKind::Lifetime { .. } => {}
GenericParamKind::Type { synthetic, .. } => {
if synthetic.is_none() {
- self.check_case(cx, "type parameter", param.name.name(), param.span);
+ self.check_case(cx, "type parameter", param.name.ident().name, param.span);
}
}
}
fn check_generic_param(&mut self, cx: &LateContext, param: &hir::GenericParam) {
match param.kind {
GenericParamKind::Lifetime { .. } => {
- let name = param.name.name().as_str();
+ let name = param.name.ident().as_str();
self.check_snake_case(cx, "lifetime", &name, Some(param.span));
}
GenericParamKind::Type { .. } => {}
}
fn check_trait_item(&mut self, cx: &LateContext, item: &hir::TraitItem) {
- if let hir::TraitItemKind::Method(_, hir::TraitMethod::Required(ref names)) = item.node {
+ if let hir::TraitItemKind::Method(_, hir::TraitMethod::Required(ref pnames)) = item.node {
self.check_snake_case(cx,
"trait method",
- &item.name.as_str(),
+ &item.ident.as_str(),
Some(item.span));
- for name in names {
- self.check_snake_case(cx, "variable", &name.node.as_str(), Some(name.span));
+ for param_name in pnames {
+ self.check_snake_case(cx, "variable", ¶m_name.as_str(), Some(param_name.span));
}
}
}
fn check_pat(&mut self, cx: &LateContext, p: &hir::Pat) {
- if let &PatKind::Binding(_, _, ref path1, _) = &p.node {
- self.check_snake_case(cx, "variable", &path1.node.as_str(), Some(p.span));
+ if let &PatKind::Binding(_, _, ref ident, _) = &p.node {
+ self.check_snake_case(cx, "variable", &ident.as_str(), Some(p.span));
}
}
fn check_trait_item(&mut self, cx: &LateContext, ti: &hir::TraitItem) {
match ti.node {
hir::TraitItemKind::Const(..) => {
- NonUpperCaseGlobals::check_upper_case(cx, "associated constant", ti.name, ti.span);
+ NonUpperCaseGlobals::check_upper_case(cx, "associated constant",
+ ti.ident.name, ti.span);
}
_ => {}
}
fn check_impl_item(&mut self, cx: &LateContext, ii: &hir::ImplItem) {
match ii.node {
hir::ImplItemKind::Const(..) => {
- NonUpperCaseGlobals::check_upper_case(cx, "associated constant", ii.name, ii.span);
+ NonUpperCaseGlobals::check_upper_case(cx, "associated constant",
+ ii.ident.name, ii.span);
}
_ => {}
}
if path.segments.len() == 1 {
NonUpperCaseGlobals::check_upper_case(cx,
"constant in pattern",
- path.segments[0].name,
+ path.segments[0].ident.name,
path.span);
}
}
use syntax::ast;
use syntax::attr;
+use syntax::codemap::Spanned;
use syntax::edition::Edition;
use syntax::feature_gate::{AttributeGate, AttributeType, Stability, deprecated_attributes};
use syntax_pos::{BytePos, Span, SyntaxContext};
// (Issue #49588)
continue;
}
- if let PatKind::Binding(_, _, name, None) = fieldpat.node.pat.node {
- let binding_ident = ast::Ident::new(name.node, name.span);
- if cx.tcx.find_field_index(binding_ident, &variant) ==
+ if let PatKind::Binding(_, _, ident, None) = fieldpat.node.pat.node {
+ if cx.tcx.find_field_index(ident, &variant) ==
Some(cx.tcx.field_index(fieldpat.node.id, cx.tables)) {
let mut err = cx.struct_span_lint(NON_SHORTHAND_FIELD_PATTERNS,
fieldpat.span,
- &format!("the `{}:` in this pattern is redundant",
- name.node));
+ &format!("the `{}:` in this pattern is redundant", ident));
let subspan = cx.tcx.sess.codemap().span_through_char(fieldpat.span, ':');
- err.span_suggestion_short(subspan,
- "remove this",
- format!("{}", name.node));
+ err.span_suggestion_short(subspan, "remove this", format!("{}", ident));
err.emit();
}
}
let container = ty::ImplContainer(vtable_impl.impl_def_id);
// It matches if it comes from the same impl,
// and has the same method name.
- container == method.container && callee_item.name == method.name
+ container == method.container &&
+ callee_item.ident.name == method.ident.name
}
// There's no way to know if this call is
}
}
+
/// Does nothing as a lint pass, but registers some `Lint`s
/// which are used by other parts of the compiler.
#[derive(Copy, Clone)]
)
}
}
+
+
+declare_lint! {
+ pub ELLIPSIS_INCLUSIVE_RANGE_PATTERNS,
+ Allow,
+ "`...` range patterns are deprecated"
+}
+
+
+pub struct EllipsisInclusiveRangePatterns;
+
+impl LintPass for EllipsisInclusiveRangePatterns {
+ fn get_lints(&self) -> LintArray {
+ lint_array!(ELLIPSIS_INCLUSIVE_RANGE_PATTERNS)
+ }
+}
+
+impl EarlyLintPass for EllipsisInclusiveRangePatterns {
+ fn check_pat(&mut self, cx: &EarlyContext, pat: &ast::Pat) {
+ use self::ast::{PatKind, RangeEnd, RangeSyntax};
+
+ if let PatKind::Range(
+ _, _, Spanned { span, node: RangeEnd::Included(RangeSyntax::DotDotDot) }
+ ) = pat.node {
+ let msg = "`...` range patterns are deprecated";
+ let mut err = cx.struct_span_lint(ELLIPSIS_INCLUSIVE_RANGE_PATTERNS, span, msg);
+ err.span_suggestion_short_with_applicability(
+ span, "use `..=` for an inclusive range", "..=".to_owned(),
+ // FIXME: outstanding problem with precedence in ref patterns:
+ // https://github.com/rust-lang/rust/issues/51043#issuecomment-392252285
+ Applicability::MaybeIncorrect
+ );
+ err.emit()
+ }
+ }
+}
extern crate syntax_pos;
use rustc::lint;
+use rustc::lint::{LateContext, LateLintPass, LintPass, LintArray};
use rustc::lint::builtin::{BARE_TRAIT_OBJECTS, ABSOLUTE_PATHS_NOT_STARTING_WITH_CRATE};
use rustc::session;
use rustc::util;
+use rustc::hir;
+
+use syntax::ast;
+use syntax_pos::Span;
use session::Session;
use syntax::edition::Edition;
/// defined in this crate and the ones defined in
/// `rustc::lint::builtin`).
pub fn register_builtins(store: &mut lint::LintStore, sess: Option<&Session>) {
- macro_rules! add_builtin {
- ($sess:ident, $($name:ident),*,) => (
- {$(
- store.register_late_pass($sess, false, box $name);
- )*}
- )
- }
-
macro_rules! add_early_builtin {
($sess:ident, $($name:ident),*,) => (
{$(
)
}
- macro_rules! add_builtin_with_new {
- ($sess:ident, $($name:ident),*,) => (
- {$(
- store.register_late_pass($sess, false, box $name::new());
- )*}
- )
- }
-
macro_rules! add_early_builtin_with_new {
($sess:ident, $($name:ident),*,) => (
{$(
AnonymousParameters,
UnusedDocComment,
BadRepr,
+ EllipsisInclusiveRangePatterns,
);
add_early_builtin_with_new!(sess,
DeprecatedAttr,
);
- add_builtin!(sess,
- HardwiredLints,
- WhileTrue,
- ImproperCTypes,
- VariantSizeDifferences,
- BoxPointers,
- UnusedAttributes,
- PathStatements,
- UnusedResults,
- NonCamelCaseTypes,
- NonSnakeCase,
- NonUpperCaseGlobals,
- NonShorthandFieldPatterns,
- UnsafeCode,
- UnusedAllocation,
- MissingCopyImplementations,
- UnstableFeatures,
- UnconditionalRecursion,
- InvalidNoMangleItems,
- PluginAsLibrary,
- MutableTransmutes,
- UnionsWithDropFields,
- UnreachablePub,
- TypeAliasBounds,
- UnusedBrokenConst,
- TrivialConstraints,
- );
+ late_lint_methods!(declare_combined_late_lint_pass, [BuiltinCombinedLateLintPass, [
+ HardwiredLints: HardwiredLints,
+ WhileTrue: WhileTrue,
+ ImproperCTypes: ImproperCTypes,
+ VariantSizeDifferences: VariantSizeDifferences,
+ BoxPointers: BoxPointers,
+ UnusedAttributes: UnusedAttributes,
+ PathStatements: PathStatements,
+ UnusedResults: UnusedResults,
+ NonCamelCaseTypes: NonCamelCaseTypes,
+ NonSnakeCase: NonSnakeCase,
+ NonUpperCaseGlobals: NonUpperCaseGlobals,
+ NonShorthandFieldPatterns: NonShorthandFieldPatterns,
+ UnsafeCode: UnsafeCode,
+ UnusedAllocation: UnusedAllocation,
+ MissingCopyImplementations: MissingCopyImplementations,
+ UnstableFeatures: UnstableFeatures,
+ UnconditionalRecursion: UnconditionalRecursion,
+ InvalidNoMangleItems: InvalidNoMangleItems,
+ PluginAsLibrary: PluginAsLibrary,
+ MutableTransmutes: MutableTransmutes,
+ UnionsWithDropFields: UnionsWithDropFields,
+ UnreachablePub: UnreachablePub,
+ TypeAliasBounds: TypeAliasBounds,
+ UnusedBrokenConst: UnusedBrokenConst,
+ TrivialConstraints: TrivialConstraints,
+ TypeLimits: TypeLimits::new(),
+ MissingDoc: MissingDoc::new(),
+ MissingDebugImplementations: MissingDebugImplementations::new(),
+ ]], ['tcx]);
- add_builtin_with_new!(sess,
- TypeLimits,
- MissingDoc,
- MissingDebugImplementations,
- );
+ store.register_late_pass(sess, false, box BuiltinCombinedLateLintPass::new());
add_lint_group!(sess,
"bad_style",
"rust_2018_idioms",
BARE_TRAIT_OBJECTS,
UNREACHABLE_PUB,
- UNUSED_EXTERN_CRATES);
+ UNUSED_EXTERN_CRATES,
+ ELLIPSIS_INCLUSIVE_RANGE_PATTERNS);
// Guidelines for creating a future incompatibility lint:
//
reference: "issue TBD",
edition: Some(Edition::Edition2018),
},
+ FutureIncompatibleInfo {
+ id: LintId::of(WHERE_CLAUSES_OBJECT_SAFETY),
+ reference: "issue #51443 <https://github.com/rust-lang/rust/issues/51443>",
+ edition: None,
+ },
FutureIncompatibleInfo {
id: LintId::of(DUPLICATE_ASSOCIATED_TYPE_BINDINGS),
reference: "issue #50589 <https://github.com/rust-lang/rust/issues/50589>",
};
ty::AssociatedItem {
- name: name.as_symbol(),
+ ident: Ident::from_interned_str(name),
kind,
vis: item.visibility.decode(self),
defaultness: container.defaultness(),
src_hash,
start_pos,
end_pos,
- lines,
- multibyte_chars,
- non_narrow_chars,
+ mut lines,
+ mut multibyte_chars,
+ mut non_narrow_chars,
name_hash,
.. } = filemap_to_import;
// `CodeMap::new_imported_filemap()` will then translate those
// coordinates to their new global frame of reference when the
// offset of the FileMap is known.
- let mut lines = lines.into_inner();
for pos in &mut lines {
*pos = *pos - start_pos;
}
- let mut multibyte_chars = multibyte_chars.into_inner();
for mbc in &mut multibyte_chars {
mbc.pos = mbc.pos - start_pos;
}
- let mut non_narrow_chars = non_narrow_chars.into_inner();
for swc in &mut non_narrow_chars {
*swc = *swc - start_pos;
}
use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
use std::hash::Hash;
-use std::io::prelude::*;
-use std::io::Cursor;
use std::path::Path;
use rustc_data_structures::sync::Lrc;
use std::u32;
use syntax::ast::{self, CRATE_NODE_ID};
-use syntax::codemap::Spanned;
use syntax::attr;
-use syntax::symbol::Symbol;
+use syntax::symbol::keywords;
use syntax_pos::{self, hygiene, FileName, FileMap, Span, DUMMY_SP};
use rustc::hir::{self, PatKind};
use rustc::hir::intravisit;
pub struct EncodeContext<'a, 'tcx: 'a> {
- opaque: opaque::Encoder<'a>,
+ opaque: opaque::Encoder,
pub tcx: TyCtxt<'a, 'tcx, 'tcx>,
link_meta: &'a LinkMeta,
}
impl<'a, 'tcx> Encoder for EncodeContext<'a, 'tcx> {
- type Error = <opaque::Encoder<'a> as Encoder>::Error;
+ type Error = <opaque::Encoder as Encoder>::Error;
fn emit_nil(&mut self) -> Result<(), Self::Error> {
Ok(())
// Index the items
i = self.position();
- let index = items.write_index(&mut self.opaque.cursor);
+ let index = items.write_index(&mut self.opaque);
let index_bytes = self.position() - i;
let attrs = tcx.hir.krate_attrs();
if self.tcx.sess.meta_stats() {
let mut zero_bytes = 0;
- for e in self.opaque.cursor.get_ref() {
+ for e in self.opaque.data.iter() {
if *e == 0 {
zero_bytes += 1;
}
let body = self.tcx.hir.body(body_id);
self.lazy_seq(body.arguments.iter().map(|arg| {
match arg.pat.node {
- PatKind::Binding(_, _, name, _) => name.node,
- _ => Symbol::intern("")
+ PatKind::Binding(_, _, ident, _) => ident.name,
+ _ => keywords::Invalid.name(),
}
}))
})
}
- fn encode_fn_arg_names(&mut self, names: &[Spanned<ast::Name>])
- -> LazySeq<ast::Name> {
- self.lazy_seq(names.iter().map(|name| name.node))
+ fn encode_fn_arg_names(&mut self, param_names: &[ast::Ident]) -> LazySeq<ast::Name> {
+ self.lazy_seq(param_names.iter().map(|ident| ident.name))
}
fn encode_optimized_mir(&mut self, def_id: DefId) -> Option<Lazy<mir::Mir<'tcx>>> {
link_meta: &LinkMeta)
-> EncodedMetadata
{
- let mut cursor = Cursor::new(vec![]);
- cursor.write_all(METADATA_HEADER).unwrap();
+ let mut encoder = opaque::Encoder::new(vec![]);
+ encoder.emit_raw_bytes(METADATA_HEADER);
// Will be filled with the root position after encoding everything.
- cursor.write_all(&[0, 0, 0, 0]).unwrap();
+ encoder.emit_raw_bytes(&[0, 0, 0, 0]);
- let root = {
+ let (root, mut result) = {
let mut ecx = EncodeContext {
- opaque: opaque::Encoder::new(&mut cursor),
+ opaque: encoder,
tcx,
link_meta,
lazy_state: LazyState::NoNode,
// Encode all the entries and extra information in the crate,
// culminating in the `CrateRoot` which points to all of it.
- ecx.encode_crate_root()
+ let root = ecx.encode_crate_root();
+ (root, ecx.opaque.into_inner())
};
- let mut result = cursor.into_inner();
// Encode the root position.
let header = METADATA_HEADER.len();
use schema::*;
use rustc::hir::def_id::{DefId, DefIndex, DefIndexAddressSpace};
-use std::io::{Cursor, Write};
+use rustc_serialize::opaque::Encoder;
use std::slice;
use std::u32;
self.positions[space_index][array_index] = position.to_le();
}
- pub fn write_index(&self, buf: &mut Cursor<Vec<u8>>) -> LazySeq<Index> {
+ pub fn write_index(&self, buf: &mut Encoder) -> LazySeq<Index> {
let pos = buf.position();
// First we write the length of the lower range ...
- buf.write_all(words_to_bytes(&[(self.positions[0].len() as u32).to_le()])).unwrap();
+ buf.emit_raw_bytes(words_to_bytes(&[(self.positions[0].len() as u32).to_le()]));
// ... then the values in the lower range ...
- buf.write_all(words_to_bytes(&self.positions[0][..])).unwrap();
+ buf.emit_raw_bytes(words_to_bytes(&self.positions[0][..]));
// ... then the values in the higher range.
- buf.write_all(words_to_bytes(&self.positions[1][..])).unwrap();
+ buf.emit_raw_bytes(words_to_bytes(&self.positions[1][..]));
LazySeq::with_position_and_length(pos as usize,
self.positions[0].len() + self.positions[1].len() + 1)
}
#![feature(box_patterns)]
#![feature(fs_read_write)]
#![feature(libc)]
+#![feature(macro_at_most_once_rep)]
#![cfg_attr(stage0, feature(macro_lifetime_matcher))]
#![feature(proc_macro_internals)]
#![feature(quote)]
self.moved_error_reported.insert(root_place.clone());
- let item_msg = match self.describe_place(place) {
+ let item_msg = match self.describe_place_with_options(place, IncludingDowncast(true)) {
Some(name) => format!("`{}`", name),
None => "value".to_owned(),
};
.cannot_act_on_uninitialized_variable(
span,
desired_action.as_noun(),
- &self.describe_place(place).unwrap_or("_".to_owned()),
+ &self
+ .describe_place_with_options(place, IncludingDowncast(true))
+ .unwrap_or("_".to_owned()),
Origin::Mir,
)
.span_label(span, format!("use of possibly uninitialized {}", item_msg))
span,
desired_action.as_noun(),
msg,
- &self.describe_place(place).unwrap_or("_".to_owned()),
+ self.describe_place_with_options(&place, IncludingDowncast(true)),
Origin::Mir,
);
let mut is_loop_move = false;
- for moi in mois {
+ for moi in &mois {
let move_msg = ""; //FIXME: add " (into closure)"
- let move_span = self.mir.source_info(self.move_data.moves[*moi].source).span;
+ let move_span = self
+ .mir
+ .source_info(self.move_data.moves[**moi].source)
+ .span;
if span == move_span {
err.span_label(
span,
};
if needs_note {
- let note_msg = match self.describe_place(place) {
- Some(name) => format!("`{}`", name),
- None => "value".to_owned(),
- };
+ let mpi = self.move_data.moves[*mois[0]].path;
+ let place = &self.move_data.move_paths[mpi].place;
+
+ if let Some(ty) = self.retrieve_type_for_place(place) {
+ let note_msg = match self
+ .describe_place_with_options(place, IncludingDowncast(true))
+ {
+ Some(name) => format!("`{}`", name),
+ None => "value".to_owned(),
+ };
- err.note(&format!(
- "move occurs because {} has type `{}`, \
- which does not implement the `Copy` trait",
- note_msg, ty
- ));
+ err.note(&format!(
+ "move occurs because {} has type `{}`, \
+ which does not implement the `Copy` trait",
+ note_msg, ty
+ ));
+ }
}
}
let local_decl = &self.mir.local_decls[*local];
if let Some(name) = local_decl.name {
if local_decl.can_be_made_mutable() {
- err.span_label(local_decl.source_info.span,
- format!("consider changing this to `mut {}`", name));
+ err.span_label(
+ local_decl.source_info.span,
+ format!("consider changing this to `mut {}`", name),
+ );
}
}
}
}
}
+pub(super) struct IncludingDowncast(bool);
+
impl<'cx, 'gcx, 'tcx> MirBorrowckCtxt<'cx, 'gcx, 'tcx> {
// End-user visible description of `place` if one can be found. If the
// place is a temporary for instance, None will be returned.
pub(super) fn describe_place(&self, place: &Place<'tcx>) -> Option<String> {
+ self.describe_place_with_options(place, IncludingDowncast(false))
+ }
+
+ // End-user visible description of `place` if one can be found. If the
+ // place is a temporary for instance, None will be returned.
+ // `IncludingDowncast` parameter makes the function return `Err` if `ProjectionElem` is
+ // `Downcast` and `IncludingDowncast` is true
+ pub(super) fn describe_place_with_options(
+ &self,
+ place: &Place<'tcx>,
+ including_downcast: IncludingDowncast,
+ ) -> Option<String> {
let mut buf = String::new();
- match self.append_place_to_string(place, &mut buf, false) {
+ match self.append_place_to_string(place, &mut buf, false, &including_downcast) {
Ok(()) => Some(buf),
Err(()) => None,
}
place: &Place<'tcx>,
buf: &mut String,
mut autoderef: bool,
+ including_downcast: &IncludingDowncast,
) -> Result<(), ()> {
match *place {
Place::Local(local) => {
}
} else {
if autoderef {
- self.append_place_to_string(&proj.base, buf, autoderef)?;
+ self.append_place_to_string(
+ &proj.base,
+ buf,
+ autoderef,
+ &including_downcast,
+ )?;
} else {
buf.push_str(&"*");
- self.append_place_to_string(&proj.base, buf, autoderef)?;
+ self.append_place_to_string(
+ &proj.base,
+ buf,
+ autoderef,
+ &including_downcast,
+ )?;
}
}
}
ProjectionElem::Downcast(..) => {
- self.append_place_to_string(&proj.base, buf, autoderef)?;
+ self.append_place_to_string(
+ &proj.base,
+ buf,
+ autoderef,
+ &including_downcast,
+ )?;
+ if including_downcast.0 {
+ return Err(());
+ }
}
ProjectionElem::Field(field, _ty) => {
autoderef = true;
buf.push_str(&name);
} else {
let field_name = self.describe_field(&proj.base, field);
- self.append_place_to_string(&proj.base, buf, autoderef)?;
+ self.append_place_to_string(
+ &proj.base,
+ buf,
+ autoderef,
+ &including_downcast,
+ )?;
buf.push_str(&format!(".{}", field_name));
}
}
ProjectionElem::Index(index) => {
autoderef = true;
- self.append_place_to_string(&proj.base, buf, autoderef)?;
+ self.append_place_to_string(
+ &proj.base,
+ buf,
+ autoderef,
+ &including_downcast,
+ )?;
buf.push_str("[");
if let Err(_) = self.append_local_to_string(index, buf) {
buf.push_str("..");
// Since it isn't possible to borrow an element on a particular index and
// then use another while the borrow is held, don't output indices details
// to avoid confusing the end-user
- self.append_place_to_string(&proj.base, buf, autoderef)?;
+ self.append_place_to_string(
+ &proj.base,
+ buf,
+ autoderef,
+ &including_downcast,
+ )?;
buf.push_str(&"[..]");
}
};
use rustc::hir::def_id::DefId;
use rustc::hir::map::definitions::DefPathData;
use rustc::infer::InferCtxt;
-use rustc::ty::{self, ParamEnv, TyCtxt};
-use rustc::ty::query::Providers;
use rustc::lint::builtin::UNUSED_MUT;
use rustc::mir::{self, AggregateKind, BasicBlock, BorrowCheckResult, BorrowKind};
-use rustc::mir::{ClearCrossCrate, Local, Location, Place, Mir, Mutability, Operand};
-use rustc::mir::{Projection, ProjectionElem, Rvalue, Field, Statement, StatementKind};
+use rustc::mir::{ClearCrossCrate, Local, Location, Mir, Mutability, Operand, Place};
+use rustc::mir::{Field, Projection, ProjectionElem, Rvalue, Statement, StatementKind};
use rustc::mir::{Terminator, TerminatorKind};
+use rustc::ty::query::Providers;
+use rustc::ty::{self, ParamEnv, TyCtxt};
use rustc_data_structures::control_flow_graph::dominators::Dominators;
use rustc_data_structures::fx::FxHashSet;
use syntax_pos::Span;
-use dataflow::{do_dataflow, DebugFormatted};
+use dataflow::indexes::BorrowIndex;
+use dataflow::move_paths::{HasMoveData, LookupResult, MoveData, MovePathIndex};
+use dataflow::move_paths::{IllegalMoveOriginKind, MoveError};
+use dataflow::Borrows;
+use dataflow::DataflowResultsConsumer;
use dataflow::FlowAtLocation;
use dataflow::MoveDataParamEnv;
-use dataflow::{DataflowResultsConsumer};
-use dataflow::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
+use dataflow::{do_dataflow, DebugFormatted};
use dataflow::{EverInitializedPlaces, MovingOutStatements};
-use dataflow::Borrows;
-use dataflow::indexes::BorrowIndex;
-use dataflow::move_paths::{IllegalMoveOriginKind, MoveError};
-use dataflow::move_paths::{HasMoveData, LookupResult, MoveData, MovePathIndex};
+use dataflow::{MaybeInitializedPlaces, MaybeUninitializedPlaces};
use util::borrowck_errors::{BorrowckErrors, Origin};
use util::collect_writes::FindAssignments;
-use self::borrow_set::{BorrowSet, BorrowData};
+use self::borrow_set::{BorrowData, BorrowSet};
use self::flows::Flows;
use self::location::LocationTable;
use self::prefixes::PrefixSet;
mod error_reporting;
mod flows;
mod location;
+mod path_utils;
crate mod place_ext;
mod prefixes;
-mod path_utils;
mod used_muts;
pub(crate) mod nll;
};
}
-fn mir_borrowck<'a, 'tcx>(
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- def_id: DefId,
-) -> BorrowCheckResult<'tcx> {
+fn mir_borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> BorrowCheckResult<'tcx> {
let input_mir = tcx.mir_validated(def_id);
debug!("run query mir_borrowck: {}", tcx.item_path_str(def_id));
- if !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir_borrowck() {
+ let mut return_early;
+
+ // Return early if we are not supposed to use MIR borrow checker for this function.
+ return_early = !tcx.has_attr(def_id, "rustc_mir_borrowck") && !tcx.use_mir_borrowck();
+
+ if tcx.is_struct_constructor(def_id) {
+ // We are not borrow checking the automatically generated struct constructors
+ // because we want to accept structs such as this (taken from the `linked-hash-map`
+ // crate):
+ // ```rust
+ // struct Qey<Q: ?Sized>(Q);
+ // ```
+ // MIR of this struct constructor looks something like this:
+ // ```rust
+ // fn Qey(_1: Q) -> Qey<Q>{
+ // let mut _0: Qey<Q>; // return place
+ //
+ // bb0: {
+ // (_0.0: Q) = move _1; // bb0[0]: scope 0 at src/main.rs:1:1: 1:26
+ // return; // bb0[1]: scope 0 at src/main.rs:1:1: 1:26
+ // }
+ // }
+ // ```
+ // The problem here is that `(_0.0: Q) = move _1;` is valid only if `Q` is
+ // of statically known size, which is not known to be true because of the
+ // `Q: ?Sized` constraint. However, it is true because the constructor can be
+ // called only when `Q` is of statically known size.
+ return_early = true;
+ }
+
+ if return_early {
return BorrowCheckResult {
closure_requirements: None,
used_mut_upvars: SmallVec::new(),
let tcx = infcx.tcx;
let attributes = tcx.get_attrs(def_id);
let param_env = tcx.param_env(def_id);
- let id = tcx.hir
+ let id = tcx
+ .hir
.as_local_node_id(def_id)
.expect("do_mir_borrowck: non-local DefId");
// borrow to provide feedback about why this
// was a move rather than a copy.
match ty.sty {
- ty::TyArray(..) | ty::TySlice(..) =>
- tcx.cannot_move_out_of_interior_noncopy(span, ty, None, origin),
- _ => tcx.cannot_move_out_of(span, "borrowed content", origin)
+ ty::TyArray(..) | ty::TySlice(..) => {
+ tcx.cannot_move_out_of_interior_noncopy(span, ty, None, origin)
+ }
+ _ => tcx.cannot_move_out_of(span, "borrowed content", origin),
}
}
IllegalMoveOriginKind::InteriorOfTypeWithDestructor { container_ty: ty } => {
mir_def_id: def_id,
move_data: &mdpe.move_data,
param_env: param_env,
+ location_table,
movable_generator,
locals_are_invalidated_at_exit: match tcx.hir.body_owner_kind(id) {
hir::BodyOwnerKind::Const | hir::BodyOwnerKind::Static(_) => false,
// Note that this set is expected to be small - only upvars from closures
// would have a chance of erroneously adding non-user-defined mutable vars
// to the set.
- let temporary_used_locals: FxHashSet<Local> =
- mbcx.used_mut.iter()
- .filter(|&local| !mbcx.mir.local_decls[*local].is_user_variable.is_some())
- .cloned()
- .collect();
+ let temporary_used_locals: FxHashSet<Local> = mbcx
+ .used_mut
+ .iter()
+ .filter(|&local| !mbcx.mir.local_decls[*local].is_user_variable.is_some())
+ .cloned()
+ .collect();
mbcx.gather_used_muts(temporary_used_locals);
debug!("mbcx.used_mut: {:?}", mbcx.used_mut);
- for local in mbcx.mir.mut_vars_and_args_iter().filter(|local| !mbcx.used_mut.contains(local)) {
+ for local in mbcx
+ .mir
+ .mut_vars_and_args_iter()
+ .filter(|local| !mbcx.used_mut.contains(local))
+ {
if let ClearCrossCrate::Set(ref vsi) = mbcx.mir.source_scope_local_data {
let local_decl = &mbcx.mir.local_decls[local];
// Skip over locals that begin with an underscore or have no name
match local_decl.name {
- Some(name) => if name.as_str().starts_with("_") { continue; },
+ Some(name) => if name.as_str().starts_with("_") {
+ continue;
+ },
None => continue,
}
UNUSED_MUT,
vsi[local_decl.source_info.scope].lint_root,
span,
- "variable does not need to be mutable"
- )
- .span_suggestion_short(mut_span, "remove this `mut`", "".to_owned())
- .emit();
+ "variable does not need to be mutable",
+ ).span_suggestion_short(mut_span, "remove this `mut`", "".to_owned())
+ .emit();
}
}
mir: &'cx Mir<'tcx>,
mir_def_id: DefId,
move_data: &'cx MoveData<'tcx>,
+
+ /// Map from MIR `Location` to `LocationIndex`; created
+ /// when MIR borrowck begins.
+ location_table: &'cx LocationTable,
+
param_env: ParamEnv<'gcx>,
movable_generator: bool,
/// This keeps track of whether local variables are free-ed when the function
);
}
StatementKind::ReadForMatch(ref place) => {
- self.access_place(ContextKind::ReadForMatch.new(location),
- (place, span),
- (Deep, Read(ReadKind::Borrow(BorrowKind::Shared))),
- LocalMutationIsAllowed::No,
- flow_state,
- );
+ self.access_place(
+ ContextKind::ReadForMatch.new(location),
+ (place, span),
+ (Deep, Read(ReadKind::Borrow(BorrowKind::Shared))),
+ LocalMutationIsAllowed::No,
+ flow_state,
+ );
}
StatementKind::SetDiscriminant {
ref place,
// ignored when consuming results (update to
// flow_state already handled).
}
- StatementKind::Nop |
- StatementKind::UserAssertTy(..) |
- StatementKind::Validate(..) |
- StatementKind::StorageLive(..) => {
+ StatementKind::Nop
+ | StatementKind::UserAssertTy(..)
+ | StatementKind::Validate(..)
+ | StatementKind::StorageLive(..) => {
// `Nop`, `UserAssertTy`, `Validate`, and `StorageLive` are irrelevant
// to borrow check.
}
use rustc::mir::interpret::EvalErrorKind::BoundsCheck;
if let BoundsCheck { ref len, ref index } = *msg {
self.consume_operand(ContextKind::Assert.new(loc), (len, span), flow_state);
- self.consume_operand(
- ContextKind::Assert.new(loc),
- (index, span),
- flow_state,
- );
+ self.consume_operand(ContextKind::Assert.new(loc), (index, span), flow_state);
}
}
WriteAndRead,
}
-use self::ShallowOrDeep::{Deep, Shallow};
use self::ReadOrWrite::{Activation, Read, Reservation, Write};
+use self::ShallowOrDeep::{Deep, Shallow};
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum ArtificialField {
span: Span,
) {
let gcx = self.tcx.global_tcx();
- let drop_field = |
- mir: &mut MirBorrowckCtxt<'cx, 'gcx, 'tcx>,
- (index, field): (usize, ty::Ty<'gcx>),
- | {
+ let drop_field = |mir: &mut MirBorrowckCtxt<'cx, 'gcx, 'tcx>,
+ (index, field): (usize, ty::Ty<'gcx>)| {
let field_ty = gcx.normalize_erasing_regions(mir.param_env, field);
let place = drop_place.clone().field(Field::new(index), field_ty);
}
// Same as above, but for tuples.
ty::TyTuple(tys) => {
- tys.iter().cloned().enumerate()
+ tys.iter()
+ .cloned()
+ .enumerate()
.for_each(|field| drop_field(self, field));
}
// Closures also have disjoint fields, but they are only
// directly accessed in the body of the closure.
ty::TyClosure(def, substs)
- if *drop_place == Place::Local(Local::new(1)) && !self.mir.upvar_decls.is_empty()
- => {
- substs.upvar_tys(def, self.tcx).enumerate()
+ if *drop_place == Place::Local(Local::new(1))
+ && !self.mir.upvar_decls.is_empty() =>
+ {
+ substs
+ .upvar_tys(def, self.tcx)
+ .enumerate()
.for_each(|field| drop_field(self, field));
}
// Generators also have disjoint fields, but they are only
// directly accessed in the body of the generator.
ty::TyGenerator(def, substs, _)
- if *drop_place == Place::Local(Local::new(1)) && !self.mir.upvar_decls.is_empty()
- => {
- substs.upvar_tys(def, self.tcx).enumerate()
+ if *drop_place == Place::Local(Local::new(1))
+ && !self.mir.upvar_decls.is_empty() =>
+ {
+ substs
+ .upvar_tys(def, self.tcx)
+ .enumerate()
.for_each(|field| drop_field(self, field));
}
_ => {
}
}
- if self.access_place_error_reported
+ if self
+ .access_place_error_reported
.contains(&(place_span.0.clone(), place_span.1))
{
debug!(
) -> bool {
debug!(
"check_access_for_conflict(context={:?}, place_span={:?}, sd={:?}, rw={:?})",
- context,
- place_span,
- sd,
- rw,
+ context, place_span, sd, rw,
);
let mut error_reported = false;
let tcx = self.tcx;
let mir = self.mir;
- let location_table = &LocationTable::new(mir);
- let location = location_table.start_index(context.loc);
+ let location = self.location_table.start_index(context.loc);
let borrow_set = self.borrow_set.clone();
each_borrow_involving_path(
self,
(sd, place_span.0),
&borrow_set,
flow_state.borrows_in_scope(location),
- |this, borrow_index, borrow|
- match (rw, borrow.kind) {
+ |this, borrow_index, borrow| match (rw, borrow.kind) {
// Obviously an activation is compatible with its own
// reservation (or even prior activating uses of same
// borrow); so don't check if they interfere.
}
ReadKind::Borrow(bk) => {
error_reported = true;
- this.report_conflicting_borrow(
- context,
- place_span,
- bk,
- &borrow,
- )
+ this.report_conflicting_borrow(context, place_span, bk, &borrow)
}
}
Control::Break
match kind {
WriteKind::MutableBorrow(bk) => {
error_reported = true;
- this.report_conflicting_borrow(
- context,
- place_span,
- bk,
- &borrow,
- )
+ this.report_conflicting_borrow(context, place_span, bk, &borrow)
}
WriteKind::StorageDeadOrDrop => {
error_reported = true;
// moved into the closure and subsequently used by the closure,
// in order to populate our used_mut set.
if let AggregateKind::Closure(def_id, _) = &**aggregate_kind {
- let BorrowCheckResult { used_mut_upvars, .. } = self.tcx.mir_borrowck(*def_id);
+ let BorrowCheckResult {
+ used_mut_upvars, ..
+ } = self.tcx.mir_borrowck(*def_id);
debug!("{:?} used_mut_upvars={:?}", def_id, used_mut_upvars);
for field in used_mut_upvars {
match operands[field.index()] {
self.used_mut_upvars.push(field);
}
}
- Operand::Move(Place::Static(..)) |
- Operand::Copy(..) |
- Operand::Constant(..) => {}
+ Operand::Move(Place::Static(..))
+ | Operand::Copy(..)
+ | Operand::Constant(..) => {}
}
}
}
Place::Static(statik) => {
// Thread-locals might be dropped after the function exits, but
// "true" statics will never be.
- let is_thread_local = self.tcx
+ let is_thread_local = self
+ .tcx
.get_attrs(statik.def_id)
.iter()
.any(|attr| attr.check_name("thread_local"));
Reservation(WriteKind::MutableBorrow(borrow_kind @ BorrowKind::Unique))
| Reservation(WriteKind::MutableBorrow(borrow_kind @ BorrowKind::Mut { .. }))
| Write(WriteKind::MutableBorrow(borrow_kind @ BorrowKind::Unique))
- | Write(WriteKind::MutableBorrow(borrow_kind @ BorrowKind::Mut { .. })) =>
- {
+ | Write(WriteKind::MutableBorrow(borrow_kind @ BorrowKind::Mut { .. })) => {
let is_local_mutation_allowed = match borrow_kind {
BorrowKind::Unique => LocalMutationIsAllowed::Yes,
BorrowKind::Mut { .. } => is_local_mutation_allowed,
// `act` and `acted_on` are strings that let us abstract over
// the verbs used in some diagnostic messages.
- let act; let acted_on;
+ let act;
+ let acted_on;
match error_access {
AccessKind::Mutate => {
let item_msg = match the_place_err {
Place::Projection(box Projection {
base: _,
- elem: ProjectionElem::Deref }
- ) => match self.describe_place(place) {
- Some(description) =>
- format!("`{}` which is behind a `&` reference", description),
+ elem: ProjectionElem::Deref,
+ }) => match self.describe_place(place) {
+ Some(description) => {
+ format!("`{}` which is behind a `&` reference", description)
+ }
None => format!("data in a `&` reference"),
},
_ => item_msg,
};
err = self.tcx.cannot_assign(span, &item_msg, Origin::Mir);
- act = "assign"; acted_on = "written";
+ act = "assign";
+ acted_on = "written";
}
AccessKind::MutableBorrow => {
- err = self.tcx
+ err = self
+ .tcx
.cannot_borrow_path_as_mutable(span, &item_msg, Origin::Mir);
- act = "borrow as mutable"; acted_on = "borrowed as mutable";
+ act = "borrow as mutable";
+ acted_on = "borrowed as mutable";
}
}
let local_decl = &self.mir.local_decls[*local];
assert_eq!(local_decl.mutability, Mutability::Not);
- err.span_label(span, format!("cannot {ACT}", ACT=act));
- err.span_suggestion(local_decl.source_info.span,
- "consider changing this to be mutable",
- format!("mut {}", local_decl.name.unwrap()));
+ err.span_label(span, format!("cannot {ACT}", ACT = act));
+ err.span_suggestion(
+ local_decl.source_info.span,
+ "consider changing this to be mutable",
+ format!("mut {}", local_decl.name.unwrap()),
+ );
}
// complete hack to approximate old AST-borrowck
// diagnostic: if the span starts with a mutable borrow of
// a local variable, then just suggest the user remove it.
- Place::Local(_) if {
- if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) {
- snippet.starts_with("&mut ")
- } else {
- false
- }
- } => {
- err.span_label(span, format!("cannot {ACT}", ACT=act));
+ Place::Local(_)
+ if {
+ if let Ok(snippet) = self.tcx.sess.codemap().span_to_snippet(span) {
+ snippet.starts_with("&mut ")
+ } else {
+ false
+ }
+ } =>
+ {
+ err.span_label(span, format!("cannot {ACT}", ACT = act));
err.span_label(span, "try removing `&mut` here");
}
//
// FIXME: can this case be generalized to work for an
// arbitrary base for the projection?
- Place::Projection(box Projection { base: Place::Local(local),
- elem: ProjectionElem::Deref })
- if self.mir.local_decls[*local].is_nonref_binding() =>
+ Place::Projection(box Projection {
+ base: Place::Local(local),
+ elem: ProjectionElem::Deref,
+ }) if self.mir.local_decls[*local].is_nonref_binding() =>
{
let (err_help_span, suggested_code) =
find_place_to_suggest_ampmut(self.tcx, self.mir, *local);
- err.span_suggestion(err_help_span,
- "consider changing this to be a mutable reference",
- suggested_code);
+ err.span_suggestion(
+ err_help_span,
+ "consider changing this to be a mutable reference",
+ suggested_code,
+ );
let local_decl = &self.mir.local_decls[*local];
if let Some(name) = local_decl.name {
err.span_label(
- span, format!("`{NAME}` is a `&` reference, \
- so the data it refers to cannot be {ACTED_ON}",
- NAME=name, ACTED_ON=acted_on));
+ span,
+ format!(
+ "`{NAME}` is a `&` reference, \
+ so the data it refers to cannot be {ACTED_ON}",
+ NAME = name,
+ ACTED_ON = acted_on
+ ),
+ );
} else {
- err.span_label(span, format!("cannot {ACT} through `&`-reference", ACT=act));
+ err.span_label(
+ span,
+ format!("cannot {ACT} through `&`-reference", ACT = act),
+ );
}
}
_ => {
- err.span_label(span, format!("cannot {ACT}", ACT=act));
+ err.span_label(span, format!("cannot {ACT}", ACT = act));
}
}
// for example, if the RHS is present and the Type is not, then the type is going to
// be inferred *from* the RHS, which means we should highlight that (and suggest
// that they borrow the RHS mutably).
- fn find_place_to_suggest_ampmut<'cx, 'gcx, 'tcx>(tcx: TyCtxt<'cx, 'gcx, 'tcx>,
- mir: &Mir<'tcx>,
- local: Local) -> (Span, String)
- {
+ fn find_place_to_suggest_ampmut<'cx, 'gcx, 'tcx>(
+ tcx: TyCtxt<'cx, 'gcx, 'tcx>,
+ mir: &Mir<'tcx>,
+ local: Local,
+ ) -> (Span, String) {
// This implementation attempts to emulate AST-borrowck prioritization
// by trying (3.), then (2.) and finally falling back on (1.).
let locations = mir.find_assignments(local);
// if this is a variable binding with an explicit type,
// try to highlight that for the suggestion.
Some(ClearCrossCrate::Set(mir::BindingForm::Var(mir::VarBindingForm {
- opt_ty_info: Some(ty_span), .. }))) => ty_span,
+ opt_ty_info: Some(ty_span),
+ ..
+ }))) => ty_span,
Some(ClearCrossCrate::Clear) => bug!("saw cleared local state"),
fn add_used_mut<'d>(
&mut self,
root_place: RootPlace<'d, 'tcx>,
- flow_state: &Flows<'cx, 'gcx, 'tcx>
+ flow_state: &Flows<'cx, 'gcx, 'tcx>,
) {
match root_place {
RootPlace {
let local = &self.mir.local_decls[local];
match local.mutability {
Mutability::Not => match is_local_mutation_allowed {
- LocalMutationIsAllowed::Yes => {
- Ok(RootPlace {
- place,
- is_local_mutation_allowed: LocalMutationIsAllowed::Yes
- })
- }
- LocalMutationIsAllowed::ExceptUpvars => {
- Ok(RootPlace {
- place,
- is_local_mutation_allowed: LocalMutationIsAllowed::ExceptUpvars
- })
- }
+ LocalMutationIsAllowed::Yes => Ok(RootPlace {
+ place,
+ is_local_mutation_allowed: LocalMutationIsAllowed::Yes,
+ }),
+ LocalMutationIsAllowed::ExceptUpvars => Ok(RootPlace {
+ place,
+ is_local_mutation_allowed: LocalMutationIsAllowed::ExceptUpvars,
+ }),
LocalMutationIsAllowed::No => Err(place),
},
- Mutability::Mut => Ok(RootPlace { place, is_local_mutation_allowed }),
+ Mutability::Mut => Ok(RootPlace {
+ place,
+ is_local_mutation_allowed,
+ }),
}
}
- Place::Static(ref static_) =>
+ Place::Static(ref static_) => {
if self.tcx.is_static(static_.def_id) != Some(hir::Mutability::MutMutable) {
Err(place)
} else {
- Ok(RootPlace { place, is_local_mutation_allowed })
- },
+ Ok(RootPlace {
+ place,
+ is_local_mutation_allowed,
+ })
+ }
+ }
Place::Projection(ref proj) => {
match proj.elem {
ProjectionElem::Deref => {
// `*mut` raw pointers are always mutable, regardless of
// context. The users have to check by themselves.
hir::MutMutable => {
- return Ok(RootPlace { place, is_local_mutation_allowed });
+ return Ok(RootPlace {
+ place,
+ is_local_mutation_allowed,
+ });
}
}
}
// }
// ```
let _ = self.is_mutable(&proj.base, is_local_mutation_allowed)?;
- Ok(RootPlace { place, is_local_mutation_allowed })
+ Ok(RootPlace {
+ place,
+ is_local_mutation_allowed,
+ })
}
}
} else {
match *place {
Place::Projection(ref proj) => match proj.elem {
ProjectionElem::Field(field, _ty) => {
- let is_projection_from_ty_closure = proj.base
+ let is_projection_from_ty_closure = proj
+ .base
.ty(self.mir, self.tcx)
.to_ty(self.tcx)
.is_closure();
}
}
}
-
use dataflow::indexes::BorrowIndex;
use polonius_engine::AllFacts as PoloniusAllFacts;
use polonius_engine::Atom;
-use rustc::ty::RegionVid;
+use rustc::ty::{RegionVid, TyCtxt};
use rustc_data_structures::indexed_vec::Idx;
use std::error::Error;
use std::fmt::Debug;
crate type AllFacts = PoloniusAllFacts<RegionVid, BorrowIndex, LocationIndex>;
crate trait AllFactsExt {
+ /// Returns true if there is a need to gather `AllFacts` given the
+ /// current `-Z` flags.
+ fn enabled(tcx: TyCtxt<'_, '_, '_>) -> bool;
+
fn write_to_dir(
&self,
dir: impl AsRef<Path>,
}
impl AllFactsExt for AllFacts {
+ /// Returns true if either the `-Znll-facts` or `-Zpolonius`
+ /// debugging flag is enabled.
+ fn enabled(tcx: TyCtxt<'_, '_, '_>) -> bool {
+ tcx.sess.opts.debugging_opts.nll_facts
+ || tcx.sess.opts.debugging_opts.polonius
+ }
+
fn write_to_dir(
&self,
dir: impl AsRef<Path>,
Option<Rc<Output<RegionVid, BorrowIndex, LocationIndex>>>,
Option<ClosureRegionRequirements<'gcx>>,
) {
- let mut all_facts = if infcx.tcx.sess.opts.debugging_opts.nll_facts
- || infcx.tcx.sess.opts.debugging_opts.polonius
- {
+ let mut all_facts = if AllFacts::enabled(infcx.tcx) {
Some(AllFacts::default())
} else {
None
use super::universal_regions::UniversalRegions;
use borrow_check::nll::region_infer::values::ToElementIndex;
use rustc::hir::def_id::DefId;
+use rustc::infer::canonical::QueryRegionConstraint;
use rustc::infer::error_reporting::nice_region_error::NiceRegionError;
use rustc::infer::region_constraints::{GenericKind, VarInfos};
use rustc::infer::InferCtxt;
use rustc::infer::NLLRegionVariableOrigin;
-use rustc::infer::RegionObligation;
use rustc::infer::RegionVariableOrigin;
-use rustc::infer::SubregionOrigin;
use rustc::mir::{
ClosureOutlivesRequirement, ClosureOutlivesSubject, ClosureRegionRequirements, Local, Location,
Mir,
};
-use rustc::traits::ObligationCause;
-use rustc::ty::{self, RegionVid, Ty, TypeFoldable};
+use rustc::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable};
use rustc::util::common::{self, ErrorReported};
use rustc_data_structures::bitvec::BitVector;
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
use std::fmt;
use std::rc::Rc;
-use syntax::ast;
use syntax_pos::Span;
mod annotation;
pub trait ClosureRegionRequirementsExt<'gcx, 'tcx> {
fn apply_requirements(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
- body_id: ast::NodeId,
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
location: Location,
closure_def_id: DefId,
closure_substs: ty::ClosureSubsts<'tcx>,
- );
+ ) -> Vec<QueryRegionConstraint<'tcx>>;
fn subst_closure_mapping<T>(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
closure_mapping: &IndexVec<RegionVid, ty::Region<'tcx>>,
value: &T,
) -> T
/// requirements.
fn apply_requirements(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
- body_id: ast::NodeId,
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
location: Location,
closure_def_id: DefId,
closure_substs: ty::ClosureSubsts<'tcx>,
- ) {
- let tcx = infcx.tcx;
-
+ ) -> Vec<QueryRegionConstraint<'tcx>> {
debug!(
"apply_requirements(location={:?}, closure_def_id={:?}, closure_substs={:?})",
location, closure_def_id, closure_substs
// into a vector. These are the regions that we will be
// relating to one another.
let closure_mapping =
- &UniversalRegions::closure_mapping(infcx, user_closure_ty, self.num_external_vids);
+ &UniversalRegions::closure_mapping(tcx, user_closure_ty, self.num_external_vids);
debug!("apply_requirements: closure_mapping={:?}", closure_mapping);
// Create the predicates.
- for outlives_requirement in &self.outlives_requirements {
- let outlived_region = closure_mapping[outlives_requirement.outlived_free_region];
-
- // FIXME, this origin is not entirely suitable.
- let origin = SubregionOrigin::CallRcvr(outlives_requirement.blame_span);
-
- match outlives_requirement.subject {
- ClosureOutlivesSubject::Region(region) => {
- let region = closure_mapping[region];
- debug!(
- "apply_requirements: region={:?} \
- outlived_region={:?} \
- outlives_requirement={:?}",
- region, outlived_region, outlives_requirement,
- );
- infcx.sub_regions(origin, outlived_region, region);
- }
+ self.outlives_requirements
+ .iter()
+ .map(|outlives_requirement| {
+ let outlived_region = closure_mapping[outlives_requirement.outlived_free_region];
+
+ match outlives_requirement.subject {
+ ClosureOutlivesSubject::Region(region) => {
+ let region = closure_mapping[region];
+ debug!(
+ "apply_requirements: region={:?} \
+ outlived_region={:?} \
+ outlives_requirement={:?}",
+ region, outlived_region, outlives_requirement,
+ );
+ ty::Binder::dummy(ty::OutlivesPredicate(region.into(), outlived_region))
+ }
- ClosureOutlivesSubject::Ty(ty) => {
- let ty = self.subst_closure_mapping(infcx, closure_mapping, &ty);
- debug!(
- "apply_requirements: ty={:?} \
- outlived_region={:?} \
- outlives_requirement={:?}",
- ty, outlived_region, outlives_requirement,
- );
- infcx.register_region_obligation(
- body_id,
- RegionObligation {
- sup_type: ty,
- sub_region: outlived_region,
- cause: ObligationCause::misc(outlives_requirement.blame_span, body_id),
- },
- );
+ ClosureOutlivesSubject::Ty(ty) => {
+ let ty = self.subst_closure_mapping(tcx, closure_mapping, &ty);
+ debug!(
+ "apply_requirements: ty={:?} \
+ outlived_region={:?} \
+ outlives_requirement={:?}",
+ ty, outlived_region, outlives_requirement,
+ );
+ ty::Binder::dummy(ty::OutlivesPredicate(ty.into(), outlived_region))
+ }
}
- }
- }
+ })
+ .collect()
}
fn subst_closure_mapping<T>(
&self,
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ tcx: TyCtxt<'_, 'gcx, 'tcx>,
closure_mapping: &IndexVec<RegionVid, ty::Region<'tcx>>,
value: &T,
) -> T
where
T: TypeFoldable<'tcx>,
{
- infcx.tcx.fold_regions(value, &mut false, |r, _depth| {
+ tcx.fold_regions(value, &mut false, |r, _depth| {
if let ty::ReClosureBound(vid) = r {
closure_mapping[*vid]
} else {
use borrow_check::nll::region_infer::{OutlivesConstraint, RegionTest, TypeTest};
use borrow_check::nll::type_check::Locations;
use borrow_check::nll::universal_regions::UniversalRegions;
-use rustc::infer::region_constraints::Constraint;
-use rustc::infer::region_constraints::RegionConstraintData;
-use rustc::infer::region_constraints::{Verify, VerifyBound};
+use rustc::infer::canonical::QueryRegionConstraint;
+use rustc::infer::outlives::obligations::{TypeOutlives, TypeOutlivesDelegate};
+use rustc::infer::region_constraints::{GenericKind, VerifyBound};
+use rustc::infer::{self, SubregionOrigin};
use rustc::mir::{Location, Mir};
-use rustc::ty;
+use rustc::ty::subst::UnpackedKind;
+use rustc::ty::{self, TyCtxt};
use syntax::codemap::Span;
-crate struct ConstraintConversion<'a, 'tcx: 'a> {
+crate struct ConstraintConversion<'a, 'gcx: 'tcx, 'tcx: 'a> {
+ tcx: TyCtxt<'a, 'gcx, 'tcx>,
mir: &'a Mir<'tcx>,
universal_regions: &'a UniversalRegions<'tcx>,
location_table: &'a LocationTable,
+ region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
+ implicit_region_bound: Option<ty::Region<'tcx>>,
+ param_env: ty::ParamEnv<'tcx>,
+ locations: Locations,
outlives_constraints: &'a mut Vec<OutlivesConstraint>,
type_tests: &'a mut Vec<TypeTest<'tcx>>,
all_facts: &'a mut Option<AllFacts>,
-
}
-impl<'a, 'tcx> ConstraintConversion<'a, 'tcx> {
+impl<'a, 'gcx, 'tcx> ConstraintConversion<'a, 'gcx, 'tcx> {
crate fn new(
+ tcx: TyCtxt<'a, 'gcx, 'tcx>,
mir: &'a Mir<'tcx>,
universal_regions: &'a UniversalRegions<'tcx>,
location_table: &'a LocationTable,
+ region_bound_pairs: &'a [(ty::Region<'tcx>, GenericKind<'tcx>)],
+ implicit_region_bound: Option<ty::Region<'tcx>>,
+ param_env: ty::ParamEnv<'tcx>,
+ locations: Locations,
outlives_constraints: &'a mut Vec<OutlivesConstraint>,
type_tests: &'a mut Vec<TypeTest<'tcx>>,
all_facts: &'a mut Option<AllFacts>,
) -> Self {
Self {
+ tcx,
mir,
universal_regions,
location_table,
+ region_bound_pairs,
+ implicit_region_bound,
+ param_env,
+ locations,
outlives_constraints,
type_tests,
all_facts,
}
}
- crate fn convert(
- &mut self,
- locations: Locations,
- data: &RegionConstraintData<'tcx>,
- ) {
- debug!("generate: constraints at: {:#?}", locations);
- let RegionConstraintData {
- constraints,
- verifys,
- givens,
- } = data;
-
- let span = self
- .mir
- .source_info(locations.from_location().unwrap_or(Location::START))
- .span;
-
- let at_location = locations.at_location().unwrap_or(Location::START);
-
- for constraint in constraints.keys() {
- debug!("generate: constraint: {:?}", constraint);
- let (a_vid, b_vid) = match constraint {
- Constraint::VarSubVar(a_vid, b_vid) => (*a_vid, *b_vid),
- Constraint::RegSubVar(a_r, b_vid) => (self.to_region_vid(a_r), *b_vid),
- Constraint::VarSubReg(a_vid, b_r) => (*a_vid, self.to_region_vid(b_r)),
- Constraint::RegSubReg(a_r, b_r) => {
- (self.to_region_vid(a_r), self.to_region_vid(b_r))
- }
- };
-
- // We have the constraint that `a_vid <= b_vid`. Add
- // `b_vid: a_vid` to our region checker. Note that we
- // reverse direction, because `regioncx` talks about
- // "outlives" (`>=`) whereas the region constraints
- // talk about `<=`.
- self.add_outlives(span, b_vid, a_vid, at_location);
-
- // In the new analysis, all outlives relations etc
- // "take effect" at the mid point of the statement
- // that requires them, so ignore the `at_location`.
- if let Some(all_facts) = &mut self.all_facts {
- if let Some(from_location) = locations.from_location() {
- all_facts.outlives.push((
- b_vid,
- a_vid,
- self.location_table.mid_index(from_location),
- ));
- } else {
- for location in self.location_table.all_points() {
- all_facts.outlives.push((b_vid, a_vid, location));
+ pub(super) fn convert_all(&mut self, query_constraints: &[QueryRegionConstraint<'tcx>]) {
+ for query_constraint in query_constraints {
+ self.convert(query_constraint);
+ }
+ }
+
+ pub(super) fn convert(&mut self, query_constraint: &QueryRegionConstraint<'tcx>) {
+ debug!("generate: constraints at: {:#?}", self.locations);
+
+ // Extract out various useful fields we'll need below.
+ let ConstraintConversion {
+ tcx,
+ region_bound_pairs,
+ implicit_region_bound,
+ param_env,
+ ..
+ } = *self;
+
+ // At the moment, we never generate any "higher-ranked"
+ // region constraints like `for<'a> 'a: 'b`. At some point
+ // when we move to universes, we will, and this assertion
+ // will start to fail.
+ let ty::OutlivesPredicate(k1, r2) =
+ query_constraint.no_late_bound_regions().unwrap_or_else(|| {
+ span_bug!(
+ self.span(),
+ "query_constraint {:?} contained bound regions",
+ query_constraint,
+ );
+ });
+
+ match k1.unpack() {
+ UnpackedKind::Lifetime(r1) => {
+ let r1_vid = self.to_region_vid(r1);
+ let r2_vid = self.to_region_vid(r2);
+ self.add_outlives(r1_vid, r2_vid);
+
+ // In the new analysis, all outlives relations etc
+ // "take effect" at the mid point of the statement
+ // that requires them, so ignore the `at_location`.
+ if let Some(all_facts) = &mut self.all_facts {
+ if let Some(from_location) = self.locations.from_location() {
+ all_facts.outlives.push((
+ r1_vid,
+ r2_vid,
+ self.location_table.mid_index(from_location),
+ ));
+ } else {
+ for location in self.location_table.all_points() {
+ all_facts.outlives.push((r1_vid, r2_vid, location));
+ }
}
}
}
- }
- for verify in verifys {
- let type_test = self.verify_to_type_test(verify, span, locations);
- self.add_type_test(type_test);
+ UnpackedKind::Type(t1) => {
+ // we don't actually use this for anything, but
+ // the `TypeOutlives` code needs an origin.
+ let origin = infer::RelateParamBound(self.span(), t1);
+
+ TypeOutlives::new(
+ &mut *self,
+ tcx,
+ region_bound_pairs,
+ implicit_region_bound,
+ param_env,
+ ).type_must_outlive(origin, t1, r2);
+ }
}
-
- assert!(
- givens.is_empty(),
- "MIR type-checker does not use givens (thank goodness)"
- );
}
fn verify_to_type_test(
&self,
- verify: &Verify<'tcx>,
- span: Span,
- locations: Locations,
+ generic_kind: GenericKind<'tcx>,
+ region: ty::Region<'tcx>,
+ bound: VerifyBound<'tcx>,
) -> TypeTest<'tcx> {
- let generic_kind = verify.kind;
-
- let lower_bound = self.to_region_vid(verify.region);
+ let lower_bound = self.to_region_vid(region);
- let point = locations.at_location().unwrap_or(Location::START);
+ let point = self.locations.at_location().unwrap_or(Location::START);
- let test = self.verify_bound_to_region_test(&verify.bound);
+ let test = self.verify_bound_to_region_test(&bound);
TypeTest {
generic_kind,
lower_bound,
point,
- span,
+ span: self.span(),
test,
}
}
self.universal_regions.to_region_vid(r)
}
- fn add_outlives(
- &mut self,
- span: Span,
- sup: ty::RegionVid,
- sub: ty::RegionVid,
- point: Location,
- ) {
+ fn span(&self) -> Span {
+ self.mir
+ .source_info(self.locations.from_location().unwrap_or(Location::START))
+ .span
+ }
+
+ fn add_outlives(&mut self, sup: ty::RegionVid, sub: ty::RegionVid) {
+ let span = self.span();
+ let point = self.locations.at_location().unwrap_or(Location::START);
+
self.outlives_constraints.push(OutlivesConstraint {
span,
sub,
self.type_tests.push(type_test);
}
}
+
+impl<'a, 'b, 'gcx, 'tcx> TypeOutlivesDelegate<'tcx>
+ for &'a mut ConstraintConversion<'b, 'gcx, 'tcx>
+{
+ fn push_sub_region_constraint(
+ &mut self,
+ _origin: SubregionOrigin<'tcx>,
+ a: ty::Region<'tcx>,
+ b: ty::Region<'tcx>,
+ ) {
+ let b = self.universal_regions.to_region_vid(b);
+ let a = self.universal_regions.to_region_vid(a);
+ self.add_outlives(b, a);
+ }
+
+ fn push_verify(
+ &mut self,
+ _origin: SubregionOrigin<'tcx>,
+ kind: GenericKind<'tcx>,
+ a: ty::Region<'tcx>,
+ bound: VerifyBound<'tcx>,
+ ) {
+ let type_test = self.verify_to_type_test(kind, a, bound);
+ self.add_type_test(type_test);
+ }
+}
use rustc::infer::InferOk;
use rustc::mir::visit::TyContext;
use rustc::mir::*;
+use rustc::traits::query::type_op::custom::CustomTypeOp;
use rustc::traits::{ObligationCause, PredicateObligations};
use rustc::ty::subst::Subst;
use rustc::ty::Ty;
// Equate expected input tys with those in the MIR.
let argument_locals = (1..).map(Local::new);
for (&unnormalized_input_ty, local) in unnormalized_input_tys.iter().zip(argument_locals) {
- let input_ty = self.normalize(&unnormalized_input_ty, Locations::All);
+ let input_ty = self.normalize(unnormalized_input_ty, Locations::All);
let mir_input_ty = mir.local_decls[local].ty;
self.equate_normalized_input_or_output(input_ty, mir_input_ty);
}
"equate_inputs_and_outputs: unnormalized_output_ty={:?}",
unnormalized_output_ty
);
- let output_ty = self.normalize(&unnormalized_output_ty, Locations::All);
+ let output_ty = self.normalize(unnormalized_output_ty, Locations::All);
debug!(
"equate_inputs_and_outputs: normalized output_ty={:?}",
output_ty
);
+ let param_env = self.param_env;
let mir_output_ty = mir.local_decls[RETURN_PLACE].ty;
let anon_type_map =
self.fully_perform_op(
Locations::All,
- || format!("input_output"),
- |cx| {
- let mut obligations = ObligationAccumulator::default();
-
- let dummy_body_id = ObligationCause::dummy().body_id;
- let (output_ty, anon_type_map) = obligations.add(infcx.instantiate_anon_types(
- mir_def_id,
- dummy_body_id,
- cx.param_env,
- &output_ty,
- ));
- debug!(
- "equate_inputs_and_outputs: instantiated output_ty={:?}",
- output_ty
- );
- debug!(
- "equate_inputs_and_outputs: anon_type_map={:#?}",
- anon_type_map
- );
-
- debug!(
- "equate_inputs_and_outputs: mir_output_ty={:?}",
- mir_output_ty
- );
- obligations.add(
- infcx
- .at(&ObligationCause::dummy(), cx.param_env)
- .eq(output_ty, mir_output_ty)?,
- );
-
- for (&anon_def_id, anon_decl) in &anon_type_map {
- let anon_defn_ty = tcx.type_of(anon_def_id);
- let anon_defn_ty = anon_defn_ty.subst(tcx, anon_decl.substs);
- let anon_defn_ty = renumber::renumber_regions(
- cx.infcx,
- TyContext::Location(Location::START),
- &anon_defn_ty,
+ CustomTypeOp::new(
+ |infcx| {
+ let mut obligations = ObligationAccumulator::default();
+
+ let dummy_body_id = ObligationCause::dummy().body_id;
+ let (output_ty, anon_type_map) =
+ obligations.add(infcx.instantiate_anon_types(
+ mir_def_id,
+ dummy_body_id,
+ param_env,
+ &output_ty,
+ ));
+ debug!(
+ "equate_inputs_and_outputs: instantiated output_ty={:?}",
+ output_ty
);
debug!(
- "equate_inputs_and_outputs: concrete_ty={:?}",
- anon_decl.concrete_ty
+ "equate_inputs_and_outputs: anon_type_map={:#?}",
+ anon_type_map
+ );
+
+ debug!(
+ "equate_inputs_and_outputs: mir_output_ty={:?}",
+ mir_output_ty
);
- debug!("equate_inputs_and_outputs: anon_defn_ty={:?}", anon_defn_ty);
obligations.add(
infcx
- .at(&ObligationCause::dummy(), cx.param_env)
- .eq(anon_decl.concrete_ty, anon_defn_ty)?,
+ .at(&ObligationCause::dummy(), param_env)
+ .eq(output_ty, mir_output_ty)?,
);
- }
-
- debug!("equate_inputs_and_outputs: equated");
- Ok(InferOk {
- value: Some(anon_type_map),
- obligations: obligations.into_vec(),
- })
- },
+ for (&anon_def_id, anon_decl) in &anon_type_map {
+ let anon_defn_ty = tcx.type_of(anon_def_id);
+ let anon_defn_ty = anon_defn_ty.subst(tcx, anon_decl.substs);
+ let anon_defn_ty = renumber::renumber_regions(
+ infcx,
+ TyContext::Location(Location::START),
+ &anon_defn_ty,
+ );
+ debug!(
+ "equate_inputs_and_outputs: concrete_ty={:?}",
+ anon_decl.concrete_ty
+ );
+ debug!("equate_inputs_and_outputs: anon_defn_ty={:?}", anon_defn_ty);
+ obligations.add(
+ infcx
+ .at(&ObligationCause::dummy(), param_env)
+ .eq(anon_decl.concrete_ty, anon_defn_ty)?,
+ );
+ }
+
+ debug!("equate_inputs_and_outputs: equated");
+
+ Ok(InferOk {
+ value: Some(anon_type_map),
+ obligations: obligations.into_vec(),
+ })
+ },
+ || format!("input_output"),
+ ),
).unwrap_or_else(|terr| {
span_mirbug!(
self,
if let Some(anon_type_map) = anon_type_map {
self.fully_perform_op(
Locations::All,
- || format!("anon_type_map"),
- |_cx| {
- infcx.constrain_anon_types(&anon_type_map, universal_regions);
- Ok(InferOk {
- value: (),
- obligations: vec![],
- })
- },
+ CustomTypeOp::new(
+ |_cx| {
+ infcx.constrain_anon_types(&anon_type_map, universal_regions);
+ Ok(InferOk {
+ value: (),
+ obligations: vec![],
+ })
+ },
+ || format!("anon_type_map"),
+ ),
).unwrap();
}
}
use dataflow::move_paths::{HasMoveData, MoveData};
use dataflow::MaybeInitializedPlaces;
use dataflow::{FlowAtLocation, FlowsAtLocation};
-use rustc::infer::region_constraints::RegionConstraintData;
+use rustc::infer::canonical::QueryRegionConstraint;
use rustc::mir::Local;
use rustc::mir::{BasicBlock, Location, Mir};
-use rustc::traits::ObligationCause;
-use rustc::ty::subst::Kind;
+use rustc::traits::query::dropck_outlives::DropckOutlivesResult;
+use rustc::traits::query::type_op::outlives::DropckOutlives;
+use rustc::traits::query::type_op::TypeOp;
use rustc::ty::{Ty, TypeFoldable};
use rustc_data_structures::fx::FxHashMap;
use std::rc::Rc;
}
struct DropData<'tcx> {
- dropped_kinds: Vec<Kind<'tcx>>,
- region_constraint_data: Option<Rc<RegionConstraintData<'tcx>>>,
+ dropck_result: DropckOutlivesResult<'tcx>,
+ region_constraint_data: Option<Rc<Vec<QueryRegionConstraint<'tcx>>>>,
}
impl<'gen, 'typeck, 'flow, 'gcx, 'tcx> TypeLivenessGenerator<'gen, 'typeck, 'flow, 'gcx, 'tcx> {
);
cx.tcx().for_each_free_region(&value, |live_region| {
- cx
- .constraints
+ cx.constraints
.liveness_set
.push((live_region, location, cause.clone()));
});
});
if let Some(data) = &drop_data.region_constraint_data {
- self.cx
- .push_region_constraints(location.at_self(), data.clone());
+ self.cx.push_region_constraints(location.at_self(), data);
}
+ drop_data.dropck_result.report_overflows(
+ self.cx.infcx.tcx,
+ self.mir.source_info(location).span,
+ dropped_ty,
+ );
+
// All things in the `outlives` array may be touched by
// the destructor and must be live at this point.
let cause = Cause::DropVar(dropped_local, location);
- for &kind in &drop_data.dropped_kinds {
+ for &kind in &drop_data.dropck_result.kinds {
Self::push_type_live_constraint(&mut self.cx, kind, location, cause);
}
}
) -> DropData<'tcx> {
debug!("compute_drop_data(dropped_ty={:?})", dropped_ty,);
- let (dropped_kinds, region_constraint_data) =
- cx.fully_perform_op_and_get_region_constraint_data(
- || format!("compute_drop_data(dropped_ty={:?})", dropped_ty),
- |cx| {
- Ok(cx
- .infcx
- .at(&ObligationCause::dummy(), cx.param_env)
- .dropck_outlives(dropped_ty))
- },
- ).unwrap();
+ let param_env = cx.param_env;
+ let (dropck_result, region_constraint_data) = param_env
+ .and(DropckOutlives::new(dropped_ty))
+ .fully_perform(cx.infcx)
+ .unwrap();
DropData {
- dropped_kinds,
+ dropck_result,
region_constraint_data,
}
}
use dataflow::FlowAtLocation;
use dataflow::MaybeInitializedPlaces;
use rustc::hir::def_id::DefId;
-use rustc::infer::region_constraints::{GenericKind, RegionConstraintData};
-use rustc::infer::{InferCtxt, InferOk, InferResult, LateBoundRegionConversionTime, UnitResult};
+use rustc::infer::canonical::QueryRegionConstraint;
+use rustc::infer::region_constraints::GenericKind;
+use rustc::infer::{InferCtxt, LateBoundRegionConversionTime};
use rustc::mir::interpret::EvalErrorKind::BoundsCheck;
use rustc::mir::tcx::PlaceTy;
use rustc::mir::visit::{PlaceContext, Visitor};
use rustc::mir::*;
-use rustc::traits::query::NoSolution;
-use rustc::traits::{self, ObligationCause, Normalized, TraitEngine};
-use rustc::ty::error::TypeError;
+use rustc::traits::query::type_op;
+use rustc::traits::query::{Fallible, NoSolution};
use rustc::ty::fold::TypeFoldable;
use rustc::ty::{self, ToPolyTraitRef, Ty, TyCtxt, TypeVariants};
use std::fmt;
-use std::rc::Rc;
use syntax_pos::{Span, DUMMY_SP};
use transform::{MirPass, MirSource};
use util::liveness::LivenessResults;
let instantiated_predicates =
tcx.predicates_of(def_id).instantiate(tcx, substs);
- let predicates =
- type_checker.normalize(&instantiated_predicates.predicates, location);
- type_checker.prove_predicates(predicates, location);
+ type_checker.normalize_and_prove_instantiated_predicates(
+ instantiated_predicates,
+ location,
+ );
}
value.ty
Place::Static(box Static { def_id, ty: sty }) => {
let sty = self.sanitize_type(place, sty);
let ty = self.tcx().type_of(def_id);
- let ty = self.cx.normalize(&ty, location);
+ let ty = self.cx.normalize(ty, location);
if let Err(terr) = self.cx.eq_types(ty, sty, location.at_self()) {
span_mirbug!(
self,
fn fully_perform_op<R>(
&mut self,
locations: Locations,
- describe_op: impl Fn() -> String,
- op: impl FnOnce(&mut Self) -> InferResult<'tcx, R>,
- ) -> Result<R, TypeError<'tcx>> {
- let (r, opt_data) = self.fully_perform_op_and_get_region_constraint_data(
- || format!("{} at {:?}", describe_op(), locations),
- op,
- )?;
-
- if let Some(data) = opt_data {
+ op: impl type_op::TypeOp<'gcx, 'tcx, Output = R>,
+ ) -> Fallible<R> {
+ let (r, opt_data) = op.fully_perform(self.infcx)?;
+
+ if let Some(data) = &opt_data {
self.push_region_constraints(locations, data);
}
fn push_region_constraints(
&mut self,
locations: Locations,
- data: Rc<RegionConstraintData<'tcx>>,
+ data: &[QueryRegionConstraint<'tcx>],
) {
debug!(
"push_region_constraints: constraints generated at {:?} are {:#?}",
if let Some(borrowck_context) = &mut self.borrowck_context {
constraint_conversion::ConstraintConversion::new(
+ self.infcx.tcx,
self.mir,
borrowck_context.universal_regions,
borrowck_context.location_table,
+ self.region_bound_pairs,
+ self.implicit_region_bound,
+ self.param_env,
+ locations,
&mut self.constraints.outlives_constraints,
&mut self.constraints.type_tests,
&mut borrowck_context.all_facts,
- ).convert(locations, &data);
- }
- }
-
- /// Helper for `fully_perform_op`, but also used on its own
- /// sometimes to enable better caching: executes `op` fully (along
- /// with resulting obligations) and returns the full set of region
- /// obligations. If the same `op` were to be performed at some
- /// other location, then the same set of region obligations would
- /// be generated there, so this can be useful for caching.
- fn fully_perform_op_and_get_region_constraint_data<R>(
- &mut self,
- describe_op: impl Fn() -> String,
- op: impl FnOnce(&mut Self) -> InferResult<'tcx, R>,
- ) -> Result<(R, Option<Rc<RegionConstraintData<'tcx>>>), TypeError<'tcx>> {
- if cfg!(debug_assertions) {
- info!(
- "fully_perform_op_and_get_region_constraint_data({})",
- describe_op(),
- );
- }
-
- let mut fulfill_cx = TraitEngine::new(self.infcx.tcx);
- let dummy_body_id = ObligationCause::dummy().body_id;
- let InferOk { value, obligations } = self.infcx.commit_if_ok(|_| op(self))?;
- debug_assert!(obligations.iter().all(|o| o.cause.body_id == dummy_body_id));
- fulfill_cx.register_predicate_obligations(self.infcx, obligations);
- if let Err(e) = fulfill_cx.select_all_or_error(self.infcx) {
- span_mirbug!(self, "", "errors selecting obligation: {:?}", e);
- }
-
- self.infcx.process_registered_region_obligations(
- self.region_bound_pairs,
- self.implicit_region_bound,
- self.param_env,
- dummy_body_id,
- );
-
- let data = self.infcx.take_and_reset_region_constraints();
- if data.is_empty() {
- Ok((value, None))
- } else {
- Ok((value, Some(Rc::new(data))))
+ ).convert_all(&data);
}
}
sub: Ty<'tcx>,
sup: Ty<'tcx>,
locations: Locations,
- ) -> UnitResult<'tcx> {
- // Micro-optimization.
- if sub == sup {
- return Ok(());
- }
-
+ ) -> Fallible<()> {
+ let param_env = self.param_env;
self.fully_perform_op(
locations,
- || format!("sub_types({:?} <: {:?})", sub, sup),
- |this| {
- this.infcx
- .at(&ObligationCause::dummy(), this.param_env)
- .sup(sup, sub)
- },
+ param_env.and(type_op::subtype::Subtype::new(sub, sup)),
)
}
- fn eq_types(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, locations: Locations) -> UnitResult<'tcx> {
- // Micro-optimization.
- if a == b {
- return Ok(());
- }
-
- self.fully_perform_op(
- locations,
- || format!("eq_types({:?} = {:?})", a, b),
- |this| {
- this.infcx
- .at(&ObligationCause::dummy(), this.param_env)
- .eq(b, a)
- },
- )
+ fn eq_types(&mut self, a: Ty<'tcx>, b: Ty<'tcx>, locations: Locations) -> Fallible<()> {
+ let param_env = self.param_env;
+ self.fully_perform_op(locations, param_env.and(type_op::eq::Eq::new(b, a)))
}
fn tcx(&self) -> TyCtxt<'a, 'gcx, 'tcx> {
);
}
self.check_rvalue(mir, rv, location);
+ let trait_ref = ty::TraitRef {
+ def_id: tcx.lang_items().sized_trait().unwrap(),
+ substs: tcx.mk_substs_trait(place_ty, &[]),
+ };
+ self.prove_trait_ref(trait_ref, location);
}
StatementKind::SetDiscriminant {
ref place,
LateBoundRegionConversionTime::FnCall,
&sig,
);
- let sig = self.normalize(&sig, term_location);
+ let sig = self.normalize(sig, term_location);
self.check_call_dest(mir, term, &sig, destination, term_location);
self.prove_predicates(
let variant = &def.variants[variant_index];
let adj_field_index = active_field_index.unwrap_or(field_index);
if let Some(field) = variant.fields.get(adj_field_index) {
- Ok(self.normalize(&field.ty(tcx, substs), location))
+ Ok(self.normalize(field.ty(tcx, substs), location))
} else {
Err(FieldAccessError::OutOfRange {
field_count: variant.fields.len(),
// function definition. When we extract the
// signature, it comes from the `fn_sig` query,
// and hence may contain unnormalized results.
- let fn_sig = self.normalize(&fn_sig, location);
+ let fn_sig = self.normalize(fn_sig, location);
let ty_fn_ptr_from = tcx.mk_fn_ptr(fn_sig);
// function definition. When we extract the
// signature, it comes from the `fn_sig` query,
// and hence may contain unnormalized results.
- let fn_sig = self.normalize(&fn_sig, location);
+ let fn_sig = self.normalize(fn_sig, location);
let ty_fn_ptr_from = tcx.safe_to_unsafe_fn_ty(fn_sig);
if let Some(closure_region_requirements) =
tcx.mir_borrowck(*def_id).closure_requirements
{
- let dummy_body_id = ObligationCause::dummy().body_id;
- closure_region_requirements.apply_requirements(
- self.infcx,
- dummy_body_id,
+ let closure_constraints = closure_region_requirements.apply_requirements(
+ self.infcx.tcx,
location,
*def_id,
*substs,
);
+
+ self.push_region_constraints(
+ location.at_self(),
+ &closure_constraints,
+ );
}
tcx.predicates_of(*def_id).instantiate(tcx, substs.substs)
AggregateKind::Array(_) | AggregateKind::Tuple => ty::InstantiatedPredicates::empty(),
};
- let predicates = self.normalize(&instantiated_predicates.predicates, location);
- debug!("prove_aggregate_predicates: predicates={:?}", predicates);
- self.prove_predicates(predicates, location);
+ self.normalize_and_prove_instantiated_predicates(instantiated_predicates, location);
}
fn prove_trait_ref(&mut self, trait_ref: ty::TraitRef<'tcx>, location: Location) {
);
}
- fn prove_predicates<T>(&mut self, predicates: T, location: Location)
- where
- T: IntoIterator<Item = ty::Predicate<'tcx>> + Clone,
- {
- let cause = ObligationCause::dummy();
- let obligations: Vec<_> = predicates
- .into_iter()
- .map(|p| traits::Obligation::new(cause.clone(), self.param_env, p))
- .collect();
-
- // Micro-optimization
- if obligations.is_empty() {
- return;
+ fn normalize_and_prove_instantiated_predicates(
+ &mut self,
+ instantiated_predicates: ty::InstantiatedPredicates<'tcx>,
+ location: Location,
+ ) {
+ for predicate in instantiated_predicates.predicates {
+ let predicate = self.normalize(predicate, location);
+ self.prove_predicate(predicate, location);
}
+ }
- // This intermediate vector is mildly unfortunate, in that we
- // sometimes create it even when logging is disabled, but only
- // if debug-info is enabled, and I doubt it is actually
- // expensive. -nmatsakis
- let predicates_vec: Vec<_> = if cfg!(debug_assertions) {
- obligations.iter().map(|o| o.predicate).collect()
- } else {
- Vec::new()
- };
+ fn prove_predicates(
+ &mut self,
+ predicates: impl IntoIterator<Item = ty::Predicate<'tcx>>,
+ location: Location,
+ ) {
+ for predicate in predicates {
+ debug!(
+ "prove_predicates(predicate={:?}, location={:?})",
+ predicate, location,
+ );
+ self.prove_predicate(predicate, location);
+ }
+ }
+
+ fn prove_predicate(&mut self, predicate: ty::Predicate<'tcx>, location: Location) {
debug!(
- "prove_predicates(predicates={:?}, location={:?})",
- predicates_vec, location,
+ "prove_predicate(predicate={:?}, location={:?})",
+ predicate, location,
);
+ let param_env = self.param_env;
self.fully_perform_op(
location.at_self(),
- || format!("prove_predicates({:?})", predicates_vec),
- |_this| {
- Ok(InferOk {
- value: (),
- obligations,
- })
- },
- ).unwrap()
+ param_env.and(type_op::prove_predicate::ProvePredicate::new(predicate)),
+ ).unwrap_or_else(|NoSolution| {
+ span_mirbug!(self, NoSolution, "could not prove {:?}", predicate);
+ })
}
fn typeck_mir(&mut self, mir: &Mir<'tcx>) {
}
}
- fn normalize<T>(&mut self, value: &T, location: impl ToLocations) -> T
+ fn normalize<T>(&mut self, value: T, location: impl ToLocations) -> T
where
- T: fmt::Debug + TypeFoldable<'tcx>,
+ T: type_op::normalize::Normalizable<'gcx, 'tcx> + Copy,
{
- // Micro-optimization: avoid work when we don't have to
- if !value.has_projections() {
- return value.clone();
- }
-
debug!("normalize(value={:?}, location={:?})", value, location);
+ let param_env = self.param_env;
self.fully_perform_op(
location.to_locations(),
- || format!("normalize(value={:?})", value),
- |this| {
- let Normalized { value, obligations } = this
- .infcx
- .at(&ObligationCause::dummy(), this.param_env)
- .normalize(value)
- .unwrap_or_else(|NoSolution| {
- span_bug!(
- this.last_span,
- "normalization of `{:?}` failed at {:?}",
- value,
- location,
- );
- });
- Ok(InferOk { value, obligations })
- },
- ).unwrap()
+ param_env.and(type_op::normalize::Normalize::new(value)),
+ ).unwrap_or_else(|NoSolution| {
+ span_mirbug!(self, NoSolution, "failed to normalize `{:?}`", value);
+ value
+ })
}
}
// broken MIR, so try not to report duplicate errors.
return;
}
+
+ if tcx.is_struct_constructor(def_id) {
+ // We just assume that the automatically generated struct constructors are
+ // correct. See the comment in the `mir_borrowck` implementation for an
+ // explanation why we need this.
+ return;
+ }
+
let param_env = tcx.param_env(def_id);
tcx.infer_ctxt().enter(|infcx| {
- let _ = type_check_internal(
- &infcx,
- def_id,
- param_env,
- mir,
- &[],
- None,
- None,
- &mut |_| (),
- );
+ let _ =
+ type_check_internal(&infcx, def_id, param_env, mir, &[], None, None, &mut |_| ());
// For verification purposes, we just ignore the resulting
// region constraint sets. Not our problem. =)
/// `'1: '2`, then the caller would impose the constraint that
/// `V[1]: V[2]`.
pub fn closure_mapping(
- infcx: &InferCtxt<'_, '_, 'tcx>,
+ tcx: TyCtxt<'_, '_, 'tcx>,
closure_ty: Ty<'tcx>,
expected_num_vars: usize,
) -> IndexVec<RegionVid, ty::Region<'tcx>> {
let mut region_mapping = IndexVec::with_capacity(expected_num_vars);
- region_mapping.push(infcx.tcx.types.re_static);
- infcx.tcx.for_each_free_region(&closure_ty, |fr| {
+ region_mapping.push(tcx.types.re_static);
+ tcx.for_each_free_region(&closure_ty, |fr| {
region_mapping.push(fr);
});
mutability: Mutability::Not,
};
if let Some(hir::map::NodeBinding(pat)) = tcx.hir.find(var_id) {
- if let hir::PatKind::Binding(_, _, ref name, _) = pat.node {
- decl.debug_name = name.node;
+ if let hir::PatKind::Binding(_, _, ident, _) = pat.node {
+ decl.debug_name = ident.name;
let bm = *hir.tables.pat_binding_modes()
.get(pat.hir_id)
// If this is a simple binding pattern, give the local a nice name for debuginfo.
let mut name = None;
if let Some(pat) = pattern {
- if let hir::PatKind::Binding(_, _, ref ident, _) = pat.node {
- name = Some(ident.node);
+ if let hir::PatKind::Binding(_, _, ident, _) = pat.node {
+ name = Some(ident.name);
}
}
// ==============
/// Finds the breakable scope for a given label. This is used for
/// resolving `break` and `continue`.
- pub fn find_breakable_scope(&mut self,
+ pub fn find_breakable_scope(&self,
span: Span,
label: region::Scope)
- -> &mut BreakableScope<'tcx> {
+ -> &BreakableScope<'tcx> {
// find the loop-scope with the correct id
- self.breakable_scopes.iter_mut()
+ self.breakable_scopes.iter()
.rev()
.filter(|breakable_scope| breakable_scope.region_scope == label)
.next()
```compile_fail
match 5u32 {
// This range is ok, albeit pointless.
- 1 ... 1 => {}
+ 1 ..= 1 => {}
// This range is empty, and the compiler can tell.
- 1000 ... 5 => {}
+ 1000 ..= 5 => {}
}
```
"##,
let method_name = Symbol::intern(method_name);
let substs = self.tcx.mk_substs_trait(self_ty, params);
for item in self.tcx.associated_items(trait_def_id) {
- if item.kind == ty::AssociatedKind::Method && item.name == method_name {
+ if item.kind == ty::AssociatedKind::Method && item.ident.name == method_name {
let method_ty = self.tcx.type_of(item.def_id);
let method_ty = method_ty.subst(self.tcx, substs);
return (method_ty,
use self::Usefulness::*;
use self::WitnessPreference::*;
-use rustc::middle::const_val::ConstVal;
-
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::indexed_vec::Idx;
use rustc::ty::{self, Ty, TyCtxt, TypeFoldable};
use rustc::mir::Field;
+use rustc::mir::interpret::ConstValue;
use rustc::util::common::ErrorReported;
use syntax_pos::{Span, DUMMY_SP};
for row in patterns {
match *row.kind {
- PatternKind::Constant {
- value: const_val @ &ty::Const {
- val: ConstVal::Value(..),
- ..
- }
- } => {
- if let Some(ptr) = const_val.to_ptr() {
- let is_array_ptr = const_val.ty
+ PatternKind::Constant { value } => {
+ if let Some(ptr) = value.to_ptr() {
+ let is_array_ptr = value.ty
.builtin_deref(true)
.and_then(|t| t.ty.builtin_index())
.map_or(false, |t| t == cx.tcx.types.u8);
suffix: &[Pattern<'tcx>]
) -> Result<bool, ErrorReported> {
let data: &[u8] = match *ctor {
- ConstantValue(const_val @ &ty::Const { val: ConstVal::Value(..), .. }) => {
- if let Some(ptr) = const_val.to_ptr() {
+ ConstantValue(const_val) => {
+ let val = match const_val.val {
+ ConstValue::Unevaluated(..) |
+ ConstValue::ByRef(..) => bug!("unexpected ConstValue: {:?}", const_val),
+ ConstValue::Scalar(val) | ConstValue::ScalarPair(val, _) => val,
+ };
+ if let Ok(ptr) = val.to_ptr() {
let is_array_ptr = const_val.ty
.builtin_deref(true)
.and_then(|t| t.ty.builtin_index())
assert!(is_array_ptr);
tcx.alloc_map.lock().unwrap_memory(ptr.alloc_id).bytes.as_ref()
} else {
- bug!()
+ bug!("unexpected non-ptr ConstantValue")
}
}
_ => bug!()
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::subst::Substs;
use rustc::lint;
-use rustc_errors::DiagnosticBuilder;
+use rustc_errors::{Applicability, DiagnosticBuilder};
use rustc::util::common::ErrorReported;
use rustc::hir::def::*;
}
PatternError::FloatBug => {
// FIXME(#31407) this is only necessary because float parsing is buggy
- ::rustc::middle::const_val::struct_error(
+ ::rustc::mir::interpret::struct_error(
self.tcx.at(pat_span),
"could not evaluate float literal (see issue #31407)",
).emit();
}
PatternError::NonConstPath(span) => {
- ::rustc::middle::const_val::struct_error(
+ ::rustc::mir::interpret::struct_error(
self.tcx.at(span),
"runtime values cannot be referenced in patterns",
).emit();
fn check_for_bindings_named_the_same_as_variants(cx: &MatchVisitor, pat: &Pat) {
pat.walk(|p| {
- if let PatKind::Binding(_, _, name, None) = p.node {
+ if let PatKind::Binding(_, _, ident, None) = p.node {
let bm = *cx.tables
.pat_binding_modes()
.get(p.hir_id)
let pat_ty = cx.tables.pat_ty(p);
if let ty::TyAdt(edef, _) = pat_ty.sty {
if edef.is_enum() && edef.variants.iter().any(|variant| {
- variant.name == name.node && variant.ctor_kind == CtorKind::Const
+ variant.name == ident.name && variant.ctor_kind == CtorKind::Const
}) {
let ty_path = cx.tcx.item_path_str(edef.did);
let mut err = struct_span_warn!(cx.tcx.sess, p.span, E0170,
"pattern binding `{}` is named the same as one \
of the variants of the type `{}`",
- name.node, ty_path);
- help!(err,
- "if you meant to match on a variant, \
- consider making the path in the pattern qualified: `{}::{}`",
- ty_path, name.node);
+ ident, ty_path);
+ err.span_suggestion_with_applicability(
+ p.span,
+ "to match on the variant, qualify the path",
+ format!("{}::{}", ty_path, ident),
+ Applicability::MachineApplicable
+ );
err.emit();
}
}
NotUseful => {
match source {
hir::MatchSource::IfLetDesugar { .. } => {
- if printed_if_let_err {
- // we already printed an irrefutable if-let pattern error.
- // We don't want two, that's just confusing.
+ if cx.tcx.features().irrefutable_let_patterns {
+ cx.tcx.lint_node(
+ lint::builtin::IRREFUTABLE_LET_PATTERNS,
+ hir_pat.id, pat.span,
+ "irrefutable if-let pattern");
} else {
- // find the first arm pattern so we can use its span
- let &(ref first_arm_pats, _) = &arms[0];
- let first_pat = &first_arm_pats[0];
- let span = first_pat.0.span;
- struct_span_err!(cx.tcx.sess, span, E0162,
- "irrefutable if-let pattern")
- .span_label(span, "irrefutable pattern")
- .emit();
- printed_if_let_err = true;
+ if printed_if_let_err {
+ // we already printed an irrefutable if-let pattern error.
+ // We don't want two, that's just confusing.
+ } else {
+ // find the first arm pattern so we can use its span
+ let &(ref first_arm_pats, _) = &arms[0];
+ let first_pat = &first_arm_pats[0];
+ let span = first_pat.0.span;
+ struct_span_err!(cx.tcx.sess, span, E0162,
+ "irrefutable if-let pattern")
+ .span_label(span, "irrefutable pattern")
+ .emit();
+ printed_if_let_err = true;
+ }
}
},
hir::MatchSource::WhileLetDesugar => {
- // find the first arm pattern so we can use its span
- let &(ref first_arm_pats, _) = &arms[0];
- let first_pat = &first_arm_pats[0];
- let span = first_pat.0.span;
-
// check which arm we're on.
match arm_index {
// The arm with the user-specified pattern.
0 => {
cx.tcx.lint_node(
- lint::builtin::UNREACHABLE_PATTERNS,
+ lint::builtin::UNREACHABLE_PATTERNS,
hir_pat.id, pat.span,
"unreachable pattern");
},
// The arm with the wildcard pattern.
1 => {
- struct_span_err!(cx.tcx.sess, span, E0165,
- "irrefutable while-let pattern")
- .span_label(span, "irrefutable pattern")
- .emit();
+ if cx.tcx.features().irrefutable_let_patterns {
+ cx.tcx.lint_node(
+ lint::builtin::IRREFUTABLE_LET_PATTERNS,
+ hir_pat.id, pat.span,
+ "irrefutable while-let pattern");
+ } else {
+ // find the first arm pattern so we can use its span
+ let &(ref first_arm_pats, _) = &arms[0];
+ let first_pat = &first_arm_pats[0];
+ let span = first_pat.0.span;
+ struct_span_err!(cx.tcx.sess, span, E0165,
+ "irrefutable while-let pattern")
+ .span_label(span, "irrefutable pattern")
+ .emit();
+ }
},
_ => bug!(),
}
let joined_patterns = match witnesses.len() {
0 => bug!(),
1 => format!("`{}`", witnesses[0]),
- 2...LIMIT => {
+ 2..=LIMIT => {
let (tail, head) = witnesses.split_last().unwrap();
let head: Vec<_> = head.iter().map(|w| w.to_string()).collect();
format!("`{}` and `{}`", head.join("`, `"), tail)
use interpret::{const_val_field, const_variant_index, self};
-use rustc::middle::const_val::ConstVal;
use rustc::mir::{fmt_const_val, Field, BorrowKind, Mutability};
use rustc::mir::interpret::{Scalar, GlobalId, ConstValue, Value};
use rustc::ty::{self, TyCtxt, AdtDef, Ty, Region};
},
}
-fn print_const_val(value: &ty::Const, f: &mut fmt::Formatter) -> fmt::Result {
- match value.val {
- ConstVal::Value(..) => fmt_const_val(f, value),
- ConstVal::Unevaluated(..) => bug!("{:?} not printable in a pattern", value)
- }
-}
-
impl<'tcx> fmt::Display for Pattern<'tcx> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self.kind {
write!(f, "{}", subpattern)
}
PatternKind::Constant { value } => {
- print_const_val(value, f)
+ fmt_const_val(f, value)
}
PatternKind::Range { lo, hi, end } => {
- print_const_val(lo, f)?;
+ fmt_const_val(f, lo)?;
match end {
RangeEnd::Included => write!(f, "...")?,
RangeEnd::Excluded => write!(f, "..")?,
}
- print_const_val(hi, f)
+ fmt_const_val(f, hi)
}
PatternKind::Slice { ref prefix, ref slice, ref suffix } |
PatternKind::Array { ref prefix, ref slice, ref suffix } => {
}
PatKind::Slice(ref prefix, ref slice, ref suffix) => {
- let ty = self.tables.node_id_to_type(pat.hir_id);
match ty.sty {
ty::TyRef(_, ty, _) =>
PatternKind::Deref {
pat.span, ty, prefix, slice, suffix))
},
},
-
ty::TySlice(..) |
ty::TyArray(..) =>
self.slice_or_array_pattern(pat.span, ty, prefix, slice, suffix),
-
+ ty::TyError => { // Avoid ICE
+ return Pattern { span: pat.span, ty, kind: Box::new(PatternKind::Wild) };
+ }
ref sty =>
span_bug!(
pat.span,
}
PatKind::Tuple(ref subpatterns, ddpos) => {
- let ty = self.tables.node_id_to_type(pat.hir_id);
match ty.sty {
ty::TyTuple(ref tys) => {
let subpatterns =
PatternKind::Leaf { subpatterns: subpatterns }
}
-
+ ty::TyError => { // Avoid ICE (#50577)
+ return Pattern { span: pat.span, ty, kind: Box::new(PatternKind::Wild) };
+ }
ref sty => span_bug!(pat.span, "unexpected type for tuple pattern: {:?}", sty),
}
}
- PatKind::Binding(_, id, ref name, ref sub) => {
+ PatKind::Binding(_, id, ident, ref sub) => {
let var_ty = self.tables.node_id_to_type(pat.hir_id);
let region = match var_ty.sty {
ty::TyRef(r, _, _) => Some(r),
+ ty::TyError => { // Avoid ICE
+ return Pattern { span: pat.span, ty, kind: Box::new(PatternKind::Wild) };
+ }
_ => None,
};
let bm = *self.tables.pat_binding_modes().get(pat.hir_id)
if let ty::TyRef(_, rty, _) = ty.sty {
ty = rty;
} else {
- bug!("`ref {}` has wrong type {}", name.node, ty);
+ bug!("`ref {}` has wrong type {}", ident, ty);
}
}
PatternKind::Binding {
mutability,
mode,
- name: name.node,
+ name: ident.name,
var: id,
ty: var_ty,
subpattern: self.lower_opt_pattern(sub),
let def = self.tables.qpath_def(qpath, pat.hir_id);
let adt_def = match ty.sty {
ty::TyAdt(adt_def, _) => adt_def,
- ty::TyError => { // Avoid ICE (#50585)
- return Pattern {
- span: pat.span,
- ty,
- kind: Box::new(PatternKind::Wild),
- };
+ ty::TyError => { // Avoid ICE (#50585)
+ return Pattern { span: pat.span, ty, kind: Box::new(PatternKind::Wild) };
}
_ => span_bug!(pat.span,
"tuple struct pattern not applied to an ADT {:?}",
debug!("const_to_pat: cv={:#?}", cv);
let adt_subpattern = |i, variant_opt| {
let field = Field::new(i);
- let val = match cv.val {
- ConstVal::Value(miri) => const_val_field(
- self.tcx, self.param_env, instance,
- variant_opt, field, miri, cv.ty,
- ).expect("field access failed"),
- _ => bug!("{:#?} is not a valid adt", cv),
- };
+ let val = const_val_field(
+ self.tcx, self.param_env, instance,
+ variant_opt, field, cv,
+ ).expect("field access failed");
self.const_to_pat(instance, val, id, span)
};
let adt_subpatterns = |n, variant_opt| {
PatternKind::Wild
},
ty::TyAdt(adt_def, substs) if adt_def.is_enum() => {
- match cv.val {
- ConstVal::Value(val) => {
- let variant_index = const_variant_index(
- self.tcx, self.param_env, instance, val, cv.ty
- ).expect("const_variant_index failed");
- let subpatterns = adt_subpatterns(
- adt_def.variants[variant_index].fields.len(),
- Some(variant_index),
- );
- PatternKind::Variant {
- adt_def,
- substs,
- variant_index,
- subpatterns,
- }
- },
- ConstVal::Unevaluated(..) =>
- span_bug!(span, "{:#?} is not a valid enum constant", cv),
+ let variant_index = const_variant_index(
+ self.tcx, self.param_env, instance, cv
+ ).expect("const_variant_index failed");
+ let subpatterns = adt_subpatterns(
+ adt_def.variants[variant_index].fields.len(),
+ Some(variant_index),
+ );
+ PatternKind::Variant {
+ adt_def,
+ substs,
+ variant_index,
+ subpatterns,
}
},
ty::TyAdt(adt_def, _) => {
use rustc::hir;
-use rustc::middle::const_val::{ConstEvalErr, ErrKind};
-use rustc::middle::const_val::ErrKind::{TypeckError, CheckMatchError};
+use rustc::mir::interpret::{ConstEvalErr};
use rustc::mir;
use rustc::ty::{self, TyCtxt, Ty, Instance};
use rustc::ty::layout::{self, LayoutOf, Primitive};
use std::fmt;
use std::error::Error;
-use rustc_data_structures::sync::Lrc;
pub fn mk_borrowck_eval_cx<'a, 'mir, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
val: Value,
ty: Ty<'tcx>,
) -> &'tcx ty::Const<'tcx> {
- let layout = ecx.tcx.layout_of(ty::ParamEnv::reveal_all().and(ty)).unwrap();
+ let layout = ecx.layout_of(ty).unwrap();
match (val, &layout.abi) {
(Value::Scalar(Scalar::Bits { defined: 0, ..}), _) if layout.is_zst() => {},
(Value::ByRef(..), _) |
let (frames, span) = ecx.generate_stacktrace(None);
let err = ConstEvalErr {
span,
- kind: ErrKind::Miri(err, frames).into(),
+ error: err,
+ stacktrace: frames,
};
err.report_as_error(
ecx.tcx,
instance: ty::Instance<'tcx>,
variant: Option<usize>,
field: mir::Field,
- value: ConstValue<'tcx>,
- ty: Ty<'tcx>,
-) -> ::rustc::middle::const_val::EvalResult<'tcx> {
- trace!("const_val_field: {:?}, {:?}, {:?}, {:?}", instance, field, value, ty);
+ value: &'tcx ty::Const<'tcx>,
+) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
+ trace!("const_val_field: {:?}, {:?}, {:?}", instance, field, value);
let mut ecx = mk_eval_cx(tcx, instance, param_env).unwrap();
let result = (|| {
- let value = ecx.const_value_to_value(value, ty)?;
+ let ty = value.ty;
+ let value = ecx.const_to_value(value.val)?;
let layout = ecx.layout_of(ty)?;
let (ptr, align) = match value {
Value::ByRef(ptr, align) => (ptr, align),
})();
result.map_err(|err| {
let (trace, span) = ecx.generate_stacktrace(None);
- let err = ErrKind::Miri(err, trace);
ConstEvalErr {
- kind: err.into(),
+ error: err,
+ stacktrace: trace,
span,
- }
+ }.into()
})
}
tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
instance: ty::Instance<'tcx>,
- val: ConstValue<'tcx>,
- ty: Ty<'tcx>,
+ val: &'tcx ty::Const<'tcx>,
) -> EvalResult<'tcx, usize> {
- trace!("const_variant_index: {:?}, {:?}, {:?}", instance, val, ty);
+ trace!("const_variant_index: {:?}, {:?}", instance, val);
let mut ecx = mk_eval_cx(tcx, instance, param_env).unwrap();
- let value = ecx.const_value_to_value(val, ty)?;
+ let value = ecx.const_to_value(val.val)?;
let (ptr, align) = match value {
Value::ScalarPair(..) | Value::Scalar(_) => {
- let layout = ecx.layout_of(ty)?;
+ let layout = ecx.layout_of(val.ty)?;
let ptr = ecx.memory.allocate(layout.size, layout.align, Some(MemoryKind::Stack))?.into();
- ecx.write_value_to_ptr(value, ptr, layout.align, ty)?;
+ ecx.write_value_to_ptr(value, ptr, layout.align, val.ty)?;
(ptr, layout.align)
},
Value::ByRef(ptr, align) => (ptr, align),
};
let place = Place::from_scalar_ptr(ptr, align);
- ecx.read_discriminant_as_variant_index(place, ty)
+ ecx.read_discriminant_as_variant_index(place, val.ty)
}
pub fn const_value_to_allocation_provider<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
- (val, ty): (ConstValue<'tcx>, Ty<'tcx>),
+ val: &'tcx ty::Const<'tcx>,
) -> &'tcx Allocation {
- match val {
+ match val.val {
ConstValue::ByRef(alloc, offset) => {
assert_eq!(offset.bytes(), 0);
return alloc;
ty::ParamEnv::reveal_all(),
CompileTimeEvaluator,
());
- let value = ecx.const_value_to_value(val, ty)?;
- let layout = ecx.layout_of(ty)?;
+ let value = ecx.const_to_value(val.val)?;
+ let layout = ecx.layout_of(val.ty)?;
let ptr = ecx.memory.allocate(layout.size, layout.align, Some(MemoryKind::Stack))?;
- ecx.write_value_to_ptr(value, ptr.into(), layout.align, ty)?;
+ ecx.write_value_to_ptr(value, ptr.into(), layout.align, val.ty)?;
let alloc = ecx.memory.get(ptr.alloc_id)?;
Ok(tcx.intern_const_alloc(alloc.clone()))
};
- result().expect("unable to convert ConstVal to Allocation")
+ result().expect("unable to convert ConstValue to Allocation")
}
pub fn const_eval_provider<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
key: ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>,
-) -> ::rustc::middle::const_val::EvalResult<'tcx> {
+) -> ::rustc::mir::interpret::ConstEvalResult<'tcx> {
trace!("const eval: {:?}", key);
let cid = key.value;
let def_id = cid.instance.def.def_id();
// Do match-check before building MIR
if tcx.check_match(def_id).is_err() {
return Err(ConstEvalErr {
- kind: Lrc::new(CheckMatchError),
+ error: EvalErrorKind::CheckMatchError.into(),
+ stacktrace: vec![],
span,
- });
+ }.into());
}
if let hir::BodyOwnerKind::Const = tcx.hir.body_owner_kind(id) {
// Do not continue into miri if typeck errors occurred; it will fail horribly
if tables.tainted_by_errors {
return Err(ConstEvalErr {
- kind: Lrc::new(TypeckError),
+ error: EvalErrorKind::CheckMatchError.into(),
+ stacktrace: vec![],
span,
- });
+ }.into());
}
};
Ok(value_to_const_value(&ecx, val, miri_ty))
}).map_err(|err| {
let (trace, span) = ecx.generate_stacktrace(None);
- let err = ErrKind::Miri(err, trace);
let err = ConstEvalErr {
- kind: err.into(),
+ error: err,
+ stacktrace: trace,
span,
};
if tcx.is_static(def_id).is_some() {
err.report_as_error(ecx.tcx, "could not evaluate static initializer");
}
- err
+ err.into()
})
}
use rustc::hir::def_id::DefId;
use rustc::hir::def::Def;
use rustc::hir::map::definitions::DefPathData;
-use rustc::middle::const_val::ConstVal;
use rustc::mir;
use rustc::ty::layout::{self, Size, Align, HasDataLayout, IntegerExt, LayoutOf, TyLayout};
use rustc::ty::subst::{Subst, Substs};
use rustc::ty::{self, Ty, TyCtxt, TypeAndMut};
use rustc::ty::query::TyCtxtAt;
use rustc_data_structures::indexed_vec::{IndexVec, Idx};
-use rustc::middle::const_val::FrameInfo;
+use rustc::mir::interpret::FrameInfo;
use syntax::codemap::{self, Span};
use syntax::ast::Mutability;
use rustc::mir::interpret::{
Ok(Scalar::Ptr(ptr).to_value_with_len(s.len() as u64, self.tcx.tcx))
}
- pub fn const_value_to_value(
+ pub fn const_to_value(
&mut self,
val: ConstValue<'tcx>,
- _ty: Ty<'tcx>,
) -> EvalResult<'tcx, Value> {
match val {
+ ConstValue::Unevaluated(def_id, substs) => {
+ let instance = self.resolve(def_id, substs)?;
+ self.read_global_as_value(GlobalId {
+ instance,
+ promoted: None,
+ })
+ }
ConstValue::ByRef(alloc, offset) => {
// FIXME: Allocate new AllocId for all constants inside
let id = self.memory.allocate_value(alloc.clone(), Some(MemoryKind::Stack))?;
}
}
- pub(super) fn const_to_value(
- &mut self,
- const_val: &ConstVal<'tcx>,
- ty: Ty<'tcx>
- ) -> EvalResult<'tcx, Value> {
- match *const_val {
- ConstVal::Unevaluated(def_id, substs) => {
- let instance = self.resolve(def_id, substs)?;
- self.read_global_as_value(GlobalId {
- instance,
- promoted: None,
- }, ty)
- }
- ConstVal::Value(val) => self.const_value_to_value(val, ty)
- }
- }
-
pub(super) fn resolve(&self, def_id: DefId, substs: &'tcx Substs<'tcx>) -> EvalResult<'tcx, ty::Instance<'tcx>> {
trace!("resolve: {:?}, {:#?}", def_id, substs);
trace!("substs: {:#?}", self.substs());
self.param_env,
def_id,
substs,
- ).ok_or_else(|| EvalErrorKind::TypeckError.into()) // turn error prop into a panic to expose associated type in const issue
+ ).ok_or_else(|| EvalErrorKind::TooGeneric.into())
}
pub(super) fn type_is_sized(&self, ty: Ty<'tcx>) -> bool {
self.param_env,
def_id,
substs,
- ).ok_or_else(|| EvalErrorKind::TypeckError.into());
+ ).ok_or_else(|| EvalErrorKind::TooGeneric.into());
let fn_ptr = self.memory.create_fn_alloc(instance?);
let valty = ValTy {
value: Value::Scalar(fn_ptr.into()),
use rustc::mir::Literal;
let mir::Constant { ref literal, .. } = **constant;
let value = match *literal {
- Literal::Value { ref value } => self.const_to_value(&value.val, ty)?,
+ Literal::Value { ref value } => self.const_to_value(value.val)?,
Literal::Promoted { index } => {
let instance = self.frame().instance;
self.read_global_as_value(GlobalId {
instance,
promoted: Some(index),
- }, ty)?
+ })?
}
};
Ok(())
}
- pub fn read_global_as_value(&mut self, gid: GlobalId<'tcx>, ty: Ty<'tcx>) -> EvalResult<'tcx, Value> {
- if self.tcx.is_static(gid.instance.def_id()).is_some() {
- let alloc_id = self
- .tcx
- .alloc_map
- .lock()
- .intern_static(gid.instance.def_id());
- let layout = self.layout_of(ty)?;
- return Ok(Value::ByRef(Scalar::Ptr(alloc_id.into()), layout.align))
- }
+ pub fn read_global_as_value(&mut self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, Value> {
let cv = self.const_eval(gid)?;
- self.const_to_value(&cv.val, ty)
+ self.const_to_value(cv.val)
}
pub fn const_eval(&self, gid: GlobalId<'tcx>) -> EvalResult<'tcx, &'tcx ty::Const<'tcx>> {
use rustc::ty::query::TyCtxtAt;
use rustc::ty::layout::{self, Align, TargetDataLayout, Size};
use syntax::ast::Mutability;
-use rustc::middle::const_val::ConstVal;
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
use rustc::mir::interpret::{Pointer, AllocId, Allocation, AccessKind, Value,
instance,
promoted: None,
};
- self.tcx.const_eval(ParamEnv::reveal_all().and(gid)).map_err(|_| {
+ self.tcx.const_eval(ParamEnv::reveal_all().and(gid)).map_err(|err| {
// no need to report anything, the const_eval call takes care of that for statics
assert!(self.tcx.is_static(def_id).is_some());
- EvalErrorKind::TypeckError.into()
+ EvalErrorKind::ReferencedConstant(err).into()
}).map(|val| {
- let const_val = match val.val {
- ConstVal::Value(val) => val,
- ConstVal::Unevaluated(..) => bug!("should be evaluated"),
- };
- self.tcx.const_value_to_allocation((const_val, val.ty))
+ self.tcx.const_value_to_allocation(val)
})
}
use rustc::hir::map as hir_map;
use rustc::hir::def_id::DefId;
-use rustc::middle::const_val::ConstVal;
use rustc::mir::interpret::{AllocId, ConstValue};
use rustc::middle::lang_items::{ExchangeMallocFnLangItem, StartFnLangItem};
use rustc::ty::subst::Substs;
MonoItemCollectionMode::Lazy => {
self.entry_fn == Some(def_id) ||
self.tcx.is_reachable_non_generic(def_id) ||
+ self.tcx.is_weak_lang_item(def_id) ||
self.tcx.codegen_fn_attrs(def_id).flags.contains(
CodegenFnAttrFlags::RUSTC_STD_INTERNAL_SYMBOL)
}
if let Some(trait_ref) = tcx.impl_trait_ref(impl_def_id) {
let overridden_methods: FxHashSet<_> =
impl_item_refs.iter()
- .map(|iiref| iiref.name)
+ .map(|iiref| iiref.ident.modern())
.collect();
for method in tcx.provided_trait_methods(trait_ref.def_id) {
- if overridden_methods.contains(&method.name) {
+ if overridden_methods.contains(&method.ident.modern()) {
continue;
}
match tcx.const_eval(param_env.and(cid)) {
Ok(val) => collect_const(tcx, val, instance.substs, output),
Err(err) => {
- use rustc::middle::const_val::ErrKind;
use rustc::mir::interpret::EvalErrorKind;
- if let ErrKind::Miri(ref miri, ..) = *err.kind {
- if let EvalErrorKind::ReferencedConstant(_) = miri.kind {
- err.report_as_error(
- tcx.at(mir.promoted[i].span),
- "erroneous constant used",
- );
- }
+ if let EvalErrorKind::ReferencedConstant(_) = err.error.kind {
+ err.report_as_error(
+ tcx.at(mir.promoted[i].span),
+ "erroneous constant used",
+ );
}
},
}
debug!("visiting const {:?}", *constant);
let val = match constant.val {
- ConstVal::Unevaluated(def_id, substs) => {
+ ConstValue::Unevaluated(def_id, substs) => {
let param_env = ty::ParamEnv::reveal_all();
let substs = tcx.subst_and_normalize_erasing_regions(
param_substs,
_ => constant.val,
};
match val {
- ConstVal::Unevaluated(..) => bug!("const eval yielded unevaluated const"),
- ConstVal::Value(ConstValue::ScalarPair(Scalar::Ptr(a), Scalar::Ptr(b))) => {
+ ConstValue::Unevaluated(..) => bug!("const eval yielded unevaluated const"),
+ ConstValue::ScalarPair(Scalar::Ptr(a), Scalar::Ptr(b)) => {
collect_miri(tcx, a.alloc_id, output);
collect_miri(tcx, b.alloc_id, output);
}
- ConstVal::Value(ConstValue::ScalarPair(_, Scalar::Ptr(ptr))) |
- ConstVal::Value(ConstValue::ScalarPair(Scalar::Ptr(ptr), _)) |
- ConstVal::Value(ConstValue::Scalar(Scalar::Ptr(ptr))) =>
+ ConstValue::ScalarPair(_, Scalar::Ptr(ptr)) |
+ ConstValue::ScalarPair(Scalar::Ptr(ptr), _) |
+ ConstValue::Scalar(Scalar::Ptr(ptr)) =>
collect_miri(tcx, ptr.alloc_id, output),
- ConstVal::Value(ConstValue::ByRef(alloc, _offset)) => {
+ ConstValue::ByRef(alloc, _offset) => {
for &id in alloc.relocations.values() {
collect_miri(tcx, id, output);
}
for projection in projections {
let projection = projection.skip_binder();
- let name = &self.tcx.associated_item(projection.item_def_id).name.as_str();
+ let name = &self.tcx.associated_item(projection.item_def_id).ident.as_str();
output.push_str(name);
output.push_str("=");
self.push_type_name(projection.ty, output);
use rustc::mir::{NullOp, StatementKind, Statement, BasicBlock, LocalKind};
use rustc::mir::{TerminatorKind, ClearCrossCrate, SourceInfo, BinOp, ProjectionElem};
use rustc::mir::visit::{Visitor, PlaceContext};
-use rustc::middle::const_val::{ConstVal, ConstEvalErr, ErrKind};
+use rustc::mir::interpret::ConstEvalErr;
use rustc::ty::{TyCtxt, self, Instance};
use rustc::mir::interpret::{Value, Scalar, GlobalId, EvalResult};
use interpret::EvalContext;
return;
}
match tcx.describe_def(source.def_id) {
- // skip statics because they'll be evaluated by miri anyway
+ // skip statics/consts because they'll be evaluated by miri anyway
+ Some(Def::Const(..)) |
Some(Def::Static(..)) => return,
+ // we still run on associated constants, because they might not get evaluated
+ // within the current crate
_ => {},
}
trace!("ConstProp starting for {:?}", source.def_id);
let (frames, span) = self.ecx.generate_stacktrace(None);
let err = ConstEvalErr {
span,
- kind: ErrKind::Miri(err, frames).into(),
+ error: err,
+ stacktrace: frames,
};
err.report_as_lint(
self.ecx.tcx,
r
}
- fn const_eval(&mut self, cid: GlobalId<'tcx>, source_info: SourceInfo) -> Option<Const<'tcx>> {
- let value = match self.tcx.const_eval(self.param_env.and(cid)) {
- Ok(val) => val,
- Err(err) => {
- err.report_as_error(
- self.tcx.at(err.span),
- "constant evaluation error",
- );
- return None;
- },
- };
- let val = match value.val {
- ConstVal::Value(v) => {
- self.use_ecx(source_info, |this| this.ecx.const_value_to_value(v, value.ty))?
- },
- _ => bug!("eval produced: {:?}", value),
- };
- let val = (val, value.ty, source_info.span);
- trace!("evaluated {:?} to {:?}", cid, val);
- Some(val)
- }
-
fn eval_constant(
&mut self,
c: &Constant<'tcx>,
source_info: SourceInfo,
) -> Option<Const<'tcx>> {
match c.literal {
- Literal::Value { value } => match value.val {
- ConstVal::Value(v) => {
- let v = self.use_ecx(source_info, |this| {
- this.ecx.const_value_to_value(v, value.ty)
- })?;
- Some((v, value.ty, c.span))
- },
- ConstVal::Unevaluated(did, substs) => {
- let instance = Instance::resolve(
- self.tcx,
- self.param_env,
- did,
- substs,
- )?;
- let cid = GlobalId {
- instance,
- promoted: None,
- };
- self.const_eval(cid, source_info)
- },
+ Literal::Value { value } => {
+ self.ecx.tcx.span = source_info.span;
+ match self.ecx.const_to_value(value.val) {
+ Ok(val) => Some((val, value.ty, c.span)),
+ Err(error) => {
+ let (stacktrace, span) = self.ecx.generate_stacktrace(None);
+ let err = ConstEvalErr {
+ span,
+ error,
+ stacktrace,
+ };
+ err.report_as_error(
+ self.tcx.at(source_info.span),
+ "could not evaluate constant",
+ );
+ None
+ },
+ }
},
// evaluate the promoted and replace the constant with the evaluated result
Literal::Promoted { index } => {
use rustc_data_structures::fx::FxHashSet;
use rustc::hir;
use rustc::hir::def_id::DefId;
-use rustc::middle::const_val::ConstVal;
+use rustc::mir::interpret::ConstValue;
use rustc::traits::{self, TraitEngine};
use rustc::ty::{self, TyCtxt, Ty, TypeFoldable};
use rustc::ty::cast::CastTy;
}
Operand::Constant(ref constant) => {
if let Literal::Value {
- value: &ty::Const { val: ConstVal::Unevaluated(def_id, _), ty, .. }
+ value: &ty::Const { val: ConstValue::Unevaluated(def_id, _), ty, .. }
} = constant.literal {
// Don't peek inside trait associated constants.
if self.tcx.trait_of_item(def_id).is_some() {
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc::ty::{self, TyCtxt};
use rustc::session::config::BorrowckMode;
+use rustc::ty::{self, TyCtxt};
use rustc_errors::{DiagnosticBuilder, DiagnosticId};
use syntax_pos::{MultiSpan, Span};
use std::fmt;
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
-pub enum Origin { Ast, Mir }
+pub enum Origin {
+ Ast,
+ Mir,
+}
impl fmt::Display for Origin {
fn fmt(&self, w: &mut fmt::Formatter) -> fmt::Result {
}
pub trait BorrowckErrors<'cx>: Sized + Copy {
- fn struct_span_err_with_code<S: Into<MultiSpan>>(self,
- sp: S,
- msg: &str,
- code: DiagnosticId)
- -> DiagnosticBuilder<'cx>;
+ fn struct_span_err_with_code<S: Into<MultiSpan>>(
+ self,
+ sp: S,
+ msg: &str,
+ code: DiagnosticId,
+ ) -> DiagnosticBuilder<'cx>;
- fn struct_span_err<S: Into<MultiSpan>>(self,
- sp: S,
- msg: &str)
- -> DiagnosticBuilder<'cx>;
+ fn struct_span_err<S: Into<MultiSpan>>(self, sp: S, msg: &str) -> DiagnosticBuilder<'cx>;
/// Cancels the given error if we shouldn't emit errors for a given
/// origin in the current mode.
///
/// Always make sure that the error gets passed through this function
/// before you return it.
- fn cancel_if_wrong_origin(self,
- diag: DiagnosticBuilder<'cx>,
- o: Origin)
- -> DiagnosticBuilder<'cx>;
-
- fn cannot_move_when_borrowed(self, span: Span, desc: &str, o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let err = struct_span_err!(self, span, E0505,
- "cannot move out of `{}` because it is borrowed{OGN}",
- desc, OGN=o);
+ fn cancel_if_wrong_origin(
+ self,
+ diag: DiagnosticBuilder<'cx>,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx>;
+
+ fn cannot_move_when_borrowed(
+ self,
+ span: Span,
+ desc: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let err = struct_span_err!(
+ self,
+ span,
+ E0505,
+ "cannot move out of `{}` because it is borrowed{OGN}",
+ desc,
+ OGN = o
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_use_when_mutably_borrowed(self,
- span: Span,
- desc: &str,
- borrow_span: Span,
- borrow_desc: &str,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, span, E0503,
- "cannot use `{}` because it was mutably borrowed{OGN}",
- desc, OGN=o);
-
- err.span_label(borrow_span, format!("borrow of `{}` occurs here", borrow_desc));
+ fn cannot_use_when_mutably_borrowed(
+ self,
+ span: Span,
+ desc: &str,
+ borrow_span: Span,
+ borrow_desc: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ span,
+ E0503,
+ "cannot use `{}` because it was mutably borrowed{OGN}",
+ desc,
+ OGN = o
+ );
+
+ err.span_label(
+ borrow_span,
+ format!("borrow of `{}` occurs here", borrow_desc),
+ );
err.span_label(span, format!("use of borrowed `{}`", borrow_desc));
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_act_on_uninitialized_variable(self,
- span: Span,
- verb: &str,
- desc: &str,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let err = struct_span_err!(self, span, E0381,
- "{} of possibly uninitialized variable: `{}`{OGN}",
- verb, desc, OGN=o);
+ fn cannot_act_on_uninitialized_variable(
+ self,
+ span: Span,
+ verb: &str,
+ desc: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let err = struct_span_err!(
+ self,
+ span,
+ E0381,
+ "{} of possibly uninitialized variable: `{}`{OGN}",
+ verb,
+ desc,
+ OGN = o
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_mutably_borrow_multiply(self,
- new_loan_span: Span,
- desc: &str,
- opt_via: &str,
- old_loan_span: Span,
- old_opt_via: &str,
- old_load_end_span: Option<Span>,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, new_loan_span, E0499,
- "cannot borrow `{}`{} as mutable more than once at a time{OGN}",
- desc, opt_via, OGN=o);
+ fn cannot_mutably_borrow_multiply(
+ self,
+ new_loan_span: Span,
+ desc: &str,
+ opt_via: &str,
+ old_loan_span: Span,
+ old_opt_via: &str,
+ old_load_end_span: Option<Span>,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ new_loan_span,
+ E0499,
+ "cannot borrow `{}`{} as mutable more than once at a time{OGN}",
+ desc,
+ opt_via,
+ OGN = o
+ );
if old_loan_span == new_loan_span {
// Both borrows are happening in the same place
// Meaning the borrow is occurring in a loop
- err.span_label(new_loan_span,
- format!("mutable borrow starts here in previous \
- iteration of loop{}", opt_via));
+ err.span_label(
+ new_loan_span,
+ format!(
+ "mutable borrow starts here in previous \
+ iteration of loop{}",
+ opt_via
+ ),
+ );
if let Some(old_load_end_span) = old_load_end_span {
err.span_label(old_load_end_span, "mutable borrow ends here");
}
} else {
- err.span_label(old_loan_span,
- format!("first mutable borrow occurs here{}", old_opt_via));
- err.span_label(new_loan_span,
- format!("second mutable borrow occurs here{}", opt_via));
+ err.span_label(
+ old_loan_span,
+ format!("first mutable borrow occurs here{}", old_opt_via),
+ );
+ err.span_label(
+ new_loan_span,
+ format!("second mutable borrow occurs here{}", opt_via),
+ );
if let Some(old_load_end_span) = old_load_end_span {
err.span_label(old_load_end_span, "first borrow ends here");
}
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_uniquely_borrow_by_two_closures(self,
- new_loan_span: Span,
- desc: &str,
- old_loan_span: Span,
- old_load_end_span: Option<Span>,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, new_loan_span, E0524,
- "two closures require unique access to `{}` at the same time{OGN}",
- desc, OGN=o);
- err.span_label(
- old_loan_span,
- "first closure is constructed here");
- err.span_label(
+ fn cannot_uniquely_borrow_by_two_closures(
+ self,
+ new_loan_span: Span,
+ desc: &str,
+ old_loan_span: Span,
+ old_load_end_span: Option<Span>,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
new_loan_span,
- "second closure is constructed here");
+ E0524,
+ "two closures require unique access to `{}` at the same time{OGN}",
+ desc,
+ OGN = o
+ );
+ err.span_label(old_loan_span, "first closure is constructed here");
+ err.span_label(new_loan_span, "second closure is constructed here");
if let Some(old_load_end_span) = old_load_end_span {
- err.span_label(
- old_load_end_span,
- "borrow from first closure ends here");
+ err.span_label(old_load_end_span, "borrow from first closure ends here");
}
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_uniquely_borrow_by_one_closure(self,
- new_loan_span: Span,
- desc_new: &str,
- opt_via: &str,
- old_loan_span: Span,
- noun_old: &str,
- old_opt_via: &str,
- previous_end_span: Option<Span>,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, new_loan_span, E0500,
- "closure requires unique access to `{}` but {} is already borrowed{}{OGN}",
- desc_new, noun_old, old_opt_via, OGN=o);
- err.span_label(new_loan_span,
- format!("closure construction occurs here{}", opt_via));
- err.span_label(old_loan_span,
- format!("borrow occurs here{}", old_opt_via));
+ fn cannot_uniquely_borrow_by_one_closure(
+ self,
+ new_loan_span: Span,
+ desc_new: &str,
+ opt_via: &str,
+ old_loan_span: Span,
+ noun_old: &str,
+ old_opt_via: &str,
+ previous_end_span: Option<Span>,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ new_loan_span,
+ E0500,
+ "closure requires unique access to `{}` but {} is already borrowed{}{OGN}",
+ desc_new,
+ noun_old,
+ old_opt_via,
+ OGN = o
+ );
+ err.span_label(
+ new_loan_span,
+ format!("closure construction occurs here{}", opt_via),
+ );
+ err.span_label(old_loan_span, format!("borrow occurs here{}", old_opt_via));
if let Some(previous_end_span) = previous_end_span {
err.span_label(previous_end_span, "borrow ends here");
}
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_reborrow_already_uniquely_borrowed(self,
- new_loan_span: Span,
- desc_new: &str,
- opt_via: &str,
- kind_new: &str,
- old_loan_span: Span,
- old_opt_via: &str,
- previous_end_span: Option<Span>,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, new_loan_span, E0501,
- "cannot borrow `{}`{} as {} because previous closure \
- requires unique access{OGN}",
- desc_new, opt_via, kind_new, OGN=o);
- err.span_label(new_loan_span,
- format!("borrow occurs here{}", opt_via));
- err.span_label(old_loan_span,
- format!("closure construction occurs here{}", old_opt_via));
+ fn cannot_reborrow_already_uniquely_borrowed(
+ self,
+ new_loan_span: Span,
+ desc_new: &str,
+ opt_via: &str,
+ kind_new: &str,
+ old_loan_span: Span,
+ old_opt_via: &str,
+ previous_end_span: Option<Span>,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ new_loan_span,
+ E0501,
+ "cannot borrow `{}`{} as {} because previous closure \
+ requires unique access{OGN}",
+ desc_new,
+ opt_via,
+ kind_new,
+ OGN = o
+ );
+ err.span_label(new_loan_span, format!("borrow occurs here{}", opt_via));
+ err.span_label(
+ old_loan_span,
+ format!("closure construction occurs here{}", old_opt_via),
+ );
if let Some(previous_end_span) = previous_end_span {
err.span_label(previous_end_span, "borrow from closure ends here");
}
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_reborrow_already_borrowed(self,
- span: Span,
- desc_new: &str,
- msg_new: &str,
- kind_new: &str,
- old_span: Span,
- noun_old: &str,
- kind_old: &str,
- msg_old: &str,
- old_load_end_span: Option<Span>,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, span, E0502,
- "cannot borrow `{}`{} as {} because {} is also borrowed as {}{}{OGN}",
- desc_new, msg_new, kind_new, noun_old, kind_old, msg_old, OGN=o);
+ fn cannot_reborrow_already_borrowed(
+ self,
+ span: Span,
+ desc_new: &str,
+ msg_new: &str,
+ kind_new: &str,
+ old_span: Span,
+ noun_old: &str,
+ kind_old: &str,
+ msg_old: &str,
+ old_load_end_span: Option<Span>,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ span,
+ E0502,
+ "cannot borrow `{}`{} as {} because {} is also borrowed as {}{}{OGN}",
+ desc_new,
+ msg_new,
+ kind_new,
+ noun_old,
+ kind_old,
+ msg_old,
+ OGN = o
+ );
err.span_label(span, format!("{} borrow occurs here{}", kind_new, msg_new));
- err.span_label(old_span, format!("{} borrow occurs here{}", kind_old, msg_old));
+ err.span_label(
+ old_span,
+ format!("{} borrow occurs here{}", kind_old, msg_old),
+ );
if let Some(old_load_end_span) = old_load_end_span {
err.span_label(old_load_end_span, format!("{} borrow ends here", kind_old));
}
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_assign_to_borrowed(self, span: Span, borrow_span: Span, desc: &str, o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, span, E0506,
- "cannot assign to `{}` because it is borrowed{OGN}",
- desc, OGN=o);
+ fn cannot_assign_to_borrowed(
+ self,
+ span: Span,
+ borrow_span: Span,
+ desc: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ span,
+ E0506,
+ "cannot assign to `{}` because it is borrowed{OGN}",
+ desc,
+ OGN = o
+ );
err.span_label(borrow_span, format!("borrow of `{}` occurs here", desc));
- err.span_label(span, format!("assignment to borrowed `{}` occurs here", desc));
+ err.span_label(
+ span,
+ format!("assignment to borrowed `{}` occurs here", desc),
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_move_into_closure(self, span: Span, desc: &str, o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let err = struct_span_err!(self, span, E0504,
- "cannot move `{}` into closure because it is borrowed{OGN}",
- desc, OGN=o);
+ fn cannot_move_into_closure(self, span: Span, desc: &str, o: Origin) -> DiagnosticBuilder<'cx> {
+ let err = struct_span_err!(
+ self,
+ span,
+ E0504,
+ "cannot move `{}` into closure because it is borrowed{OGN}",
+ desc,
+ OGN = o
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_reassign_immutable(self, span: Span, desc: &str, is_arg: bool, o: Origin)
- -> DiagnosticBuilder<'cx>
- {
+ fn cannot_reassign_immutable(
+ self,
+ span: Span,
+ desc: &str,
+ is_arg: bool,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
let msg = if is_arg {
"to immutable argument"
} else {
"twice to immutable variable"
};
- let err = struct_span_err!(self, span, E0384,
- "cannot assign {} `{}`{OGN}",
- msg, desc, OGN=o);
+ let err = struct_span_err!(
+ self,
+ span,
+ E0384,
+ "cannot assign {} `{}`{OGN}",
+ msg,
+ desc,
+ OGN = o
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_assign(self, span: Span, desc: &str, o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let err = struct_span_err!(self, span, E0594,
- "cannot assign to {}{OGN}",
- desc, OGN=o);
+ fn cannot_assign(self, span: Span, desc: &str, o: Origin) -> DiagnosticBuilder<'cx> {
+ let err = struct_span_err!(self, span, E0594, "cannot assign to {}{OGN}", desc, OGN = o);
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_assign_static(self, span: Span, desc: &str, o: Origin)
- -> DiagnosticBuilder<'cx>
- {
+ fn cannot_assign_static(self, span: Span, desc: &str, o: Origin) -> DiagnosticBuilder<'cx> {
self.cannot_assign(span, &format!("immutable static item `{}`", desc), o)
}
- fn cannot_move_out_of(self, move_from_span: Span, move_from_desc: &str, o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, move_from_span, E0507,
- "cannot move out of {}{OGN}",
- move_from_desc, OGN=o);
+ fn cannot_move_out_of(
+ self,
+ move_from_span: Span,
+ move_from_desc: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ move_from_span,
+ E0507,
+ "cannot move out of {}{OGN}",
+ move_from_desc,
+ OGN = o
+ );
err.span_label(
move_from_span,
- format!("cannot move out of {}", move_from_desc));
+ format!("cannot move out of {}", move_from_desc),
+ );
self.cancel_if_wrong_origin(err, o)
}
/// Signal an error due to an attempt to move out of the interior
/// of an array or slice. `is_index` is None when error origin
/// didn't capture whether there was an indexing operation or not.
- fn cannot_move_out_of_interior_noncopy(self,
- move_from_span: Span,
- ty: ty::Ty,
- is_index: Option<bool>,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
+ fn cannot_move_out_of_interior_noncopy(
+ self,
+ move_from_span: Span,
+ ty: ty::Ty,
+ is_index: Option<bool>,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
let type_name = match (&ty.sty, is_index) {
- (&ty::TyArray(_, _), Some(true)) |
- (&ty::TyArray(_, _), None) => "array",
- (&ty::TySlice(_), _) => "slice",
+ (&ty::TyArray(_, _), Some(true)) | (&ty::TyArray(_, _), None) => "array",
+ (&ty::TySlice(_), _) => "slice",
_ => span_bug!(move_from_span, "this path should not cause illegal move"),
};
- let mut err = struct_span_err!(self, move_from_span, E0508,
- "cannot move out of type `{}`, \
- a non-copy {}{OGN}",
- ty, type_name, OGN=o);
+ let mut err = struct_span_err!(
+ self,
+ move_from_span,
+ E0508,
+ "cannot move out of type `{}`, \
+ a non-copy {}{OGN}",
+ ty,
+ type_name,
+ OGN = o
+ );
err.span_label(move_from_span, "cannot move out of here");
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_move_out_of_interior_of_drop(self,
- move_from_span: Span,
- container_ty: ty::Ty,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, move_from_span, E0509,
- "cannot move out of type `{}`, \
- which implements the `Drop` trait{OGN}",
- container_ty, OGN=o);
+ fn cannot_move_out_of_interior_of_drop(
+ self,
+ move_from_span: Span,
+ container_ty: ty::Ty,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ move_from_span,
+ E0509,
+ "cannot move out of type `{}`, \
+ which implements the `Drop` trait{OGN}",
+ container_ty,
+ OGN = o
+ );
err.span_label(move_from_span, "cannot move out of here");
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_act_on_moved_value(self,
- use_span: Span,
- verb: &str,
- optional_adverb_for_moved: &str,
- moved_path: &str,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let err = struct_span_err!(self, use_span, E0382,
- "{} of {}moved value: `{}`{OGN}",
- verb, optional_adverb_for_moved, moved_path, OGN=o);
+ fn cannot_act_on_moved_value(
+ self,
+ use_span: Span,
+ verb: &str,
+ optional_adverb_for_moved: &str,
+ moved_path: Option<String>,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let moved_path = moved_path
+ .map(|mp| format!(": `{}`", mp))
+ .unwrap_or_default();
+
+ let err = struct_span_err!(
+ self,
+ use_span,
+ E0382,
+ "{} of {}moved value{}{OGN}",
+ verb,
+ optional_adverb_for_moved,
+ moved_path,
+ OGN = o
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_partially_reinit_an_uninit_struct(self,
- span: Span,
- uninit_path: &str,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let err = struct_span_err!(self,
- span,
- E0383,
- "partial reinitialization of uninitialized structure `{}`{OGN}",
- uninit_path, OGN=o);
+ fn cannot_partially_reinit_an_uninit_struct(
+ self,
+ span: Span,
+ uninit_path: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let err = struct_span_err!(
+ self,
+ span,
+ E0383,
+ "partial reinitialization of uninitialized structure `{}`{OGN}",
+ uninit_path,
+ OGN = o
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn closure_cannot_assign_to_borrowed(self,
- span: Span,
- descr: &str,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let err = struct_span_err!(self, span, E0595, "closure cannot assign to {}{OGN}",
- descr, OGN=o);
+ fn closure_cannot_assign_to_borrowed(
+ self,
+ span: Span,
+ descr: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let err = struct_span_err!(
+ self,
+ span,
+ E0595,
+ "closure cannot assign to {}{OGN}",
+ descr,
+ OGN = o
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_borrow_path_as_mutable(self,
- span: Span,
- path: &str,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let err = struct_span_err!(self, span, E0596, "cannot borrow {} as mutable{OGN}",
- path, OGN=o);
+ fn cannot_borrow_path_as_mutable(
+ self,
+ span: Span,
+ path: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let err = struct_span_err!(
+ self,
+ span,
+ E0596,
+ "cannot borrow {} as mutable{OGN}",
+ path,
+ OGN = o
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_borrow_across_generator_yield(self,
- span: Span,
- yield_span: Span,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self,
- span,
- E0626,
- "borrow may still be in use when generator yields{OGN}",
- OGN=o);
+ fn cannot_borrow_across_generator_yield(
+ self,
+ span: Span,
+ yield_span: Span,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ span,
+ E0626,
+ "borrow may still be in use when generator yields{OGN}",
+ OGN = o
+ );
err.span_label(yield_span, "possible yield occurs here");
self.cancel_if_wrong_origin(err, o)
}
- fn path_does_not_live_long_enough(self,
- span: Span,
- path: &str,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let err = struct_span_err!(self, span, E0597, "{} does not live long enough{OGN}",
- path, OGN=o);
+ fn path_does_not_live_long_enough(
+ self,
+ span: Span,
+ path: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let err = struct_span_err!(
+ self,
+ span,
+ E0597,
+ "{} does not live long enough{OGN}",
+ path,
+ OGN = o
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn lifetime_too_short_for_reborrow(self,
- span: Span,
- path: &str,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let err = struct_span_err!(self, span, E0598,
- "lifetime of {} is too short to guarantee \
- its contents can be safely reborrowed{OGN}",
- path, OGN=o);
+ fn lifetime_too_short_for_reborrow(
+ self,
+ span: Span,
+ path: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let err = struct_span_err!(
+ self,
+ span,
+ E0598,
+ "lifetime of {} is too short to guarantee \
+ its contents can be safely reborrowed{OGN}",
+ path,
+ OGN = o
+ );
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_act_on_capture_in_sharable_fn(self,
- span: Span,
- bad_thing: &str,
- help: (Span, &str),
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
+ fn cannot_act_on_capture_in_sharable_fn(
+ self,
+ span: Span,
+ bad_thing: &str,
+ help: (Span, &str),
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
let (help_span, help_msg) = help;
- let mut err = struct_span_err!(self, span, E0387,
- "{} in a captured outer variable in an `Fn` closure{OGN}",
- bad_thing, OGN=o);
+ let mut err = struct_span_err!(
+ self,
+ span,
+ E0387,
+ "{} in a captured outer variable in an `Fn` closure{OGN}",
+ bad_thing,
+ OGN = o
+ );
err.span_help(help_span, help_msg);
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_assign_into_immutable_reference(self,
- span: Span,
- bad_thing: &str,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, span, E0389, "{} in a `&` reference{OGN}",
- bad_thing, OGN=o);
+ fn cannot_assign_into_immutable_reference(
+ self,
+ span: Span,
+ bad_thing: &str,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ span,
+ E0389,
+ "{} in a `&` reference{OGN}",
+ bad_thing,
+ OGN = o
+ );
err.span_label(span, "assignment into an immutable reference");
self.cancel_if_wrong_origin(err, o)
}
- fn cannot_capture_in_long_lived_closure(self,
- closure_span: Span,
- borrowed_path: &str,
- capture_span: Span,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
- let mut err = struct_span_err!(self, closure_span, E0373,
- "closure may outlive the current function, \
- but it borrows {}, \
- which is owned by the current function{OGN}",
- borrowed_path, OGN=o);
+ fn cannot_capture_in_long_lived_closure(
+ self,
+ closure_span: Span,
+ borrowed_path: &str,
+ capture_span: Span,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
+ let mut err = struct_span_err!(
+ self,
+ closure_span,
+ E0373,
+ "closure may outlive the current function, \
+ but it borrows {}, \
+ which is owned by the current function{OGN}",
+ borrowed_path,
+ OGN = o
+ );
err.span_label(capture_span, format!("{} is borrowed here", borrowed_path))
- .span_label(closure_span, format!("may outlive borrowed value {}", borrowed_path));
+ .span_label(
+ closure_span,
+ format!("may outlive borrowed value {}", borrowed_path),
+ );
self.cancel_if_wrong_origin(err, o)
}
}
impl<'cx, 'gcx, 'tcx> BorrowckErrors<'cx> for TyCtxt<'cx, 'gcx, 'tcx> {
- fn struct_span_err_with_code<S: Into<MultiSpan>>(self,
- sp: S,
- msg: &str,
- code: DiagnosticId)
- -> DiagnosticBuilder<'cx>
- {
+ fn struct_span_err_with_code<S: Into<MultiSpan>>(
+ self,
+ sp: S,
+ msg: &str,
+ code: DiagnosticId,
+ ) -> DiagnosticBuilder<'cx> {
self.sess.struct_span_err_with_code(sp, msg, code)
}
- fn struct_span_err<S: Into<MultiSpan>>(self,
- sp: S,
- msg: &str)
- -> DiagnosticBuilder<'cx>
- {
+ fn struct_span_err<S: Into<MultiSpan>>(self, sp: S, msg: &str) -> DiagnosticBuilder<'cx> {
self.sess.struct_span_err(sp, msg)
}
- fn cancel_if_wrong_origin(self,
- mut diag: DiagnosticBuilder<'cx>,
- o: Origin)
- -> DiagnosticBuilder<'cx>
- {
+ fn cancel_if_wrong_origin(
+ self,
+ mut diag: DiagnosticBuilder<'cx>,
+ o: Origin,
+ ) -> DiagnosticBuilder<'cx> {
if !o.should_emit_errors(self.borrowck_mode()) {
self.sess.diagnostic().cancel(&mut diag);
}
def_info: _,
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
unstable_feature,
edition,
} => {
def_info: Some((nid, self.krate_span)),
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
unstable_feature,
edition,
}
def_info: None,
allow_internal_unstable: false,
allow_internal_unsafe: false,
+ local_inner_macros: false,
unstable_feature: None,
edition: hygiene::default_edition(),
});
let is_local_static = if let Def::Static(..) = def { def_id.is_local() } else { false };
if !self.item_is_accessible(def_id) && !is_local_static {
let name = match *qpath {
- hir::QPath::Resolved(_, ref path) => format!("{}", path),
- hir::QPath::TypeRelative(_, ref segment) => segment.name.to_string(),
+ hir::QPath::Resolved(_, ref path) => path.to_string(),
+ hir::QPath::TypeRelative(_, ref segment) => segment.ident.to_string(),
};
let msg = format!("{} `{}` is private", def.kind_name(), name);
self.tcx.sess.span_err(span, &msg);
args: Option<P<hir::GenericArgs>>,
is_value: bool
) -> hir::Path {
- let mut segments = iter::once(keywords::CrateRoot.name())
+ let mut segments = iter::once(keywords::CrateRoot.ident())
.chain(
crate_root.into_iter()
.chain(components.iter().cloned())
- .map(Symbol::intern)
- ).map(hir::PathSegment::from_name).collect::<Vec<_>>();
+ .map(Ident::from_str)
+ ).map(hir::PathSegment::from_ident).collect::<Vec<_>>();
if let Some(args) = args {
- let name = segments.last().unwrap().name;
+ let ident = segments.last().unwrap().ident;
*segments.last_mut().unwrap() = hir::PathSegment {
- name,
+ ident,
args: Some(args),
infer_types: true,
};
hir::Path {
span,
def: Def::Err,
- segments: iter::once(keywords::CrateRoot.name()).chain({
- path_str.split("::").skip(1).map(Symbol::intern)
- }).map(hir::PathSegment::from_name).collect(),
+ segments: iter::once(keywords::CrateRoot.ident()).chain({
+ path_str.split("::").skip(1).map(Ident::from_str)
+ }).map(hir::PathSegment::from_ident).collect(),
}
} else {
hir::Path {
span,
def: Def::Err,
- segments: path_str.split("::").map(Symbol::intern)
- .map(hir::PathSegment::from_name).collect(),
+ segments: path_str.split("::").map(Ident::from_str)
+ .map(hir::PathSegment::from_ident).collect(),
}
};
self.resolve_hir_path_cb(&mut path, is_value, |_, _, _| errored = true);
/// resolve_hir_path, but takes a callback in case there was an error
fn resolve_hir_path_cb<F>(&mut self, path: &mut hir::Path, is_value: bool, error_callback: F)
- where F: for<'c, 'b> FnOnce(&'c mut Resolver, Span, ResolutionError<'b>)
- {
+ where F: for<'c, 'b> FnOnce(&'c mut Resolver, Span, ResolutionError<'b>)
+ {
let namespace = if is_value { ValueNS } else { TypeNS };
let hir::Path { ref segments, span, ref mut def } = *path;
- let path: Vec<Ident> = segments.iter()
- .map(|seg| Ident::new(seg.name, span))
- .collect();
+ let path: Vec<_> = segments.iter().map(|seg| seg.ident).collect();
// FIXME (Manishearth): Intra doc links won't get warned of epoch changes
match self.resolve_path(&path, Some(namespace), true, span, CrateLint::No) {
PathResult::Module(module) => *def = module.def().unwrap(),
match path.get(1) {
// If this import looks like `crate::...` it's already good
- Some(name) if name.name == keywords::Crate.name() => return,
+ Some(ident) if ident.name == keywords::Crate.name() => return,
// Otherwise go below to see if it's an extern crate
Some(_) => {}
// If the path has length one (and it's `CrateRoot` most likely)
kind: MacroKind, force: bool)
-> Result<Def, Determinacy> {
let ast::Path { ref segments, span } = *path;
- let path: Vec<_> = segments.iter().map(|seg| seg.ident).collect();
+ let mut path: Vec<_> = segments.iter().map(|seg| seg.ident).collect();
let invocation = self.invocations[&scope];
let module = invocation.module.get();
self.current_module = if module.is_trait() { module.parent.unwrap() } else { module };
+ // Possibly apply the macro helper hack
+ if self.use_extern_macros && kind == MacroKind::Bang && path.len() == 1 &&
+ path[0].span.ctxt().outer().expn_info().map_or(false, |info| info.local_inner_macros) {
+ let root = Ident::new(keywords::DollarCrate.name(), path[0].span);
+ path.insert(0, root);
+ }
+
if path.len() > 1 {
if !self.use_extern_macros && self.gated_errors.insert(span) {
let msg = "non-ident macro paths are experimental";
sig: &'l ast::MethodSig,
body: Option<&'l ast::Block>,
id: ast::NodeId,
- name: ast::Ident,
+ ident: ast::Ident,
generics: &'l ast::Generics,
vis: ast::Visibility,
span: Span,
) {
- debug!("process_method: {}:{}", id, name);
+ debug!("process_method: {}:{}", id, ident);
- if let Some(mut method_data) = self.save_ctxt.get_method_data(id, name.name, span) {
+ if let Some(mut method_data) = self.save_ctxt.get_method_data(id, ident.name, span) {
let sig_str = ::make_signature(&sig.decl, &generics);
if body.is_some() {
self.nest_tables(
self.process_generic_params(&generics, span, &method_data.qualname, id);
method_data.value = sig_str;
- method_data.sig = sig::method_signature(id, name, generics, sig, &self.save_ctxt);
+ method_data.sig = sig::method_signature(id, ident, generics, sig, &self.save_ctxt);
self.dumper.dump_def(&access_from!(self.save_ctxt, vis, id), method_data);
}
qualname.push_str(&self.tcx.item_path_str(def_id));
self.tcx
.associated_items(def_id)
- .find(|item| item.name == name)
+ .find(|item| item.ident.name == name)
.map(|item| decl_id = Some(item.def_id));
}
qualname.push_str(">");
let ti = self.tcx.associated_item(decl_id);
self.tcx
.associated_items(ti.container.id())
- .find(|item| item.name == ti.name && item.defaultness.has_value())
+ .find(|item| item.ident.name == ti.ident.name &&
+ item.defaultness.has_value())
.map(|item| item.def_id)
} else {
None
RegKind::Integer => {
match self.size.bits() {
1 => dl.i1_align,
- 2...8 => dl.i8_align,
- 9...16 => dl.i16_align,
- 17...32 => dl.i32_align,
- 33...64 => dl.i64_align,
- 65...128 => dl.i128_align,
+ 2..=8 => dl.i8_align,
+ 9..=16 => dl.i16_align,
+ 17..=32 => dl.i32_align,
+ 33..=64 => dl.i64_align,
+ 65..=128 => dl.i128_align,
_ => panic!("unsupported integer: {:?}", self)
}
}
/// Find the smallest Integer type which can represent the signed value.
pub fn fit_signed(x: i128) -> Integer {
match x {
- -0x0000_0000_0000_0080...0x0000_0000_0000_007f => I8,
- -0x0000_0000_0000_8000...0x0000_0000_0000_7fff => I16,
- -0x0000_0000_8000_0000...0x0000_0000_7fff_ffff => I32,
- -0x8000_0000_0000_0000...0x7fff_ffff_ffff_ffff => I64,
+ -0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
+ -0x0000_0000_0000_8000..=0x0000_0000_0000_7fff => I16,
+ -0x0000_0000_8000_0000..=0x0000_0000_7fff_ffff => I32,
+ -0x8000_0000_0000_0000..=0x7fff_ffff_ffff_ffff => I64,
_ => I128
}
}
/// Find the smallest Integer type which can represent the unsigned value.
pub fn fit_unsigned(x: u128) -> Integer {
match x {
- 0...0x0000_0000_0000_00ff => I8,
- 0...0x0000_0000_0000_ffff => I16,
- 0...0x0000_0000_ffff_ffff => I32,
- 0...0xffff_ffff_ffff_ffff => I64,
+ 0..=0x0000_0000_0000_00ff => I8,
+ 0..=0x0000_0000_0000_ffff => I16,
+ 0..=0x0000_0000_ffff_ffff => I32,
+ 0..=0xffff_ffff_ffff_ffff => I64,
_ => I128,
}
}
use chalk_engine::fallible::Fallible as ChalkEngineFallible;
use chalk_engine::{context, hh::HhGoal, DelayedLiteral, ExClause};
-use rustc::infer::canonical::{
- Canonical, CanonicalVarValues, Canonicalize, QueryRegionConstraint, QueryResult,
-};
+use rustc::infer::canonical::{Canonical, CanonicalVarValues, QueryRegionConstraint, QueryResult};
use rustc::infer::{InferCtxt, InferOk, LateBoundRegionConversionTime};
use rustc::traits::{
WellFormed,
subst, constraints
}
}
-
-impl<'gcx: 'tcx, 'tcx> Canonicalize<'gcx, 'tcx> for ConstrainedSubst<'tcx> {
- type Canonicalized = Canonical<'gcx, ConstrainedSubst<'gcx>>;
-
- fn intern(
- _gcx: TyCtxt<'_, 'gcx, 'gcx>,
- value: Canonical<'gcx, ConstrainedSubst<'gcx>>,
- ) -> Self::Canonicalized {
- value
- }
-}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use rustc::infer::canonical::{Canonical, QueryResult};
use rustc::hir::def_id::DefId;
-use rustc::traits::{FulfillmentContext, Normalized, ObligationCause};
+use rustc::infer::canonical::{Canonical, QueryResult};
+use rustc::traits::query::dropck_outlives::{DropckOutlivesResult, DtorckConstraint};
use rustc::traits::query::{CanonicalTyGoal, NoSolution};
-use rustc::traits::query::dropck_outlives::{DtorckConstraint, DropckOutlivesResult};
-use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
+use rustc::traits::{FulfillmentContext, Normalized, ObligationCause, TraitEngineExt};
+use rustc::ty::query::Providers;
use rustc::ty::subst::{Subst, Substs};
+use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
use rustc::util::nodemap::FxHashSet;
use rustc_data_structures::sync::Lrc;
use syntax::codemap::{Span, DUMMY_SP};
-use util;
-crate fn dropck_outlives<'tcx>(
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ dropck_outlives,
+ adt_dtorck_constraint,
+ ..*p
+ };
+}
+
+fn dropck_outlives<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: CanonicalTyGoal<'tcx>,
) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, DropckOutlivesResult<'tcx>>>>, NoSolution> {
canonical_inference_vars,
) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &goal);
- let mut result = DropckOutlivesResult { kinds: vec![], overflows: vec![] };
+ let mut result = DropckOutlivesResult {
+ kinds: vec![],
+ overflows: vec![],
+ };
// A stack of types left to process. Each round, we pop
// something from the stack and invoke
debug!("dropck_outlives: result = {:#?}", result);
- util::make_query_response(infcx, canonical_inference_vars, result, fulfill_cx)
+ infcx.make_canonicalized_query_result(canonical_inference_vars, result, fulfill_cx)
})
}
dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ety)
}
- ty::TyTuple(tys) => tys.iter()
+ ty::TyTuple(tys) => tys
+ .iter()
.map(|ty| dtorck_constraint_for_ty(tcx, span, for_ty, depth + 1, ty))
.collect(),
dtorck_types: vec![],
overflows: vec![],
};
- debug!("dtorck_constraint: generator {:?} => {:?}", def_id, constraint);
+ debug!(
+ "dtorck_constraint: generator {:?} => {:?}",
+ def_id, constraint
+ );
Ok(constraint)
}
return Ok(result);
}
- let mut result = def.all_fields()
+ let mut result = def
+ .all_fields()
.map(|field| tcx.type_of(field.did))
.map(|fty| dtorck_constraint_for_ty(tcx, span, fty, 0, fty))
.collect::<Result<DtorckConstraint, NoSolution>>()?;
use rustc::traits::{EvaluationResult, Obligation, ObligationCause,
OverflowError, SelectionContext, TraitQueryMode};
use rustc::traits::query::CanonicalPredicateGoal;
+use rustc::ty::query::Providers;
use rustc::ty::{ParamEnvAnd, TyCtxt};
use syntax::codemap::DUMMY_SP;
-crate fn evaluate_obligation<'tcx>(
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ evaluate_obligation,
+ ..*p
+ };
+}
+
+fn evaluate_obligation<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: CanonicalPredicateGoal<'tcx>,
) -> Result<EvaluationResult, OverflowError> {
mod evaluate_obligation;
mod normalize_projection_ty;
mod normalize_erasing_regions;
-mod util;
pub mod lowering;
+mod type_op;
use rustc::ty::query::Providers;
pub fn provide(p: &mut Providers) {
- *p = Providers {
- dropck_outlives: dropck_outlives::dropck_outlives,
- adt_dtorck_constraint: dropck_outlives::adt_dtorck_constraint,
- normalize_projection_ty: normalize_projection_ty::normalize_projection_ty,
- normalize_ty_after_erasing_regions:
- normalize_erasing_regions::normalize_ty_after_erasing_regions,
- program_clauses_for: lowering::program_clauses_for,
- program_clauses_for_env: lowering::program_clauses_for_env,
- evaluate_obligation: evaluate_obligation::evaluate_obligation,
- ..*p
- };
+ dropck_outlives::provide(p);
+ evaluate_obligation::provide(p);
+ lowering::provide(p);
+ normalize_projection_ty::provide(p);
+ normalize_erasing_regions::provide(p);
+ type_op::provide(p);
}
use rustc::hir::{self, ImplPolarity};
use rustc::traits::{Clause, Clauses, DomainGoal, Goal, PolyDomainGoal, ProgramClause,
WhereClause, FromEnv, WellFormed};
+use rustc::ty::query::Providers;
use rustc::ty::subst::Substs;
use rustc::ty::{self, Slice, TyCtxt};
use rustc_data_structures::fx::FxHashSet;
use std::iter;
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ program_clauses_for,
+ program_clauses_for_env,
+ ..*p
+ };
+}
+
crate trait Lower<T> {
/// Lower a rustc construct (e.g. `ty::TraitPredicate`) to a chalk-like type.
fn lower(&self) -> T;
let hypotheses = vec![trait_implemented];
// `<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm>`
- let projection_ty = ty::ProjectionTy::from_ref_and_name(tcx, trait_ref, item.name);
+ let projection_ty = ty::ProjectionTy::from_ref_and_name(tcx, trait_ref, item.ident);
// `Normalize(<A0 as Trait<A1..An>>::AssocType<Pn+1..Pm> -> T)`
let normalize_goal = DomainGoal::Normalize(ty::ProjectionPredicate { projection_ty, ty });
use rustc::traits::{Normalized, ObligationCause};
use rustc::traits::query::NoSolution;
+use rustc::ty::query::Providers;
use rustc::ty::{self, ParamEnvAnd, Ty, TyCtxt};
use std::sync::atomic::Ordering;
-crate fn normalize_ty_after_erasing_regions<'tcx>(
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ normalize_ty_after_erasing_regions,
+ ..*p
+ };
+}
+
+fn normalize_ty_after_erasing_regions<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: ParamEnvAnd<'tcx, Ty<'tcx>>,
) -> Ty<'tcx> {
// except according to those terms.
use rustc::infer::canonical::{Canonical, QueryResult};
-use rustc::traits::{self, FulfillmentContext, ObligationCause, SelectionContext};
-use rustc::traits::query::{CanonicalProjectionGoal, NoSolution, normalize::NormalizationResult};
+use rustc::traits::query::{normalize::NormalizationResult, CanonicalProjectionGoal, NoSolution};
+use rustc::traits::{self, ObligationCause, SelectionContext, TraitEngineExt};
+use rustc::ty::query::Providers;
use rustc::ty::{ParamEnvAnd, TyCtxt};
use rustc_data_structures::sync::Lrc;
+use std::sync::atomic::Ordering;
use syntax::ast::DUMMY_NODE_ID;
use syntax_pos::DUMMY_SP;
-use util;
-use std::sync::atomic::Ordering;
-crate fn normalize_projection_ty<'tcx>(
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ normalize_projection_ty,
+ ..*p
+ };
+}
+
+fn normalize_projection_ty<'tcx>(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
goal: CanonicalProjectionGoal<'tcx>,
) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, NormalizationResult<'tcx>>>>, NoSolution> {
debug!("normalize_provider(goal={:#?})", goal);
- tcx.sess.perf_stats.normalize_projection_ty.fetch_add(1, Ordering::Relaxed);
- tcx.infer_ctxt().enter(|ref infcx| {
- let (
- ParamEnvAnd {
+ tcx.sess
+ .perf_stats
+ .normalize_projection_ty
+ .fetch_add(1, Ordering::Relaxed);
+ tcx.infer_ctxt().enter_canonical_trait_query(
+ &goal,
+ |infcx,
+ fulfill_cx,
+ ParamEnvAnd {
+ param_env,
+ value: goal,
+ }| {
+ let selcx = &mut SelectionContext::new(infcx);
+ let cause = ObligationCause::misc(DUMMY_SP, DUMMY_NODE_ID);
+ let mut obligations = vec![];
+ let answer = traits::normalize_projection_type(
+ selcx,
param_env,
- value: goal,
- },
- canonical_inference_vars,
- ) = infcx.instantiate_canonical_with_fresh_inference_vars(DUMMY_SP, &goal);
- let fulfill_cx = &mut FulfillmentContext::new();
- let selcx = &mut SelectionContext::new(infcx);
- let cause = ObligationCause::misc(DUMMY_SP, DUMMY_NODE_ID);
- let mut obligations = vec![];
- let answer =
- traits::normalize_projection_type(selcx, param_env, goal, cause, 0, &mut obligations);
- fulfill_cx.register_predicate_obligations(infcx, obligations);
-
- // Now that we have fulfilled as much as we can, create a solution
- // from what we've learned.
- util::make_query_response(
- infcx,
- canonical_inference_vars,
- NormalizationResult { normalized_ty: answer },
- fulfill_cx,
- )
- })
+ goal,
+ cause,
+ 0,
+ &mut obligations,
+ );
+ fulfill_cx.register_predicate_obligations(infcx, obligations);
+ Ok(NormalizationResult {
+ normalized_ty: answer,
+ })
+ },
+ )
}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use rustc::infer::canonical::{Canonical, QueryResult};
+use rustc::infer::InferCtxt;
+use rustc::traits::query::type_op::eq::Eq;
+use rustc::traits::query::type_op::normalize::Normalize;
+use rustc::traits::query::type_op::prove_predicate::ProvePredicate;
+use rustc::traits::query::type_op::subtype::Subtype;
+use rustc::traits::query::{Fallible, NoSolution};
+use rustc::traits::{FulfillmentContext, Normalized, Obligation, ObligationCause, TraitEngine,
+ TraitEngineExt};
+use rustc::ty::query::Providers;
+use rustc::ty::{FnSig, Lift, ParamEnvAnd, PolyFnSig, Predicate, Ty, TyCtxt, TypeFoldable};
+use rustc_data_structures::sync::Lrc;
+use std::fmt;
+
+crate fn provide(p: &mut Providers) {
+ *p = Providers {
+ type_op_eq,
+ type_op_prove_predicate,
+ type_op_subtype,
+ type_op_normalize_ty,
+ type_op_normalize_predicate,
+ type_op_normalize_fn_sig,
+ type_op_normalize_poly_fn_sig,
+ ..*p
+ };
+}
+
+fn type_op_eq<'tcx>(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Eq<'tcx>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+ let (param_env, Eq { a, b }) = key.into_parts();
+ Ok(infcx
+ .at(&ObligationCause::dummy(), param_env)
+ .eq(a, b)?
+ .into_value_registering_obligations(infcx, fulfill_cx))
+ })
+}
+
+fn type_op_normalize<T>(
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ fulfill_cx: &mut FulfillmentContext<'tcx>,
+ key: ParamEnvAnd<'tcx, Normalize<T>>,
+) -> Fallible<T>
+where
+ T: fmt::Debug + TypeFoldable<'tcx> + Lift<'gcx>,
+{
+ let (param_env, Normalize { value }) = key.into_parts();
+ let Normalized { value, obligations } = infcx
+ .at(&ObligationCause::dummy(), param_env)
+ .normalize(&value)?;
+ fulfill_cx.register_predicate_obligations(infcx, obligations);
+ Ok(value)
+}
+
+fn type_op_normalize_ty(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<Ty<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, Ty<'tcx>>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_normalize_predicate(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<Predicate<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, Predicate<'tcx>>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_normalize_fn_sig(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<FnSig<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, FnSig<'tcx>>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_normalize_poly_fn_sig(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Normalize<PolyFnSig<'tcx>>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, PolyFnSig<'tcx>>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, type_op_normalize)
+}
+
+fn type_op_subtype<'tcx>(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, Subtype<'tcx>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+ let (param_env, Subtype { sub, sup }) = key.into_parts();
+ Ok(infcx
+ .at(&ObligationCause::dummy(), param_env)
+ .sup(sup, sub)?
+ .into_value_registering_obligations(infcx, fulfill_cx))
+ })
+}
+
+fn type_op_prove_predicate<'tcx>(
+ tcx: TyCtxt<'_, 'tcx, 'tcx>,
+ canonicalized: Canonical<'tcx, ParamEnvAnd<'tcx, ProvePredicate<'tcx>>>,
+) -> Result<Lrc<Canonical<'tcx, QueryResult<'tcx, ()>>>, NoSolution> {
+ tcx.infer_ctxt()
+ .enter_canonical_trait_query(&canonicalized, |infcx, fulfill_cx, key| {
+ let (param_env, ProvePredicate { predicate }) = key.into_parts();
+ fulfill_cx.register_predicate_obligation(
+ infcx,
+ Obligation::new(ObligationCause::dummy(), param_env, predicate),
+ );
+ Ok(())
+ })
+}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-use rustc::infer::InferCtxt;
-use rustc::infer::canonical::{CanonicalVarValues, Canonicalize, Certainty, QueryResult};
-use rustc::infer::region_constraints::{Constraint, RegionConstraintData};
-use rustc::traits::{FulfillmentContext, TraitEngine};
-use rustc::traits::query::NoSolution;
-use rustc::ty;
-use std::fmt::Debug;
-
-/// The canonicalization form of `QueryResult<'tcx, T>`.
-type CanonicalizedQueryResult<'gcx, 'tcx, T> =
- <QueryResult<'tcx, T> as Canonicalize<'gcx, 'tcx>>::Canonicalized;
-
-crate fn make_query_response<'gcx, 'tcx, T>(
- infcx: &InferCtxt<'_, 'gcx, 'tcx>,
- inference_vars: CanonicalVarValues<'tcx>,
- answer: T,
- fulfill_cx: &mut FulfillmentContext<'tcx>,
-) -> Result<CanonicalizedQueryResult<'gcx, 'tcx, T>, NoSolution>
-where
- T: Debug,
- QueryResult<'tcx, T>: Canonicalize<'gcx, 'tcx>,
-{
- let tcx = infcx.tcx;
-
- debug!(
- "make_query_response(\
- inference_vars={:?}, \
- answer={:?})",
- inference_vars, answer,
- );
-
- // Select everything, returning errors.
- let true_errors = match fulfill_cx.select_where_possible(infcx) {
- Ok(()) => vec![],
- Err(errors) => errors,
- };
- debug!("true_errors = {:#?}", true_errors);
-
- if !true_errors.is_empty() {
- // FIXME -- we don't indicate *why* we failed to solve
- debug!("make_query_response: true_errors={:#?}", true_errors);
- return Err(NoSolution);
- }
-
- // Anything left unselected *now* must be an ambiguity.
- let ambig_errors = match fulfill_cx.select_all_or_error(infcx) {
- Ok(()) => vec![],
- Err(errors) => errors,
- };
- debug!("ambig_errors = {:#?}", ambig_errors);
-
- let region_obligations = infcx.take_registered_region_obligations();
-
- let region_constraints = infcx.with_region_constraints(|region_constraints| {
- let RegionConstraintData {
- constraints,
- verifys,
- givens,
- } = region_constraints;
-
- assert!(verifys.is_empty());
- assert!(givens.is_empty());
-
- let mut outlives: Vec<_> = constraints
- .into_iter()
- .map(|(k, _)| match *k {
- // Swap regions because we are going from sub (<=) to outlives
- // (>=).
- Constraint::VarSubVar(v1, v2) => ty::OutlivesPredicate(
- tcx.mk_region(ty::ReVar(v2)).into(),
- tcx.mk_region(ty::ReVar(v1)),
- ),
- Constraint::VarSubReg(v1, r2) => {
- ty::OutlivesPredicate(r2.into(), tcx.mk_region(ty::ReVar(v1)))
- }
- Constraint::RegSubVar(r1, v2) => {
- ty::OutlivesPredicate(tcx.mk_region(ty::ReVar(v2)).into(), r1)
- }
- Constraint::RegSubReg(r1, r2) => ty::OutlivesPredicate(r2.into(), r1),
- })
- .map(ty::Binder::dummy) // no bound regions in the code above
- .collect();
-
- outlives.extend(
- region_obligations
- .into_iter()
- .map(|(_, r_o)| ty::OutlivesPredicate(r_o.sup_type.into(), r_o.sub_region))
- .map(ty::Binder::dummy) // no bound regions in the code above
- );
-
- outlives
- });
-
- let certainty = if ambig_errors.is_empty() {
- Certainty::Proven
- } else {
- Certainty::Ambiguous
- };
-
- let (canonical_result, _) = infcx.canonicalize_response(&QueryResult {
- var_values: inference_vars,
- region_constraints,
- certainty,
- value: answer,
- });
-
- debug!(
- "make_query_response: canonical_result = {:#?}",
- canonical_result
- );
-
- Ok(canonical_result)
-}
}
struct ConvertedBinding<'tcx> {
- item_name: ast::Name,
+ item_name: ast::Ident,
ty: Ty<'tcx>,
span: Span,
}
let assoc_bindings = generic_args.bindings.iter().map(|binding| {
ConvertedBinding {
- item_name: binding.name,
+ item_name: binding.ident,
ty: self.ast_ty_to_ty(&binding.ty),
span: binding.span,
}
fn trait_defines_associated_type_named(&self,
trait_def_id: DefId,
- assoc_name: ast::Name)
+ assoc_name: ast::Ident)
-> bool
{
self.tcx().associated_items(trait_def_id).any(|item| {
item.kind == ty::AssociatedKind::Type &&
- self.tcx().hygienic_eq(assoc_name, item.name, trait_def_id)
+ self.tcx().hygienic_eq(assoc_name, item.ident, trait_def_id)
})
}
}?;
let (assoc_ident, def_scope) =
- tcx.adjust_ident(binding.item_name.to_ident(), candidate.def_id(), ref_id);
+ tcx.adjust_ident(binding.item_name, candidate.def_id(), ref_id);
let assoc_ty = tcx.associated_items(candidate.def_id()).find(|i| {
- i.kind == ty::AssociatedKind::Type && i.name.to_ident() == assoc_ident
+ i.kind == ty::AssociatedKind::Type && i.ident.modern() == assoc_ident
}).expect("missing associated type");
if !assoc_ty.vis.is_accessible_from(def_scope, tcx) {
let trait_def_id = assoc_item.container.id();
struct_span_err!(tcx.sess, span, E0191,
"the value of the associated type `{}` (from the trait `{}`) must be specified",
- assoc_item.name,
+ assoc_item.ident,
tcx.item_path_str(trait_def_id))
.span_label(span, format!(
- "missing associated type `{}` value", assoc_item.name))
+ "missing associated type `{}` value", assoc_item.ident))
.emit();
}
// any ambiguity.
fn find_bound_for_assoc_item(&self,
ty_param_def_id: DefId,
- assoc_name: ast::Name,
+ assoc_name: ast::Ident,
span: Span)
-> Result<ty::PolyTraitRef<'tcx>, ErrorReported>
{
fn one_bound_for_assoc_type<I>(&self,
mut bounds: I,
ty_param_name: &str,
- assoc_name: ast::Name,
+ assoc_name: ast::Ident,
span: Span)
-> Result<ty::PolyTraitRef<'tcx>, ErrorReported>
where I: Iterator<Item=ty::PolyTraitRef<'tcx>>
for bound in bounds {
let bound_span = self.tcx().associated_items(bound.def_id()).find(|item| {
item.kind == ty::AssociatedKind::Type &&
- self.tcx().hygienic_eq(assoc_name, item.name, bound.def_id())
+ self.tcx().hygienic_eq(assoc_name, item.ident, bound.def_id())
})
.and_then(|item| self.tcx().hir.span_if_local(item.def_id));
-> (Ty<'tcx>, Def)
{
let tcx = self.tcx();
- let assoc_name = item_segment.name;
+ let assoc_name = item_segment.ident;
debug!("associated_path_def_to_ty: {:?}::{}", ty, assoc_name);
let candidates =
traits::supertraits(tcx, ty::Binder::bind(trait_ref))
- .filter(|r| self.trait_defines_associated_type_named(r.def_id(),
- assoc_name));
+ .filter(|r| self.trait_defines_associated_type_named(r.def_id(), assoc_name));
match self.one_bound_for_assoc_type(candidates, "Self", assoc_name, span) {
Ok(bound) => bound,
};
let trait_did = bound.def_id();
- let (assoc_ident, def_scope) = tcx.adjust_ident(assoc_name.to_ident(), trait_did, ref_id);
+ let (assoc_ident, def_scope) = tcx.adjust_ident(assoc_name, trait_did, ref_id);
let item = tcx.associated_items(trait_did).find(|i| {
Namespace::from(i.kind) == Namespace::Type &&
- i.name.to_ident() == assoc_ident
+ i.ident.modern() == assoc_ident
})
.expect("missing associated type");
self.report_ambiguous_associated_type(span,
"Type",
&path_str,
- &item_segment.name.as_str());
+ &item_segment.ident.as_str());
return tcx.types.err;
};
use rustc::ty::adjustment::{Adjustment, Adjust, OverloadedDeref};
use syntax_pos::Span;
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
use std::iter;
ty::ProjectionTy::from_ref_and_name(
tcx,
trait_ref,
- Symbol::intern("Target"),
+ Ident::from_str("Target"),
),
cause,
0,
use rustc::ty::{self, TyCtxt, TypeFoldable, Ty};
use rustc::ty::adjustment::{Adjustment, Adjust, AllowTwoPhase, AutoBorrow, AutoBorrowMutability};
use rustc_target::spec::abi;
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
use syntax_pos::Span;
use rustc::hir;
MethodCallee<'tcx>)> {
// Try the options that are least restrictive on the caller first.
for &(opt_trait_def_id, method_name, borrow) in
- &[(self.tcx.lang_items().fn_trait(), Symbol::intern("call"), true),
- (self.tcx.lang_items().fn_mut_trait(), Symbol::intern("call_mut"), true),
- (self.tcx.lang_items().fn_once_trait(), Symbol::intern("call_once"), false)] {
+ &[(self.tcx.lang_items().fn_trait(), Ident::from_str("call"), true),
+ (self.tcx.lang_items().fn_mut_trait(), Ident::from_str("call_mut"), true),
+ (self.tcx.lang_items().fn_once_trait(), Ident::from_str("call_once"), false)] {
let trait_def_id = match opt_trait_def_id {
Some(def_id) => def_id,
None => continue,
fn trivial_cast_lint(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) {
let t_cast = self.cast_ty;
let t_expr = self.expr_ty;
- if t_cast.is_numeric() && t_expr.is_numeric() {
- fcx.tcx.lint_node(
- lint::builtin::TRIVIAL_NUMERIC_CASTS,
- self.expr.id,
- self.span,
- &format!("trivial numeric cast: `{}` as `{}`. Cast can be \
- replaced by coercion, this might require type \
- ascription or a temporary variable",
- fcx.ty_to_string(t_expr),
- fcx.ty_to_string(t_cast)));
+ let type_asc_or = if fcx.tcx.features().type_ascription {
+ "type ascription or "
} else {
- fcx.tcx.lint_node(
- lint::builtin::TRIVIAL_CASTS,
- self.expr.id,
- self.span,
- &format!("trivial cast: `{}` as `{}`. Cast can be \
- replaced by coercion, this might require type \
- ascription or a temporary variable",
- fcx.ty_to_string(t_expr),
- fcx.ty_to_string(t_cast)));
- }
-
+ ""
+ };
+ let (adjective, lint) = if t_cast.is_numeric() && t_expr.is_numeric() {
+ ("numeric ", lint::builtin::TRIVIAL_NUMERIC_CASTS)
+ } else {
+ ("", lint::builtin::TRIVIAL_CASTS)
+ };
+ let mut err = fcx.tcx.struct_span_lint_node(
+ lint,
+ self.expr.id,
+ self.span,
+ &format!("trivial {}cast: `{}` as `{}`",
+ adjective,
+ fcx.ty_to_string(t_expr),
+ fcx.ty_to_string(t_cast)));
+ err.help(&format!("cast can be replaced by coercion; this might \
+ require {}a temporary variable", type_asc_or));
+ err.emit();
}
pub fn check(mut self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) {
span: impl_m_span,
body_id: impl_m_node_id,
code: ObligationCauseCode::CompareImplMethodObligation {
- item_name: impl_m.name,
+ item_name: impl_m.ident.name,
impl_item_def_id: impl_m.def_id,
trait_item_def_id: trait_m.def_id,
},
cause.span(&tcx),
E0053,
"method `{}` has an incompatible type for trait",
- trait_m.name);
+ trait_m.ident);
infcx.note_type_err(&mut diag,
&cause,
E0195,
"lifetime parameters or bounds on method `{}` do not match \
the trait declaration",
- impl_m.name);
+ impl_m.ident);
err.span_label(span, "lifetimes do not match method in trait");
if let Some(sp) = tcx.hir.span_if_local(trait_m.def_id) {
err.span_label(tcx.sess.codemap().def_span(sp),
E0185,
"method `{}` has a `{}` declaration in the impl, but \
not in the trait",
- trait_m.name,
+ trait_m.ident,
self_descr);
err.span_label(impl_m_span, format!("`{}` used in impl", self_descr));
if let Some(span) = tcx.hir.span_if_local(trait_m.def_id) {
err.span_label(span, format!("trait method declared without `{}`", self_descr));
} else {
- err.note_trait_signature(trait_m.name.to_string(),
+ err.note_trait_signature(trait_m.ident.to_string(),
trait_m.signature(&tcx));
}
err.emit();
E0186,
"method `{}` has a `{}` declaration in the trait, but \
not in the impl",
- trait_m.name,
+ trait_m.ident,
self_descr);
err.span_label(impl_m_span, format!("expected `{}` in impl", self_descr));
if let Some(span) = tcx.hir.span_if_local(trait_m.def_id) {
err.span_label(span, format!("`{}` used in trait", self_descr));
} else {
- err.note_trait_signature(trait_m.name.to_string(),
+ err.note_trait_signature(trait_m.ident.to_string(),
trait_m.signature(&tcx));
}
err.emit();
E0049,
"method `{}` has {} type parameter{} but its trait \
declaration has {} type parameter{}",
- trait_m.name,
+ trait_m.ident,
num_impl_m_type_params,
if num_impl_m_type_params == 1 { "" } else { "s" },
num_trait_m_type_params,
E0050,
"method `{}` has {} parameter{} but the declaration in \
trait `{}` has {}",
- trait_m.name,
+ trait_m.ident,
impl_number_args,
if impl_number_args == 1 { "" } else { "s" },
tcx.item_path_str(trait_m.def_id),
format!("{} parameter", trait_number_args)
}));
} else {
- err.note_trait_signature(trait_m.name.to_string(),
+ err.note_trait_signature(trait_m.ident.to_string(),
trait_m.signature(&tcx));
}
err.span_label(impl_span,
impl_span,
E0643,
"method `{}` has incompatible signature for trait",
- trait_m.name);
+ trait_m.ident);
err.span_label(trait_span, "declaration in trait here");
match (impl_synthetic, trait_synthetic) {
// The case where the impl method uses `impl Trait` but the trait method uses
E0326,
"implemented const `{}` has an incompatible type for \
trait",
- trait_c.name);
+ trait_c.ident);
let trait_c_node_id = tcx.hir.as_local_node_id(trait_c.def_id);
let trait_c_span = trait_c_node_id.map(|trait_c_node_id| {
let methods = self.get_conversion_methods(expr.span, expected, checked_ty);
if let Ok(expr_text) = self.tcx.sess.codemap().span_to_snippet(expr.span) {
let suggestions = iter::repeat(expr_text).zip(methods.iter())
- .map(|(receiver, method)| format!("{}.{}()", receiver, method.name))
+ .map(|(receiver, method)| format!("{}.{}()", receiver, method.ident))
.collect::<Vec<_>>();
if !suggestions.is_empty() {
err.span_suggestions(expr.span,
})), 1) = (self.tcx.hir.find(parent), decl.inputs.len()) {
let self_ty = self.tables.borrow().node_id_to_type(expr[0].hir_id);
let self_ty = format!("{:?}", self_ty);
- let name = path.name.as_str();
+ let name = path.ident.as_str();
let is_as_ref_able = (
self_ty.starts_with("&std::option::Option") ||
self_ty.starts_with("&std::result::Result") ||
use rustc::middle::region;
use rustc::ty::subst::{Subst, Substs, UnpackedKind};
use rustc::ty::{self, Ty, TyCtxt};
-use rustc::traits::{ObligationCause, TraitEngine};
+use rustc::traits::{ObligationCause, TraitEngine, TraitEngineExt};
use util::common::ErrorReported;
use syntax::ast;
allow_private: bool)
-> bool {
let mode = probe::Mode::MethodCall;
- match self.probe_for_name(method_name.span, mode, method_name.name,
+ match self.probe_for_name(method_name.span, mode, method_name,
IsSuggestion(false), self_ty, call_expr_id,
ProbeScope::TraitsInScope) {
Ok(..) => true,
self_expr: &'gcx hir::Expr)
-> Result<MethodCallee<'tcx>, MethodError<'tcx>> {
debug!("lookup(method_name={}, self_ty={:?}, call_expr={:?}, self_expr={:?})",
- segment.name,
+ segment.ident,
self_ty,
call_expr,
self_expr);
let pick = self.lookup_probe(
span,
- segment.name,
+ segment.ident,
self_ty,
call_expr,
ProbeScope::TraitsInScope
// We probe again, taking all traits into account (not only those in scope).
let candidates =
match self.lookup_probe(span,
- segment.name,
+ segment.ident,
self_ty,
call_expr,
ProbeScope::AllTraits) {
fn lookup_probe(&self,
span: Span,
- method_name: ast::Name,
+ method_name: ast::Ident,
self_ty: Ty<'tcx>,
call_expr: &'gcx hir::Expr,
scope: ProbeScope)
/// of this method is basically the same as confirmation.
pub fn lookup_method_in_trait(&self,
span: Span,
- m_name: ast::Name,
+ m_name: ast::Ident,
trait_def_id: DefId,
self_ty: Ty<'tcx>,
opt_input_types: Option<&[Ty<'tcx>]>)
// Trait must have a method named `m_name` and it should not have
// type parameters or early-bound regions.
let tcx = self.tcx;
- let method_item = self.associated_item(trait_def_id, m_name, Namespace::Value).unwrap();
+ let method_item =
+ self.associated_item(trait_def_id, m_name, Namespace::Value).unwrap();
let def_id = method_item.def_id;
let generics = tcx.generics_of(def_id);
assert_eq!(generics.params.len(), 0);
pub fn resolve_ufcs(&self,
span: Span,
- method_name: ast::Name,
+ method_name: ast::Ident,
self_ty: Ty<'tcx>,
expr_id: ast::NodeId)
-> Result<Def, MethodError<'tcx>> {
/// Find item with name `item_name` defined in impl/trait `def_id`
/// and return it, or `None`, if no such item was defined there.
- pub fn associated_item(&self, def_id: DefId, item_name: ast::Name, ns: Namespace)
+ pub fn associated_item(&self, def_id: DefId, item_name: ast::Ident, ns: Namespace)
-> Option<ty::AssociatedItem> {
- self.tcx.associated_items(def_id)
- .find(|item| Namespace::from(item.kind) == ns &&
- self.tcx.hygienic_eq(item_name, item.name, def_id))
+ self.tcx.associated_items(def_id).find(|item| {
+ Namespace::from(item.kind) == ns &&
+ self.tcx.hygienic_eq(item_name, item.ident, def_id)
+ })
}
}
fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
span: Span,
mode: Mode,
- method_name: Option<ast::Name>,
+ method_name: Option<ast::Ident>,
return_type: Option<Ty<'tcx>>,
steps: Rc<Vec<CandidateStep<'tcx>>>,
inherent_candidates: Vec<Candidate<'tcx>>,
pub fn probe_for_name(&self,
span: Span,
mode: Mode,
- item_name: ast::Name,
+ item_name: ast::Ident,
is_suggestion: IsSuggestion,
self_ty: Ty<'tcx>,
scope_expr_id: ast::NodeId,
fn probe_op<OP,R>(&'a self,
span: Span,
mode: Mode,
- method_name: Option<ast::Name>,
+ method_name: Option<ast::Ident>,
return_type: Option<Ty<'tcx>>,
is_suggestion: IsSuggestion,
self_ty: Ty<'tcx>,
fn new(fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
span: Span,
mode: Mode,
- method_name: Option<ast::Name>,
+ method_name: Option<ast::Ident>,
return_type: Option<Ty<'tcx>>,
steps: Rc<Vec<CandidateStep<'tcx>>>,
is_suggestion: IsSuggestion)
{
let is_accessible = if let Some(name) = self.method_name {
let item = candidate.item;
- let def_scope =
- self.tcx.adjust_ident(name.to_ident(), item.container.id(), self.body_id).1;
+ let def_scope = self.tcx.adjust_ident(name, item.container.id(), self.body_id).1;
item.vis.is_accessible_from(def_scope, self.tcx)
} else {
true
Ok(())
}
- fn candidate_method_names(&self) -> Vec<ast::Name> {
+ fn candidate_method_names(&self) -> Vec<ast::Ident> {
let mut set = FxHashSet();
let mut names: Vec<_> = self.inherent_candidates
.iter()
true
}
})
- .map(|candidate| candidate.item.name)
+ .map(|candidate| candidate.item.ident)
.filter(|&name| set.insert(name))
.collect();
Ok(None)
} else {
let best_name = {
- let names = applicable_close_candidates.iter().map(|cand| &cand.name);
+ let names = applicable_close_candidates.iter().map(|cand| &cand.ident.name);
find_best_match_for_name(names,
&self.method_name.unwrap().as_str(),
None)
}.unwrap();
Ok(applicable_close_candidates
.into_iter()
- .find(|method| method.name == best_name))
+ .find(|method| method.ident.name == best_name))
}
})
}
let max_dist = max(name.as_str().len(), 3) / 3;
self.tcx.associated_items(def_id)
.filter(|x| {
- let dist = lev_distance(&*name.as_str(), &x.name.as_str());
+ let dist = lev_distance(&*name.as_str(), &x.ident.as_str());
Namespace::from(x.kind) == Namespace::Value && dist > 0
&& dist <= max_dist
})
pub fn report_method_error(&self,
span: Span,
rcvr_ty: Ty<'tcx>,
- item_name: ast::Name,
+ item_name: ast::Ident,
rcvr_expr: Option<&hir::Expr>,
error: MethodError<'tcx>,
args: Option<&'gcx [hir::Expr]>) {
match ty.sty {
ty::TyAdt(def, substs) if !def.is_enum() => {
let variant = &def.non_enum_variant();
- if let Some(index) =
- self.tcx.find_field_index(item_name.to_ident(), variant) {
+ if let Some(index) = self.tcx.find_field_index(item_name, variant) {
let field = &variant.fields[index];
let snippet = tcx.sess.codemap().span_to_snippet(expr.span);
let expr_string = match snippet {
report_function!(expr.span, expr_string);
} else if let hir::ExprPath(hir::QPath::Resolved(_, ref path)) = expr.node {
if let Some(segment) = path.segments.last() {
- report_function!(expr.span, segment.name);
+ report_function!(expr.span, segment.ident);
}
}
}
}
if let Some(lev_candidate) = lev_candidate {
- err.help(&format!("did you mean `{}`?", lev_candidate.name));
+ err.help(&format!("did you mean `{}`?", lev_candidate.ident));
}
err.emit();
}
err: &mut DiagnosticBuilder,
span: Span,
rcvr_ty: Ty<'tcx>,
- item_name: ast::Name,
+ item_name: ast::Ident,
rcvr_expr: Option<&hir::Expr>,
valid_out_of_scope_traits: Vec<DefId>) {
if self.suggest_valid_traits(err, valid_out_of_scope_traits) {
// Add pattern bindings.
fn visit_pat(&mut self, p: &'gcx hir::Pat) {
- if let PatKind::Binding(_, _, ref path1, _) = p.node {
+ if let PatKind::Binding(_, _, ident, _) = p.node {
let var_ty = self.assign(p.span, p.id, None);
self.fcx.require_type_is_sized(var_ty, p.span,
traits::VariableType(p.id));
debug!("Pattern binding {} is assigned to {} with type {:?}",
- path1.node,
+ ident,
self.fcx.ty_to_string(
self.fcx.locals.borrow().get(&p.id).unwrap().clone()),
var_ty);
// The check for a non-trivial pattern is a hack to avoid duplicate warnings
// for simple cases like `fn foo(x: Trait)`,
// where we would error once on the parameter as a whole, and once on the binding `x`.
- if arg.pat.simple_name().is_none() {
+ if arg.pat.simple_ident().is_none() {
fcx.require_type_is_sized(arg_ty, decl.output.span(), traits::MiscObligation);
}
tcx.sess, impl_item.span, E0520,
"`{}` specializes an item from a parent `impl`, but \
that item is not marked `default`",
- impl_item.name);
+ impl_item.ident);
err.span_label(impl_item.span, format!("cannot specialize default item `{}`",
- impl_item.name));
+ impl_item.ident));
match tcx.span_of_impl(parent_impl) {
Ok(span) => {
err.span_label(span, "parent `impl` is here");
err.note(&format!("to specialize, `{}` in the parent `impl` must be marked `default`",
- impl_item.name));
+ impl_item.ident));
}
Err(cname) => {
err.note(&format!("parent implementation is in crate `{}`", cname));
hir::ImplItemKind::Type(_) => ty::AssociatedKind::Type
};
- let parent = ancestors.defs(tcx, trait_item.name, kind, trait_def.def_id).skip(1).next()
+ let parent = ancestors.defs(tcx, trait_item.ident, kind, trait_def.def_id).skip(1).next()
.map(|node_item| node_item.map(|parent| parent.defaultness));
if let Some(parent) = parent {
let ty_impl_item = tcx.associated_item(tcx.hir.local_def_id(impl_item.id));
let ty_trait_item = tcx.associated_items(impl_trait_ref.def_id)
.find(|ac| Namespace::from(&impl_item.node) == Namespace::from(ac.kind) &&
- tcx.hygienic_eq(ty_impl_item.name, ac.name, impl_trait_ref.def_id))
+ tcx.hygienic_eq(ty_impl_item.ident, ac.ident, impl_trait_ref.def_id))
.or_else(|| {
// Not compatible, but needed for the error message
tcx.associated_items(impl_trait_ref.def_id)
- .find(|ac| tcx.hygienic_eq(ty_impl_item.name, ac.name, impl_trait_ref.def_id))
+ .find(|ac| tcx.hygienic_eq(ty_impl_item.ident, ac.ident, impl_trait_ref.def_id))
});
// Check that impl definition matches trait definition
let mut err = struct_span_err!(tcx.sess, impl_item.span, E0323,
"item `{}` is an associated const, \
which doesn't match its trait `{}`",
- ty_impl_item.name,
+ ty_impl_item.ident,
impl_trait_ref);
err.span_label(impl_item.span, "does not match trait");
// We can only get the spans from local trait definition
let mut err = struct_span_err!(tcx.sess, impl_item.span, E0324,
"item `{}` is an associated method, \
which doesn't match its trait `{}`",
- ty_impl_item.name,
+ ty_impl_item.ident,
impl_trait_ref);
err.span_label(impl_item.span, "does not match trait");
if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
let mut err = struct_span_err!(tcx.sess, impl_item.span, E0325,
"item `{}` is an associated type, \
which doesn't match its trait `{}`",
- ty_impl_item.name,
+ ty_impl_item.ident,
impl_trait_ref);
err.span_label(impl_item.span, "does not match trait");
if let Some(trait_span) = tcx.hir.span_if_local(ty_trait_item.def_id) {
let associated_type_overridden = overridden_associated_type.is_some();
for trait_item in tcx.associated_items(impl_trait_ref.def_id) {
let is_implemented = trait_def.ancestors(tcx, impl_id)
- .defs(tcx, trait_item.name, trait_item.kind, impl_trait_ref.def_id)
+ .defs(tcx, trait_item.ident, trait_item.kind, impl_trait_ref.def_id)
.next()
.map(|node_item| !node_item.node.is_from_trait())
.unwrap_or(false);
if !trait_item.defaultness.has_value() {
missing_items.push(trait_item);
} else if associated_type_overridden {
- invalidated_items.push(trait_item.name);
+ invalidated_items.push(trait_item.ident);
}
}
}
let mut err = struct_span_err!(tcx.sess, impl_span, E0046,
"not all trait items implemented, missing: `{}`",
missing_items.iter()
- .map(|trait_item| trait_item.name.to_string())
+ .map(|trait_item| trait_item.ident.to_string())
.collect::<Vec<_>>().join("`, `"));
err.span_label(impl_span, format!("missing `{}` in implementation",
missing_items.iter()
- .map(|trait_item| trait_item.name.to_string())
+ .map(|trait_item| trait_item.ident.to_string())
.collect::<Vec<_>>().join("`, `")));
for trait_item in missing_items {
if let Some(span) = tcx.hir.span_if_local(trait_item.def_id) {
- err.span_label(span, format!("`{}` from trait", trait_item.name));
+ err.span_label(span, format!("`{}` from trait", trait_item.ident));
} else {
- err.note_trait_signature(trait_item.name.to_string(),
+ err.note_trait_signature(trait_item.ident.to_string(),
trait_item.signature(&tcx));
}
}
span_err!(tcx.sess, invalidator.span, E0399,
"the following trait items need to be reimplemented \
as `{}` was overridden: `{}`",
- invalidator.name,
+ invalidator.ident,
invalidated_items.iter()
.map(|name| name.to_string())
.collect::<Vec<_>>().join("`, `"))
None
}
- fn resolve_place_op(&self, op: PlaceOp, is_mut: bool) -> (Option<DefId>, Symbol) {
+ fn resolve_place_op(&self, op: PlaceOp, is_mut: bool) -> (Option<DefId>, ast::Ident) {
let (tr, name) = match (op, is_mut) {
(PlaceOp::Deref, false) =>
(self.tcx.lang_items().deref_trait(), "deref"),
(PlaceOp::Index, true) =>
(self.tcx.lang_items().index_mut_trait(), "index_mut"),
};
- (tr, Symbol::intern(name))
+ (tr, ast::Ident::from_str(name))
}
fn try_overloaded_place_op(&self,
Ok(method)
}
Err(error) => {
- if segment.name != keywords::Invalid.name() {
+ if segment.ident.name != keywords::Invalid.name() {
self.report_method_error(span,
rcvr_t,
- segment.name,
+ segment.ident,
Some(rcvr),
error,
Some(args));
// ... except when we try to 'break rust;'.
// ICE this expression in particular (see #43162).
if let hir::ExprPath(hir::QPath::Resolved(_, ref path)) = e.node {
- if path.segments.len() == 1 && path.segments[0].name == "rust" {
+ if path.segments.len() == 1 && path.segments[0].ident.name == "rust" {
fatally_break_rust(self.tcx.sess);
}
}
}
}
- hir::ExprContinue(_) => { tcx.types.never }
+ hir::ExprContinue(destination) => {
+ if let Ok(_) = destination.target_id {
+ tcx.types.never
+ } else {
+ // There was an error, make typecheck fail
+ tcx.types.err
+ }
+ }
hir::ExprRet(ref expr_opt) => {
if self.ret_coercion.is_none() {
struct_span_err!(self.tcx.sess, expr.span, E0572,
// errors with default match binding modes. See #44614.
return (*cached_def, Some(ty), slice::from_ref(&**item_segment))
}
- let item_name = item_segment.name;
+ let item_name = item_segment.ident;
let def = match self.resolve_ufcs(span, item_name, ty, node_id) {
Ok(def) => def,
Err(error) => {
method::MethodError::PrivateMatch(def, _) => def,
_ => Def::Err,
};
- if item_name != keywords::Invalid.name() {
+ if item_name.name != keywords::Invalid.name() {
self.report_method_error(span, ty, item_name, None, error, None);
}
def
use rustc::infer::type_variable::TypeVariableOrigin;
use errors;
use syntax_pos::Span;
-use syntax::symbol::Symbol;
+use syntax::ast::Ident;
use rustc::hir;
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
trait_did);
let method = trait_did.and_then(|trait_did| {
- let opname = Symbol::intern(opname);
+ let opname = Ident::from_str(opname);
self.lookup_method_in_trait(span, opname, trait_did, lhs_ty, Some(other_tys))
});
}
let param = &hir_generics.params[index];
- report_bivariance(tcx, param.span, param.name.name());
+ report_bivariance(tcx, param.span, param.name.ident().name);
}
}
let name_and_namespace = |def_id| {
let item = self.tcx.associated_item(def_id);
- (item.name, Namespace::from(item.kind))
+ (item.ident, Namespace::from(item.kind))
};
let impl_items1 = self.tcx.associated_item_def_ids(impl1);
convert_variant_ctor(tcx, struct_def.id());
}
},
- hir::ItemExistential(..) |
+ hir::ItemExistential(..) => {}
hir::ItemTy(..) | hir::ItemStatic(..) | hir::ItemConst(..) | hir::ItemFn(..) => {
tcx.generics_of(def_id);
tcx.type_of(def_id);
let early_lifetimes = early_bound_lifetimes_from_generics(tcx, ast_generics);
params.extend(early_lifetimes.enumerate().map(|(i, param)| {
ty::GenericParamDef {
- name: param.name.name().as_interned_str(),
+ name: param.name.ident().as_interned_str(),
index: own_start + i as u32,
def_id: tcx.hir.local_def_id(param.id),
pure_wrt_drop: param.pure_wrt_drop,
let mut i = 0;
params.extend(ast_generics.params.iter().filter_map(|param| match param.kind {
GenericParamKind::Type { ref default, synthetic, .. } => {
- if param.name.name() == keywords::SelfType.name() {
+ if param.name.ident().name == keywords::SelfType.name() {
span_bug!(param.span, "`Self` should not be the name of a regular parameter");
}
let ty_param = ty::GenericParamDef {
index: type_start + i as u32,
- name: param.name.name().as_interned_str(),
+ name: param.name.ident().as_interned_str(),
def_id: tcx.hir.local_def_id(param.id),
pure_wrt_drop: param.pure_wrt_drop,
kind: ty::GenericParamDefKind::Type {
ItemExistential(hir::ExistTy { impl_trait_fn: None, .. }) => unimplemented!(),
// existential types desugared from impl Trait
ItemExistential(hir::ExistTy { impl_trait_fn: Some(owner), .. }) => {
- tcx.typeck_tables_of(owner).concrete_existential_types
- .get(&def_id)
- .cloned()
- .unwrap_or_else(|| {
- // This can occur if some error in the
- // owner fn prevented us from populating
- // the `concrete_existential_types` table.
- tcx.sess.delay_span_bug(
- DUMMY_SP,
- &format!(
- "owner {:?} has no existential type for {:?} in its tables",
- owner,
- def_id,
- ),
- );
-
- tcx.types.err
- })
+ tcx.typeck_tables_of(owner).concrete_existential_types[&def_id]
},
ItemTrait(..) | ItemTraitAlias(..) |
ItemMod(..) |
let region = tcx.mk_region(ty::ReEarlyBound(ty::EarlyBoundRegion {
def_id: tcx.hir.local_def_id(param.id),
index,
- name: param.name.name().as_interned_str(),
+ name: param.name.ident().as_interned_str(),
}));
index += 1;
for param in &ast_generics.params {
match param.kind {
GenericParamKind::Type { .. } => {
- let name = param.name.name().as_interned_str();
+ let name = param.name.ident().as_interned_str();
let param_ty = ty::ParamTy::new(index, name).to_ty(tcx);
index += 1;
}
});
} else if attr.check_name("export_name") {
- if let s @ Some(_) = attr.value_str() {
- codegen_fn_attrs.export_name = s;
+ if let Some(s) = attr.value_str() {
+ if s.as_str().contains("\0") {
+ // `#[export_name = ...]` will be converted to a null-terminated string,
+ // so it may not contain any null characters.
+ struct_span_err!(tcx.sess, attr.span, E0648,
+ "`export_name` may not contain null characters")
+ .emit();
+ }
+ codegen_fn_attrs.export_name = Some(s);
} else {
struct_span_err!(tcx.sess, attr.span, E0558,
- "export_name attribute has invalid format")
+ "`export_name` attribute has invalid format")
.span_label(attr.span, "did you mean #[export_name=\"*\"]?")
.emit();
}
// The ordering relation for strings can't be evaluated at compile time,
// so this doesn't work:
match string {
- "hello" ... "world" => {}
+ "hello" ..= "world" => {}
_ => {}
}
let x = 1u8;
match x {
- 0u8...3i8 => (),
+ 0u8..=3i8 => (),
// error: mismatched types in range: expected u8, found i8
_ => ()
}
let x = 1u8;
match x {
- 0u8...3u8 => (), // ok!
+ 0u8..=3u8 => (), // ok!
_ => ()
}
```
Erroneous code example:
```ignore (error-emitted-at-codegen-which-cannot-be-handled-by-compile_fail)
-#[export_name] // error: export_name attribute has invalid format
+#[export_name] // error: `export_name` attribute has invalid format
pub fn something() {}
fn main() {}
```
"##,
+E0648: r##"
+`export_name` attributes may not contain null characters (`\0`).
+
+```compile_fail,E0648
+#[export_name="\0foo"] // error: `export_name` may not contain null characters
+pub fn bar() {}
+```
+"##,
+
E0689: r##"
This error indicates that the numeric value for the method being passed exists
but the type of the numeric value or binding could not be identified.
hir::ImplItemKind::Type(_) => &mut seen_type_items,
_ => &mut seen_value_items,
};
- match seen_items.entry(impl_item.name) {
+ match seen_items.entry(impl_item.ident.modern()) {
Occupied(entry) => {
let mut err = struct_span_err!(tcx.sess, impl_item.span, E0201,
"duplicate definitions with name `{}`:",
- impl_item.name);
+ impl_item.ident);
err.span_label(*entry.get(),
format!("previous definition of `{}` here",
- impl_item.name));
+ impl_item.ident));
err.span_label(impl_item.span, "duplicate definition");
err.emit();
}
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::query::Providers;
-use rustc::traits::{ObligationCause, ObligationCauseCode, TraitEngine};
+use rustc::traits::{ObligationCause, ObligationCauseCode, TraitEngine, TraitEngineExt};
use session::{CompileIncomplete, config};
use util::common::time;
[dependencies]
pulldown-cmark = { version = "0.1.2", default-features = false }
-minifier = "0.0.11"
+minifier = "0.0.14"
tempfile = "3"
let mut segments = path.segments.into_vec();
let last = segments.pop().unwrap();
- let real_name = name.map(|name| Symbol::intern(&name));
+ let real_name = name.map(|name| Ident::from_str(&name));
segments.push(hir::PathSegment::new(
- real_name.unwrap_or(last.name),
+ real_name.unwrap_or(last.ident),
self.generics_to_path_params(generics.clone()),
false,
));
match param.kind {
ty::GenericParamDefKind::Lifetime => {
let name = if param.name == "" {
- hir::ParamName::Plain(keywords::StaticLifetime.name())
+ hir::ParamName::Plain(keywords::StaticLifetime.ident())
} else {
- hir::ParamName::Plain(param.name.as_symbol())
+ hir::ParamName::Plain(ast::Ident::from_interned_str(param.name))
};
args.push(hir::GenericArg::Lifetime(hir::Lifetime {
}));
}
ty::GenericParamDefKind::Type {..} => {
- args.push(hir::GenericArg::Type(P(self.ty_param_to_ty(param.clone()))));
+ args.push(hir::GenericArg::Type(self.ty_param_to_ty(param.clone())));
}
}
}
span: DUMMY_SP,
def: Def::TyParam(param.def_id),
segments: HirVec::from_vec(vec![
- hir::PathSegment::from_name(param.name.as_symbol())
+ hir::PathSegment::from_ident(Ident::from_interned_str(param.name))
]),
}),
)),
let provided = trait_.def_id().map(|did| {
tcx.provided_trait_methods(did)
.into_iter()
- .map(|meth| meth.name.to_string())
+ .map(|meth| meth.ident.to_string())
.collect()
}).unwrap_or(FxHashSet());
use syntax::symbol::{Symbol, InternedString};
use syntax_pos::{self, DUMMY_SP, Pos, FileName};
-use rustc::middle::const_val::ConstVal;
+use rustc::mir::interpret::ConstValue;
use rustc::middle::privacy::AccessLevels;
use rustc::middle::resolve_lifetime as rl;
use rustc::ty::fold::TypeFolder;
Def::Struct(did) | Def::Union(did) | Def::Enum(did) | Def::TyAlias(did) => {
let item = cx.tcx.inherent_impls(did).iter()
.flat_map(|imp| cx.tcx.associated_items(*imp))
- .find(|item| item.name == item_name);
+ .find(|item| item.ident.name == item_name);
if let Some(item) = item {
let out = match item.kind {
ty::AssociatedKind::Method if is_val => "method",
Def::Trait(did) => {
let item = cx.tcx.associated_item_def_ids(did).iter()
.map(|item| cx.tcx.associated_item(*item))
- .find(|item| item.name == item_name);
+ .find(|item| item.ident.name == item_name);
if let Some(item) = item {
let kind = match item.kind {
ty::AssociatedKind::Const if is_val => "associatedconstant",
_ => {}
}
}
- Lifetime(self.name.name().to_string())
+ Lifetime(self.name.ident().to_string())
}
}
hir::GenericBound::Outlives(lt) => lt,
_ => panic!(),
});
- let name = bounds.next().unwrap().name.name();
- let mut s = format!("{}: {}", self.name.name(), name);
+ let name = bounds.next().unwrap().name.ident();
+ let mut s = format!("{}: {}", self.name.ident(), name);
for bound in bounds {
- s.push_str(&format!(" + {}", bound.name.name()));
+ s.push_str(&format!(" + {}", bound.name.ident()));
}
Lifetime(s)
} else {
- Lifetime(self.name.name().to_string())
+ Lifetime(self.name.ident().to_string())
}
}
_ => panic!(),
GenericBound::Outlives(_) => panic!("cleaning a trait got a lifetime"),
};
Type::QPath {
- name: cx.tcx.associated_item(self.item_def_id).name.clean(cx),
+ name: cx.tcx.associated_item(self.item_def_id).ident.name.clean(cx),
self_type: box self.self_ty().clean(cx),
trait_: box trait_
}
hir::GenericBound::Outlives(lt) => lt,
_ => panic!(),
});
- let name = bounds.next().unwrap().name.name();
- let mut s = format!("{}: {}", self.name.name(), name);
+ let name = bounds.next().unwrap().name.ident();
+ let mut s = format!("{}: {}", self.name.ident(), name);
for bound in bounds {
- s.push_str(&format!(" + {}", bound.name.name()));
+ s.push_str(&format!(" + {}", bound.name.ident()));
}
s
} else {
- self.name.name().to_string()
+ self.name.ident().to_string()
};
(name, GenericParamDefKind::Lifetime)
}
hir::GenericParamKind::Type { ref default, synthetic, .. } => {
- (self.name.name().clean(cx), GenericParamDefKind::Type {
+ (self.name.ident().name.clean(cx), GenericParamDefKind::Type {
did: cx.tcx.hir.local_def_id(self.id),
bounds: self.bounds.clean(cx),
default: default.clean(cx),
pub values: Vec<Argument>,
}
-impl<'a> Clean<Arguments> for (&'a [P<hir::Ty>], &'a [Spanned<ast::Name>]) {
+impl<'a> Clean<Arguments> for (&'a [hir::Ty], &'a [ast::Ident]) {
fn clean(&self, cx: &DocContext) -> Arguments {
Arguments {
values: self.0.iter().enumerate().map(|(i, ty)| {
- let mut name = self.1.get(i).map(|n| n.node.to_string())
+ let mut name = self.1.get(i).map(|ident| ident.to_string())
.unwrap_or(String::new());
if name.is_empty() {
name = "_".to_string();
}
}
-impl<'a> Clean<Arguments> for (&'a [P<hir::Ty>], hir::BodyId) {
+impl<'a> Clean<Arguments> for (&'a [hir::Ty], hir::BodyId) {
fn clean(&self, cx: &DocContext) -> Arguments {
let body = cx.tcx.hir.body(self.1);
}
impl<'a, A: Copy> Clean<FnDecl> for (&'a hir::FnDecl, A)
- where (&'a [P<hir::Ty>], A): Clean<Arguments>
+ where (&'a [hir::Ty], A): Clean<Arguments>
{
fn clean(&self, cx: &DocContext) -> FnDecl {
FnDecl {
}
};
Item {
- name: Some(self.name.clean(cx)),
+ name: Some(self.ident.name.clean(cx)),
attrs: self.attrs.clean(cx),
source: self.span.clean(cx),
def_id: cx.tcx.hir.local_def_id(self.id),
}, true),
};
Item {
- name: Some(self.name.clean(cx)),
+ name: Some(self.ident.name.clean(cx)),
source: self.span.clean(cx),
attrs: self.attrs.clean(cx),
def_id: cx.tcx.hir.local_def_id(self.id),
}
}
ty::AssociatedKind::Type => {
- let my_name = self.name.clean(cx);
+ let my_name = self.ident.name.clean(cx);
if let ty::TraitContainer(did) = self.container {
// When loading a cross-crate associated type, the bounds for this type
};
Item {
- name: Some(self.name.clean(cx)),
+ name: Some(self.ident.name.clean(cx)),
visibility,
stability: get_stability(cx, self.def_id),
deprecation: get_deprecation(cx, self.def_id),
}
});
if let Some(ty) = type_.cloned() {
- ty_substs.insert(ty_param_def, ty.into_inner().clean(cx));
+ ty_substs.insert(ty_param_def, ty.clean(cx));
} else if let Some(default) = default.clone() {
ty_substs.insert(ty_param_def,
default.into_inner().clean(cx));
segments: segments.into(),
};
Type::QPath {
- name: p.segments.last().unwrap().name.clean(cx),
+ name: p.segments.last().unwrap().ident.name.clean(cx),
self_type: box qself.clean(cx),
trait_: box resolve_type(cx, trait_path.clean(cx), self.id)
}
segments: vec![].into(),
};
Type::QPath {
- name: segment.name.clean(cx),
+ name: segment.ident.name.clean(cx),
self_type: box qself.clean(cx),
trait_: box resolve_type(cx, trait_path.clean(cx), self.id)
}
ty::TySlice(ty) => Slice(box ty.clean(cx)),
ty::TyArray(ty, n) => {
let mut n = cx.tcx.lift(&n).unwrap();
- if let ConstVal::Unevaluated(def_id, substs) = n.val {
+ if let ConstValue::Unevaluated(def_id, substs) = n.val {
let param_env = cx.tcx.param_env(def_id);
let cid = GlobalId {
instance: ty::Instance::new(def_id, substs),
let mut bindings = vec![];
for pb in obj.projection_bounds() {
bindings.push(TypeBinding {
- name: cx.tcx.associated_item(pb.item_def_id()).name.clean(cx),
+ name: cx.tcx.associated_item(pb.item_def_id()).ident.name.clean(cx),
ty: pb.skip_binder().ty.clean(cx)
});
}
if proj.projection_ty.trait_ref(cx.tcx) == *trait_ref.skip_binder() {
Some(TypeBinding {
name: cx.tcx.associated_item(proj.projection_ty.item_def_id)
- .name.clean(cx),
+ .ident.name.clean(cx),
ty: proj.ty.clean(cx),
})
} else {
impl Clean<PathSegment> for hir::PathSegment {
fn clean(&self, cx: &DocContext) -> PathSegment {
PathSegment {
- name: self.name.clean(cx),
+ name: self.ident.name.clean(cx),
args: self.with_generic_args(|generic_args| generic_args.clean(cx))
}
}
fn qpath_to_string(p: &hir::QPath) -> String {
let segments = match *p {
hir::QPath::Resolved(_, ref path) => &path.segments,
- hir::QPath::TypeRelative(_, ref segment) => return segment.name.to_string(),
+ hir::QPath::TypeRelative(_, ref segment) => return segment.ident.to_string(),
};
let mut s = String::new();
if i > 0 {
s.push_str("::");
}
- if seg.name != keywords::CrateRoot.name() {
- s.push_str(&*seg.name.as_str());
+ if seg.ident.name != keywords::CrateRoot.name() {
+ s.push_str(&*seg.ident.as_str());
}
}
s
let provided = trait_.def_id().map(|did| {
cx.tcx.provided_trait_methods(did)
.into_iter()
- .map(|meth| meth.name.to_string())
+ .map(|meth| meth.ident.to_string())
.collect()
}).unwrap_or(FxHashSet());
match p.node {
PatKind::Wild => "_".to_string(),
- PatKind::Binding(_, _, ref p, _) => p.node.to_string(),
+ PatKind::Binding(_, _, ident, _) => ident.to_string(),
PatKind::TupleStruct(ref p, ..) | PatKind::Path(ref p) => qpath_to_string(p),
PatKind::Struct(ref name, ref fields, etc) => {
format!("{} {{ {}{} }}", qpath_to_string(name),
fn print_const(cx: &DocContext, n: &ty::Const) -> String {
match n.val {
- ConstVal::Unevaluated(def_id, _) => {
+ ConstValue::Unevaluated(def_id, _) => {
if let Some(node_id) = cx.tcx.hir.as_local_node_id(def_id) {
print_const_expr(cx, cx.tcx.hir.body_owned_by(node_id))
} else {
inline::print_inlined_const(cx, def_id)
}
},
- ConstVal::Value(..) => {
+ _ => {
let mut s = String::new();
::rustc::mir::fmt_const_val(&mut s, n).unwrap();
// array lengths are obviously usize
impl Clean<TypeBinding> for hir::TypeBinding {
fn clean(&self, cx: &DocContext) -> TypeBinding {
TypeBinding {
- name: self.name.clean(cx),
+ name: self.ident.name.clean(cx),
ty: self.ty.clean(cx)
}
}
span: DUMMY_SP,
def: def_ctor(def_id),
segments: hir::HirVec::from_vec(apb.names.iter().map(|s| hir::PathSegment {
- name: ast::Name::intern(&s),
+ ident: ast::Ident::from_str(&s),
args: None,
infer_types: false,
}).collect())
let intra_link_resolution_failure_name = lint::builtin::INTRA_DOC_LINK_RESOLUTION_FAILURE.name;
let warnings_lint_name = lint::builtin::WARNINGS.name;
let lints = lint::builtin::HardwiredLints.get_lints()
- .iter()
- .chain(rustc_lint::SoftLints.get_lints())
+ .into_iter()
+ .chain(rustc_lint::SoftLints.get_lints().into_iter())
.filter_map(|lint| {
if lint.name == warnings_lint_name ||
lint.name == intra_link_resolution_failure_name {
use std::default::Default;
use std::error;
use std::fmt::{self, Display, Formatter, Write as FmtWrite};
+use std::ffi::OsStr;
use std::fs::{self, File, OpenOptions};
use std::io::prelude::*;
use std::io::{self, BufWriter, BufReader};
// Add all the static files. These may already exist, but we just
// overwrite them anyway to make sure that they're fresh and up-to-date.
- write(cx.dst.join(&format!("rustdoc{}.css", cx.shared.resource_suffix)),
- include_bytes!("static/rustdoc.css"))?;
- write(cx.dst.join(&format!("settings{}.css", cx.shared.resource_suffix)),
- include_bytes!("static/settings.css"))?;
+ write_minify(cx.dst.join(&format!("rustdoc{}.css", cx.shared.resource_suffix)),
+ include_str!("static/rustdoc.css"),
+ enable_minification)?;
+ write_minify(cx.dst.join(&format!("settings{}.css", cx.shared.resource_suffix)),
+ include_str!("static/settings.css"),
+ enable_minification)?;
// To avoid "light.css" to be overwritten, we'll first run over the received themes and only
// then we'll run over the "official" styles.
include_bytes!("static/brush.svg"))?;
write(cx.dst.join(&format!("wheel{}.svg", cx.shared.resource_suffix)),
include_bytes!("static/wheel.svg"))?;
- write(cx.dst.join(&format!("light{}.css", cx.shared.resource_suffix)),
- include_bytes!("static/themes/light.css"))?;
+ write_minify(cx.dst.join(&format!("light{}.css", cx.shared.resource_suffix)),
+ include_str!("static/themes/light.css"),
+ enable_minification)?;
themes.insert("light".to_owned());
- write(cx.dst.join(&format!("dark{}.css", cx.shared.resource_suffix)),
- include_bytes!("static/themes/dark.css"))?;
+ write_minify(cx.dst.join(&format!("dark{}.css", cx.shared.resource_suffix)),
+ include_str!("static/themes/dark.css"),
+ enable_minification)?;
themes.insert("dark".to_owned());
let mut themes: Vec<&String> = themes.iter().collect();
if let Some(ref css) = cx.shared.css_file_extension {
let out = cx.dst.join(&format!("theme{}.css", cx.shared.resource_suffix));
- try_err!(fs::copy(css, out), css);
+ if !enable_minification {
+ try_err!(fs::copy(css, out), css);
+ } else {
+ let mut f = try_err!(File::open(css), css);
+ let mut buffer = String::with_capacity(1000);
+
+ try_err!(f.read_to_string(&mut buffer), css);
+ write_minify(out, &buffer, enable_minification)?;
+ }
}
- write(cx.dst.join(&format!("normalize{}.css", cx.shared.resource_suffix)),
- include_bytes!("static/normalize.css"))?;
+ write_minify(cx.dst.join(&format!("normalize{}.css", cx.shared.resource_suffix)),
+ include_str!("static/normalize.css"),
+ enable_minification)?;
write(cx.dst.join("FiraSans-Regular.woff"),
include_bytes!("static/FiraSans-Regular.woff"))?;
write(cx.dst.join("FiraSans-Medium.woff"),
fn write_minify(dst: PathBuf, contents: &str, enable_minification: bool) -> Result<(), Error> {
if enable_minification {
- write(dst, minifier::js::minify(contents).as_bytes())
+ if dst.extension() == Some(&OsStr::new("css")) {
+ let res = try_none!(minifier::css::minify(contents).ok(), &dst);
+ write(dst, res.as_bytes())
+ } else {
+ write(dst, minifier::js::minify(contents).as_bytes())
+ }
} else {
write(dst, contents.as_bytes())
}
}
pub fn main() {
- const STACK_SIZE: usize = 32_000_000; // 32MB
+ let thread_stack_size: usize = if cfg!(target_os = "haiku") {
+ 16_000_000 // 16MB on Haiku
+ } else {
+ 32_000_000 // 32MB on other platforms
+ };
rustc_driver::set_sigpipe_handler();
env_logger::init();
- let res = std::thread::Builder::new().stack_size(STACK_SIZE).spawn(move || {
+ let res = std::thread::Builder::new().stack_size(thread_stack_size).spawn(move || {
syntax::with_globals(move || {
get_args().map(|args| main_args(&args)).unwrap_or(1)
})
}
fn visit_trait_item(&mut self, item: &'hir hir::TraitItem) {
- self.visit_testable(item.name.to_string(), &item.attrs, |this| {
+ self.visit_testable(item.ident.to_string(), &item.attrs, |this| {
intravisit::walk_trait_item(this, item);
});
}
fn visit_impl_item(&mut self, item: &'hir hir::ImplItem) {
- self.visit_testable(item.name.to_string(), &item.attrs, |this| {
+ self.visit_testable(item.ident.to_string(), &item.attrs, |this| {
intravisit::walk_impl_item(this, item);
});
}
buf <<= 4;
match byte {
- b'A'...b'F' => buf |= byte - b'A' + 10,
- b'a'...b'f' => buf |= byte - b'a' + 10,
- b'0'...b'9' => buf |= byte - b'0',
+ b'A'..=b'F' => buf |= byte - b'A' + 10,
+ b'a'..=b'f' => buf |= byte - b'a' + 10,
+ b'0'..=b'9' => buf |= byte - b'0',
b' '|b'\r'|b'\n'|b'\t' => {
buf >>= 4;
continue
self.bump();
// A leading '0' must be the only digit before the decimal point.
- if let '0' ... '9' = self.ch_or_null() {
+ if let '0' ..= '9' = self.ch_or_null() {
return self.error(InvalidNumber)
}
},
- '1' ... '9' => {
+ '1' ..= '9' => {
while !self.eof() {
match self.ch_or_null() {
- c @ '0' ... '9' => {
+ c @ '0' ..= '9' => {
accum = accum.wrapping_mul(10);
accum = accum.wrapping_add((c as u64) - ('0' as u64));
// Make sure a digit follows the decimal place.
match self.ch_or_null() {
- '0' ... '9' => (),
+ '0' ..= '9' => (),
_ => return self.error(InvalidNumber)
}
let mut dec = 1.0;
while !self.eof() {
match self.ch_or_null() {
- c @ '0' ... '9' => {
+ c @ '0' ..= '9' => {
dec /= 10.0;
res += (((c as isize) - ('0' as isize)) as f64) * dec;
self.bump();
// Make sure a digit follows the exponent place.
match self.ch_or_null() {
- '0' ... '9' => (),
+ '0' ..= '9' => (),
_ => return self.error(InvalidNumber)
}
while !self.eof() {
match self.ch_or_null() {
- c @ '0' ... '9' => {
+ c @ '0' ..= '9' => {
exp *= 10;
exp += (c as usize) - ('0' as usize);
while i < 4 && !self.eof() {
self.bump();
n = match self.ch_or_null() {
- c @ '0' ... '9' => n * 16 + ((c as u16) - ('0' as u16)),
+ c @ '0' ..= '9' => n * 16 + ((c as u16) - ('0' as u16)),
'a' | 'A' => n * 16 + 10,
'b' | 'B' => n * 16 + 11,
'c' | 'C' => n * 16 + 12,
'r' => res.push('\r'),
't' => res.push('\t'),
'u' => match self.decode_hex_escape()? {
- 0xDC00 ... 0xDFFF => {
+ 0xDC00 ..= 0xDFFF => {
return self.error(LoneLeadingSurrogateInHexEscape)
}
// Non-BMP characters are encoded as a sequence of
// two hex escapes, representing UTF-16 surrogates.
- n1 @ 0xD800 ... 0xDBFF => {
+ n1 @ 0xD800 ..= 0xDBFF => {
match (self.next_char(), self.next_char()) {
(Some('\\'), Some('u')) => (),
_ => return self.error(UnexpectedEndOfHexEscape),
'n' => { self.parse_ident("ull", NullValue) }
't' => { self.parse_ident("rue", BooleanValue(true)) }
'f' => { self.parse_ident("alse", BooleanValue(false)) }
- '0' ... '9' | '-' => self.parse_number(),
+ '0' ..= '9' | '-' => self.parse_number(),
'"' => match self.parse_str() {
Ok(s) => StringValue(s),
Err(e) => Error(e),
// except according to those terms.
#[inline]
-pub fn write_to_vec(vec: &mut Vec<u8>, position: usize, byte: u8) {
- if position == vec.len() {
- vec.push(byte);
- } else {
- vec[position] = byte;
- }
+pub fn write_to_vec(vec: &mut Vec<u8>, byte: u8) {
+ vec.push(byte);
}
#[cfg(target_pointer_width = "32")]
macro_rules! impl_write_unsigned_leb128 {
($fn_name:ident, $int_ty:ident) => (
#[inline]
- pub fn $fn_name(out: &mut Vec<u8>, start_position: usize, mut value: $int_ty) -> usize {
- let mut position = start_position;
+ pub fn $fn_name(out: &mut Vec<u8>, mut value: $int_ty) {
for _ in 0 .. leb128_size!($int_ty) {
let mut byte = (value & 0x7F) as u8;
value >>= 7;
byte |= 0x80;
}
- write_to_vec(out, position, byte);
- position += 1;
+ write_to_vec(out, byte);
if value == 0 {
break;
}
}
-
- position - start_position
}
)
}
/// The callback `write` is called once for each position
/// that is to be written to with the byte to be encoded
/// at that position.
-pub fn write_signed_leb128_to<W>(mut value: i128, mut write: W) -> usize
- where W: FnMut(usize, u8)
+pub fn write_signed_leb128_to<W>(mut value: i128, mut write: W)
+ where W: FnMut(u8)
{
- let mut position = 0;
-
loop {
let mut byte = (value as u8) & 0x7f;
value >>= 7;
byte |= 0x80; // Mark this byte to show that more bytes will follow.
}
- write(position, byte);
- position += 1;
+ write(byte);
if !more {
break;
}
}
- position
}
-pub fn write_signed_leb128(out: &mut Vec<u8>, start_position: usize, value: i128) -> usize {
- write_signed_leb128_to(value, |i, v| write_to_vec(out, start_position+i, v))
+pub fn write_signed_leb128(out: &mut Vec<u8>, value: i128) {
+ write_signed_leb128_to(value, |v| write_to_vec(out, v))
}
#[inline]
let mut stream = Vec::new();
for x in 0..62 {
- let pos = stream.len();
- let bytes_written = $write_fn_name(&mut stream, pos, (3u64 << x) as $int_ty);
- assert_eq!(stream.len(), pos + bytes_written);
+ $write_fn_name(&mut stream, (3u64 << x) as $int_ty);
}
let mut position = 0;
let values: Vec<_> = (-500..500).map(|i| i * 0x12345789ABCDEF).collect();
let mut stream = Vec::new();
for &x in &values {
- let pos = stream.len();
- let bytes_written = write_signed_leb128(&mut stream, pos, x);
- assert_eq!(stream.len(), pos + bytes_written);
+ write_signed_leb128(&mut stream, x);
}
let mut pos = 0;
for &x in &values {
#![feature(box_syntax)]
#![feature(core_intrinsics)]
#![feature(specialization)]
+#![feature(never_type)]
#![cfg_attr(test, feature(test))]
pub use self::serialize::{Decoder, Encoder, Decodable, Encodable};
use leb128::{self, read_signed_leb128, write_signed_leb128};
use std::borrow::Cow;
-use std::io::{self, Write};
use serialize;
// -----------------------------------------------------------------------------
// Encoder
// -----------------------------------------------------------------------------
-pub type EncodeResult = io::Result<()>;
+pub type EncodeResult = Result<(), !>;
-pub struct Encoder<'a> {
- pub cursor: &'a mut io::Cursor<Vec<u8>>,
+pub struct Encoder {
+ pub data: Vec<u8>,
}
-impl<'a> Encoder<'a> {
- pub fn new(cursor: &'a mut io::Cursor<Vec<u8>>) -> Encoder<'a> {
- Encoder { cursor: cursor }
+impl Encoder {
+ pub fn new(data: Vec<u8>) -> Encoder {
+ Encoder { data }
}
- pub fn emit_raw_bytes(&mut self, s: &[u8]) -> EncodeResult {
- self.cursor.write_all(s)
+ pub fn into_inner(self) -> Vec<u8> {
+ self.data
}
-}
+ pub fn emit_raw_bytes(&mut self, s: &[u8]) {
+ self.data.extend_from_slice(s);
+ }
+}
macro_rules! write_uleb128 {
($enc:expr, $value:expr, $fun:ident) => {{
- let pos = $enc.cursor.position() as usize;
- let bytes_written = leb128::$fun($enc.cursor.get_mut(), pos, $value);
- $enc.cursor.set_position((pos + bytes_written) as u64);
+ leb128::$fun(&mut $enc.data, $value);
Ok(())
}}
}
macro_rules! write_sleb128 {
($enc:expr, $value:expr) => {{
- let pos = $enc.cursor.position() as usize;
- let bytes_written = write_signed_leb128($enc.cursor.get_mut(), pos, $value as i128);
- $enc.cursor.set_position((pos + bytes_written) as u64);
+ write_signed_leb128(&mut $enc.data, $value as i128);
Ok(())
}}
}
-impl<'a> serialize::Encoder for Encoder<'a> {
- type Error = io::Error;
+impl serialize::Encoder for Encoder {
+ type Error = !;
#[inline]
fn emit_nil(&mut self) -> EncodeResult {
#[inline]
fn emit_u8(&mut self, v: u8) -> EncodeResult {
- let pos = self.cursor.position() as usize;
- leb128::write_to_vec(self.cursor.get_mut(), pos, v);
- self.cursor.set_position((pos + 1) as u64);
+ self.data.push(v);
Ok(())
}
#[inline]
fn emit_str(&mut self, v: &str) -> EncodeResult {
self.emit_usize(v.len())?;
- let _ = self.cursor.write_all(v.as_bytes());
+ self.emit_raw_bytes(v.as_bytes());
Ok(())
}
}
-impl<'a> Encoder<'a> {
+impl Encoder {
#[inline]
pub fn position(&self) -> usize {
- self.cursor.position() as usize
+ self.data.len()
}
}
#[cfg(test)]
mod tests {
use serialize::{Encodable, Decodable};
- use std::io::Cursor;
use std::fmt::Debug;
use super::{Encoder, Decoder};
fn check_round_trip<T: Encodable + Decodable + PartialEq + Debug>(values: Vec<T>) {
- let mut cursor = Cursor::new(Vec::new());
+ let mut encoder = Encoder::new(Vec::new());
for value in &values {
- let mut encoder = Encoder::new(&mut cursor);
Encodable::encode(&value, &mut encoder).unwrap();
}
- let data = cursor.into_inner();
+ let data = encoder.into_inner();
let mut decoder = Decoder::new(&data[..], 0);
for value in values {
/// Calculates the Euclidean modulo (self mod rhs), which is never negative.
///
- /// In particular, the result `n` satisfies `0 <= n < rhs.abs()`.
+ /// In particular, the return value `r` satisfies `0.0 <= r < rhs.abs()` in
+ /// most cases. However, due to a floating point round-off error it can
+ /// result in `r == rhs.abs()`, violating the mathematical definition, if
+ /// `self` is much smaller than `rhs.abs()` in magnitude and `self < 0.0`.
+ /// This result is not an element of the function's codomain, but it is the
+ /// closest floating point number in the real numbers and thus fulfills the
+ /// property `self == self.div_euc(rhs) * rhs + self.mod_euc(rhs)`
+ /// approximately.
///
/// # Examples
///
/// assert_eq!((-a).mod_euc(b), 1.0);
/// assert_eq!(a.mod_euc(-b), 3.0);
/// assert_eq!((-a).mod_euc(-b), 1.0);
+ /// // limitation due to round-off error
+ /// assert!((-std::f32::EPSILON).mod_euc(3.0) != 0.0);
/// ```
#[inline]
#[unstable(feature = "euclidean_division", issue = "49048")]
/// Calculates the Euclidean modulo (self mod rhs), which is never negative.
///
- /// In particular, the result `n` satisfies `0 <= n < rhs.abs()`.
+ /// In particular, the return value `r` satisfies `0.0 <= r < rhs.abs()` in
+ /// most cases. However, due to a floating point round-off error it can
+ /// result in `r == rhs.abs()`, violating the mathematical definition, if
+ /// `self` is much smaller than `rhs.abs()` in magnitude and `self < 0.0`.
+ /// This result is not an element of the function's codomain, but it is the
+ /// closest floating point number in the real numbers and thus fulfills the
+ /// property `self == self.div_euc(rhs) * rhs + self.mod_euc(rhs)`
+ /// approximately.
///
/// # Examples
///
/// assert_eq!((-a).mod_euc(b), 1.0);
/// assert_eq!(a.mod_euc(-b), 3.0);
/// assert_eq!((-a).mod_euc(-b), 1.0);
+ /// // limitation due to round-off error
+ /// assert!((-std::f64::EPSILON).mod_euc(3.0) != 0.0);
/// ```
#[inline]
#[unstable(feature = "euclidean_division", issue = "49048")]
impl<T: Generator<Yield = ()>> Future for GenFuture<T> {
type Output = T::Return;
fn poll(self: PinMut<Self>, cx: &mut task::Context) -> Poll<Self::Output> {
- set_task_cx(cx, || match unsafe { PinMut::get_mut(self).0.resume() } {
+ set_task_cx(cx, || match unsafe { PinMut::get_mut_unchecked(self).0.resume() } {
GeneratorState::Yielded(()) => Poll::Pending,
GeneratorState::Complete(x) => Poll::Ready(x),
})
/// The multi-argument form of this macro panics with a string and has the
/// [`format!`] syntax for building a string.
///
+/// See also the macro [`compile_error!`], for raising errors during compilation.
+///
/// [runwrap]: ../std/result/enum.Result.html#method.unwrap
/// [`Option`]: ../std/option/enum.Option.html#method.unwrap
/// [`Result`]: ../std/result/enum.Result.html
/// [`format!`]: ../std/macro.format.html
+/// [`compile_error!`]: ../std/macro.compile_error.html
/// [book]: ../book/second-edition/ch09-01-unrecoverable-errors-with-panic.html
///
/// # Current implementation
/// Unconditionally causes compilation to fail with the given error message when encountered.
///
/// This macro should be used when a crate uses a conditional compilation strategy to provide
- /// better error messages for erroneous conditions.
+ /// better error messages for erroneous conditions. It's the compile-time analogue of
+ /// [`panic!`], which emits its error at *runtime* rather than during compilation.
///
/// # Examples
///
/// Two such examples are macros and `#[cfg]` environments.
///
- /// Emit better compiler error if a macro is passed invalid values.
+ /// Emit a better compiler error if a macro is passed invalid values. Without the final branch,
+ /// the compiler would still emit an error, but the error's message would not mention the two
+ /// valid values.
///
/// ```compile_fail
/// macro_rules! give_me_foo_or_bar {
/// #[cfg(not(any(feature = "foo", feature = "bar")))]
/// compile_error!("Either feature \"foo\" or \"bar\" must be enabled for this crate.")
/// ```
+ ///
+ /// [`panic!`]: ../std/macro.panic.html
#[stable(feature = "compile_error_macro", since = "1.20.0")]
#[macro_export]
macro_rules! compile_error {
impl<'a, F: Future> Future for AssertUnwindSafe<F> {
type Output = F::Output;
- fn poll(mut self: PinMut<Self>, cx: &mut task::Context) -> Poll<Self::Output> {
- unsafe {
- let pinned_field = PinMut::new_unchecked(
- &mut PinMut::get_mut(self.reborrow()).0
- );
-
- pinned_field.poll(cx)
- }
+ fn poll(self: PinMut<Self>, cx: &mut task::Context) -> Poll<Self::Output> {
+ let pinned_field = unsafe { PinMut::map_unchecked(self, |x| &mut x.0) };
+ pinned_field.poll(cx)
}
}
// (missing things in `libc` which is empty) so just omit everything
// with an empty module
#[unstable(issue = "0", feature = "std_internals")]
+ #[allow(missing_docs)]
pub mod unix_ext {}
} else {
// On other platforms like Windows document the bare bones of unix
cfg_if! {
if #[cfg(windows)] {
// On windows we'll just be documenting what's already available
+ #[allow(missing_docs)]
pub use self::ext as windows_ext;
} else if #[cfg(any(target_os = "cloudabi", target_arch = "wasm32"))] {
// On CloudABI and wasm right now the shim below doesn't compile, so
// just omit it
#[unstable(issue = "0", feature = "std_internals")]
+ #[allow(missing_docs)]
pub mod windows_ext {}
} else {
// On all other platforms (aka linux/osx/etc) then pull in a "minimal"
#[cfg(any(target_os = "linux", target_os = "emscripten", target_os = "l4re"))]
use libc::{stat64, fstat64, lstat64, off64_t, ftruncate64, lseek64, dirent64, readdir64_r, open64};
+#[cfg(any(target_os = "linux", target_os = "emscripten"))]
+use libc::fstatat64;
#[cfg(any(target_os = "linux", target_os = "emscripten", target_os = "android"))]
-use libc::{fstatat, dirfd};
+use libc::dirfd;
#[cfg(target_os = "android")]
-use libc::{stat as stat64, fstat as fstat64, lstat as lstat64, lseek64,
+use libc::{stat as stat64, fstat as fstat64, fstatat as fstatat64, lstat as lstat64, lseek64,
dirent as dirent64, open as open64};
#[cfg(not(any(target_os = "linux",
target_os = "emscripten",
}
#[derive(Clone)]
-pub struct ReadDir(Arc<InnerReadDir>);
+pub struct ReadDir {
+ inner: Arc<InnerReadDir>,
+ end_of_stream: bool,
+}
struct Dir(*mut libc::DIR);
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
// This will only be called from std::fs::ReadDir, which will add a "ReadDir()" frame.
// Thus the result will be e g 'ReadDir("/home")'
- fmt::Debug::fmt(&*self.0.root, f)
+ fmt::Debug::fmt(&*self.inner.root, f)
}
}
// is safe to use in threaded applications and it is generally preferred
// over the readdir_r(3C) function.
super::os::set_errno(0);
- let entry_ptr = libc::readdir(self.0.dirp.0);
+ let entry_ptr = libc::readdir(self.inner.dirp.0);
if entry_ptr.is_null() {
// NULL can mean either the end is reached or an error occurred.
// So we had to clear errno beforehand to check for an error now.
#[cfg(not(any(target_os = "solaris", target_os = "fuchsia")))]
fn next(&mut self) -> Option<io::Result<DirEntry>> {
+ if self.end_of_stream {
+ return None;
+ }
+
unsafe {
let mut ret = DirEntry {
entry: mem::zeroed(),
};
let mut entry_ptr = ptr::null_mut();
loop {
- if readdir64_r(self.0.dirp.0, &mut ret.entry, &mut entry_ptr) != 0 {
+ if readdir64_r(self.inner.dirp.0, &mut ret.entry, &mut entry_ptr) != 0 {
+ if entry_ptr.is_null() {
+ // We encountered an error (which will be returned in this iteration), but
+ // we also reached the end of the directory stream. The `end_of_stream`
+ // flag is enabled to make sure that we return `None` in the next iteration
+ // (instead of looping forever)
+ self.end_of_stream = true;
+ }
return Some(Err(Error::last_os_error()))
}
if entry_ptr.is_null() {
impl DirEntry {
pub fn path(&self) -> PathBuf {
- self.dir.0.root.join(OsStr::from_bytes(self.name_bytes()))
+ self.dir.inner.root.join(OsStr::from_bytes(self.name_bytes()))
}
pub fn file_name(&self) -> OsString {
#[cfg(any(target_os = "linux", target_os = "emscripten", target_os = "android"))]
pub fn metadata(&self) -> io::Result<FileAttr> {
- let fd = cvt(unsafe {dirfd(self.dir.0.dirp.0)})?;
+ let fd = cvt(unsafe {dirfd(self.dir.inner.dirp.0)})?;
let mut stat: stat64 = unsafe { mem::zeroed() };
cvt(unsafe {
- fstatat(fd,
- self.entry.d_name.as_ptr(),
- &mut stat as *mut _ as *mut _,
- libc::AT_SYMLINK_NOFOLLOW)
+ fstatat64(fd, self.entry.d_name.as_ptr(), &mut stat, libc::AT_SYMLINK_NOFOLLOW)
})?;
Ok(FileAttr { stat: stat })
}
Err(Error::last_os_error())
} else {
let inner = InnerReadDir { dirp: Dir(ptr), root };
- Ok(ReadDir(Arc::new(inner)))
+ Ok(ReadDir{
+ inner: Arc::new(inner),
+ end_of_stream: false,
+ })
}
}
}
let p = cstr(p)?;
let mut stat: stat64 = unsafe { mem::zeroed() };
cvt(unsafe {
- stat64(p.as_ptr(), &mut stat as *mut _ as *mut _)
+ stat64(p.as_ptr(), &mut stat)
})?;
Ok(FileAttr { stat: stat })
}
let p = cstr(p)?;
let mut stat: stat64 = unsafe { mem::zeroed() };
cvt(unsafe {
- lstat64(p.as_ptr(), &mut stat as *mut _ as *mut _)
+ lstat64(p.as_ptr(), &mut stat)
})?;
Ok(FileAttr { stat: stat })
}
mod imp {
#[link(name = "zircon")]
extern {
- fn zx_cprng_draw_new(buffer: *mut u8, len: usize) -> i32;
- }
-
- fn getrandom(buf: &mut [u8]) -> Result<usize, i32> {
- unsafe {
- let status = zx_cprng_draw_new(buf.as_mut_ptr(), buf.len());
- if status == 0 {
- Ok(buf.len())
- } else {
- Err(status)
- }
- }
+ fn zx_cprng_draw(buffer: *mut u8, len: usize);
}
pub fn fill_bytes(v: &mut [u8]) {
- let mut buf = v;
- while !buf.is_empty() {
- let ret = getrandom(buf);
- match ret {
- Err(err) => {
- panic!("kernel zx_cprng_draw call failed! (returned {}, buf.len() {})",
- err, buf.len())
- }
- Ok(actual) => {
- let move_buf = buf;
- buf = &mut move_buf[(actual as usize)..];
- }
- }
- }
+ unsafe { zx_cprng_draw(v.as_mut_ptr(), v.len()) }
}
}
let candidate = &s[i + llvm.len()..];
let all_hex = candidate.chars().all(|c| {
match c {
- 'A' ... 'F' | '0' ... '9' => true,
+ 'A' ..= 'F' | '0' ..= '9' => true,
_ => false,
}
});
#[inline]
pub fn from_u32(value: u32) -> Option<CodePoint> {
match value {
- 0 ... 0x10FFFF => Some(CodePoint { value: value }),
+ 0 ..= 0x10FFFF => Some(CodePoint { value: value }),
_ => None
}
}
#[inline]
pub fn to_char(&self) -> Option<char> {
match self.value {
- 0xD800 ... 0xDFFF => None,
+ 0xD800 ..= 0xDFFF => None,
_ => Some(unsafe { char::from_u32_unchecked(self.value) })
}
}
/// like concatenating ill-formed UTF-16 strings effectively would.
#[inline]
pub fn push(&mut self, code_point: CodePoint) {
- if let trail @ 0xDC00...0xDFFF = code_point.to_u32() {
+ if let trail @ 0xDC00..=0xDFFF = code_point.to_u32() {
if let Some(lead) = (&*self).final_lead_surrogate() {
let len_without_lead_surrogate = self.len() - 3;
self.bytes.truncate(len_without_lead_surrogate);
#[inline]
pub fn ascii_byte_at(&self, position: usize) -> u8 {
match self.bytes[position] {
- ascii_byte @ 0x00 ... 0x7F => ascii_byte,
+ ascii_byte @ 0x00 ..= 0x7F => ascii_byte,
_ => 0xFF
}
}
return None
}
match &self.bytes[(len - 3)..] {
- &[0xED, b2 @ 0xA0...0xAF, b3] => Some(decode_surrogate(b2, b3)),
+ &[0xED, b2 @ 0xA0..=0xAF, b3] => Some(decode_surrogate(b2, b3)),
_ => None
}
}
return None
}
match &self.bytes[..3] {
- &[0xED, b2 @ 0xB0...0xBF, b3] => Some(decode_surrogate(b2, b3)),
+ &[0xED, b2 @ 0xB0..=0xBF, b3] => Some(decode_surrogate(b2, b3)),
_ => None
}
}
///
/// This will lazily initialize the value if this thread has not referenced
/// this key yet. If the key has been destroyed (which may happen if this is called
- /// in a destructor), this function will return a `ThreadLocalError`.
+ /// in a destructor), this function will return an [`AccessError`](struct.AccessError.html).
///
/// # Panics
///
/// A literal
Lit(P<Expr>),
/// A range pattern, e.g. `1...2`, `1..=2` or `1..2`
- Range(P<Expr>, P<Expr>, RangeEnd),
+ Range(P<Expr>, P<Expr>, Spanned<RangeEnd>),
/// `[a, b, ..i, y, z]` is represented as:
/// `PatKind::Slice(box [a, b], Some(i), box [y, z])`
Slice(Vec<P<Pat>>, Option<P<Pat>>, Vec<P<Pat>>),
/// An arm of a 'match'.
///
-/// E.g. `0...10 => { println!("match!") }` as in
+/// E.g. `0..=10 => { println!("match!") }` as in
///
/// ```
/// match 123 {
-/// 0...10 => { println!("match!") },
+/// 0..=10 => { println!("match!") },
/// _ => { println!("no match!") },
/// }
/// ```
}
}
- /// Creates a new filemap without setting its line information. If you don't
- /// intend to set the line information yourself, you should use new_filemap_and_lines.
+ /// Creates a new filemap.
/// This does not ensure that only one FileMap exists per file name.
pub fn new_filemap(&self, filename: FileName, src: String) -> Lrc<FileMap> {
let start_pos = self.next_start_pos();
filemap
}
- /// Creates a new filemap and sets its line information.
- /// This does not ensure that only one FileMap exists per file name.
- pub fn new_filemap_and_lines(&self, filename: &Path, src: &str) -> Lrc<FileMap> {
- let fm = self.new_filemap(filename.to_owned().into(), src.to_owned());
- let mut byte_pos: u32 = fm.start_pos.0;
- for line in src.lines() {
- // register the start of this line
- fm.next_line(BytePos(byte_pos));
-
- // update byte_pos to include this line and the \n at the end
- byte_pos += line.len() as u32 + 1;
- }
- fm
- }
-
-
/// Allocates a new FileMap representing a source file from an external
/// crate. The source code of such an "imported filemap" is not available,
/// but we still know enough to generate accurate debuginfo location
external_src: Lock::new(ExternalSource::AbsentOk),
start_pos,
end_pos,
- lines: Lock::new(file_local_lines),
- multibyte_chars: Lock::new(file_local_multibyte_chars),
- non_narrow_chars: Lock::new(file_local_non_narrow_chars),
+ lines: file_local_lines,
+ multibyte_chars: file_local_multibyte_chars,
+ non_narrow_chars: file_local_non_narrow_chars,
name_hash,
});
match self.lookup_line(pos) {
Ok(FileMapAndLine { fm: f, line: a }) => {
let line = a + 1; // Line numbers start at 1
- let linebpos = (*f.lines.borrow())[a];
+ let linebpos = f.lines[a];
let linechpos = self.bytepos_to_file_charpos(linebpos);
let col = chpos - linechpos;
let col_display = {
- let non_narrow_chars = f.non_narrow_chars.borrow();
- let start_width_idx = non_narrow_chars
+ let start_width_idx = f
+ .non_narrow_chars
.binary_search_by_key(&linebpos, |x| x.pos())
.unwrap_or_else(|x| x);
- let end_width_idx = non_narrow_chars
+ let end_width_idx = f
+ .non_narrow_chars
.binary_search_by_key(&pos, |x| x.pos())
.unwrap_or_else(|x| x);
let special_chars = end_width_idx - start_width_idx;
- let non_narrow: usize =
- non_narrow_chars[start_width_idx..end_width_idx]
+ let non_narrow: usize = f
+ .non_narrow_chars[start_width_idx..end_width_idx]
.into_iter()
.map(|x| x.width())
.sum();
}
Err(f) => {
let col_display = {
- let non_narrow_chars = f.non_narrow_chars.borrow();
- let end_width_idx = non_narrow_chars
+ let end_width_idx = f
+ .non_narrow_chars
.binary_search_by_key(&pos, |x| x.pos())
.unwrap_or_else(|x| x);
- let non_narrow: usize =
- non_narrow_chars[0..end_width_idx]
+ let non_narrow: usize = f
+ .non_narrow_chars[0..end_width_idx]
.into_iter()
.map(|x| x.width())
.sum();
// The number of extra bytes due to multibyte chars in the FileMap
let mut total_extra_bytes = 0;
- for mbc in map.multibyte_chars.borrow().iter() {
+ for mbc in map.multibyte_chars.iter() {
debug!("{}-byte char at {:?}", mbc.bytes, mbc.pos);
if mbc.pos < bpos {
// every character is at least one byte, so we only
// count the actual extra bytes.
- total_extra_bytes += mbc.bytes - 1;
+ total_extra_bytes += mbc.bytes as u32 - 1;
// We should never see a byte position in the middle of a
// character
- assert!(bpos.to_usize() >= mbc.pos.to_usize() + mbc.bytes);
+ assert!(bpos.to_u32() >= mbc.pos.to_u32() + mbc.bytes as u32);
} else {
break;
}
}
- assert!(map.start_pos.to_usize() + total_extra_bytes <= bpos.to_usize());
- CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes)
+ assert!(map.start_pos.to_u32() + total_extra_bytes <= bpos.to_u32());
+ CharPos(bpos.to_usize() - map.start_pos.to_usize() - total_extra_bytes as usize)
}
// Return the index of the filemap (in self.files) which contains pos.
#[cfg(test)]
mod tests {
use super::*;
- use std::borrow::Cow;
use rustc_data_structures::sync::Lrc;
- #[test]
- fn t1 () {
- let cm = CodeMap::new(FilePathMapping::empty());
- let fm = cm.new_filemap(PathBuf::from("blork.rs").into(),
- "first line.\nsecond line".to_string());
- fm.next_line(BytePos(0));
- // Test we can get lines with partial line info.
- assert_eq!(fm.get_line(0), Some(Cow::from("first line.")));
- // TESTING BROKEN BEHAVIOR: line break declared before actual line break.
- fm.next_line(BytePos(10));
- assert_eq!(fm.get_line(1), Some(Cow::from(".")));
- fm.next_line(BytePos(12));
- assert_eq!(fm.get_line(2), Some(Cow::from("second line")));
- }
-
- #[test]
- #[should_panic]
- fn t2 () {
- let cm = CodeMap::new(FilePathMapping::empty());
- let fm = cm.new_filemap(PathBuf::from("blork.rs").into(),
- "first line.\nsecond line".to_string());
- // TESTING *REALLY* BROKEN BEHAVIOR:
- fm.next_line(BytePos(0));
- fm.next_line(BytePos(10));
- fm.next_line(BytePos(2));
- }
-
fn init_code_map() -> CodeMap {
let cm = CodeMap::new(FilePathMapping::empty());
- let fm1 = cm.new_filemap(PathBuf::from("blork.rs").into(),
- "first line.\nsecond line".to_string());
- let fm2 = cm.new_filemap(PathBuf::from("empty.rs").into(),
- "".to_string());
- let fm3 = cm.new_filemap(PathBuf::from("blork2.rs").into(),
- "first line.\nsecond line".to_string());
-
- fm1.next_line(BytePos(0));
- fm1.next_line(BytePos(12));
- fm2.next_line(fm2.start_pos);
- fm3.next_line(fm3.start_pos);
- fm3.next_line(fm3.start_pos + BytePos(12));
-
+ cm.new_filemap(PathBuf::from("blork.rs").into(),
+ "first line.\nsecond line".to_string());
+ cm.new_filemap(PathBuf::from("empty.rs").into(),
+ "".to_string());
+ cm.new_filemap(PathBuf::from("blork2.rs").into(),
+ "first line.\nsecond line".to_string());
cm
}
fn init_code_map_mbc() -> CodeMap {
let cm = CodeMap::new(FilePathMapping::empty());
// € is a three byte utf8 char.
- let fm1 =
- cm.new_filemap(PathBuf::from("blork.rs").into(),
- "fir€st €€€€ line.\nsecond line".to_string());
- let fm2 = cm.new_filemap(PathBuf::from("blork2.rs").into(),
- "first line€€.\n€ second line".to_string());
-
- fm1.next_line(BytePos(0));
- fm1.next_line(BytePos(28));
- fm2.next_line(fm2.start_pos);
- fm2.next_line(fm2.start_pos + BytePos(20));
-
- fm1.record_multibyte_char(BytePos(3), 3);
- fm1.record_multibyte_char(BytePos(9), 3);
- fm1.record_multibyte_char(BytePos(12), 3);
- fm1.record_multibyte_char(BytePos(15), 3);
- fm1.record_multibyte_char(BytePos(18), 3);
- fm2.record_multibyte_char(fm2.start_pos + BytePos(10), 3);
- fm2.record_multibyte_char(fm2.start_pos + BytePos(13), 3);
- fm2.record_multibyte_char(fm2.start_pos + BytePos(18), 3);
-
+ cm.new_filemap(PathBuf::from("blork.rs").into(),
+ "fir€st €€€€ line.\nsecond line".to_string());
+ cm.new_filemap(PathBuf::from("blork2.rs").into(),
+ "first line€€.\n€ second line".to_string());
cm
}
let cm = CodeMap::new(FilePathMapping::empty());
let inputtext = "aaaaa\nbbbbBB\nCCC\nDDDDDddddd\neee\n";
let selection = " \n ~~\n~~~\n~~~~~ \n \n";
- cm.new_filemap_and_lines(Path::new("blork.rs"), inputtext);
+ cm.new_filemap(Path::new("blork.rs").to_owned().into(), inputtext.to_string());
let span = span_from_selection(inputtext, selection);
// check that we are extracting the text we thought we were extracting
let inputtext = "bbbb BB\ncc CCC\n";
let selection1 = " ~~\n \n";
let selection2 = " \n ~~~\n";
- cm.new_filemap_and_lines(Path::new("blork.rs"), inputtext);
+ cm.new_filemap(Path::new("blork.rs").to_owned().into(), inputtext.to_owned());
let span1 = span_from_selection(inputtext, selection1);
let span2 = span_from_selection(inputtext, selection2);
/// Whether the contents of the macro can use `unsafe`
/// without triggering the `unsafe_code` lint.
allow_internal_unsafe: bool,
+ /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`)
+ /// for a given macro.
+ local_inner_macros: bool,
/// The macro's feature name if it is unstable, and the stability feature
unstable_feature: Option<(Symbol, u32)>,
/// Edition of the crate in which the macro is defined
format: ExpnFormat::MacroAttribute(Symbol::intern(&pretty_name)),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
format: MacroAttribute(Symbol::intern(&format!("{}", attr.path))),
allow_internal_unstable: false,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: ext.edition(),
});
def_site_span: Option<Span>,
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
// can't infer this type
unstable_feature: Option<(Symbol, u32)>,
edition| {
format: macro_bang_format(path),
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
edition,
});
Ok(())
let opt_expanded = match *ext {
DeclMacro(ref expand, def_span, edition) => {
if let Err(dummy_span) = validate_and_set_expn_info(self, def_span.map(|(_, s)| s),
- false, false, None,
+ false, false, false, None,
edition) {
dummy_span
} else {
def_info,
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
unstable_feature,
edition,
} => {
if let Err(dummy_span) = validate_and_set_expn_info(self, def_info.map(|(_, s)| s),
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
unstable_feature,
edition) {
dummy_span
format: macro_bang_format(path),
allow_internal_unstable,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
// FIXME probably want to follow macro_rules macros here.
allow_internal_unstable,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition,
});
format: MacroAttribute(pretty_name),
allow_internal_unstable: false,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: ext.edition(),
};
match String::from_utf8(buf) {
Ok(src) => {
+ let src_interned = Symbol::intern(&src);
+
// Add this input file to the code map to make it available as
// dependency information
- self.cx.codemap().new_filemap_and_lines(&filename, &src);
+ self.cx.codemap().new_filemap(filename.into(), src);
let include_info = vec![
dummy_spanned(ast::NestedMetaItemKind::MetaItem(
dummy_spanned(file)))),
dummy_spanned(ast::NestedMetaItemKind::MetaItem(
attr::mk_name_value_item_str(Ident::from_str("contents"),
- dummy_spanned(Symbol::intern(&src))))),
+ dummy_spanned(src_interned)))),
];
let include_ident = Ident::from_str("include");
};
match String::from_utf8(bytes) {
Ok(src) => {
+ let interned_src = Symbol::intern(&src);
+
// Add this input file to the code map to make it available as
// dependency information
- cx.codemap().new_filemap_and_lines(&file, &src);
+ cx.codemap().new_filemap(file.into(), src);
- base::MacEager::expr(cx.expr_str(sp, Symbol::intern(&src)))
+ base::MacEager::expr(cx.expr_str(sp, interned_src))
}
Err(_) => {
cx.span_err(sp,
Ok(..) => {
// Add this input file to the code map to make it available as
// dependency information, but don't enter it's contents
- cx.codemap().new_filemap_and_lines(&file, "");
+ cx.codemap().new_filemap(file.into(), "".to_string());
base::MacEager::expr(cx.expr_lit(sp, ast::LitKind::ByteStr(Lrc::new(bytes))))
}
if body.legacy {
let allow_internal_unstable = attr::contains_name(&def.attrs, "allow_internal_unstable");
let allow_internal_unsafe = attr::contains_name(&def.attrs, "allow_internal_unsafe");
+ let mut local_inner_macros = false;
+ if let Some(macro_export) = attr::find_by_name(&def.attrs, "macro_export") {
+ if let Some(l) = macro_export.meta_item_list() {
+ local_inner_macros = attr::list_contains_name(&l, "local_inner_macros");
+ }
+ }
let unstable_feature = attr::find_stability(&sess.span_diagnostic,
&def.attrs, def.span).and_then(|stability| {
def_info: Some((def.id, def.span)),
allow_internal_unstable,
allow_internal_unsafe,
+ local_inner_macros,
unstable_feature,
edition,
}
use rustc_target::spec::abi::Abi;
use ast::{self, NodeId, PatKind, RangeEnd};
use attr;
+use codemap::Spanned;
use edition::{ALL_EDITIONS, Edition};
use syntax_pos::{Span, DUMMY_SP};
use errors::{DiagnosticBuilder, Handler, FatalError};
// Scoped attributes
(active, tool_attributes, "1.25.0", Some(44690), None),
+ // allow irrefutable patterns in if-let and while-let statements (RFC 2086)
+ (active, irrefutable_let_patterns, "1.27.0", Some(44495), None),
+
// Allows use of the :literal macro fragment specifier (RFC 1576)
(active, macro_literal_matcher, "1.27.0", Some(35625), None),
":literal fragment specifier is experimental and subject to change";
pub const EXPLAIN_UNSIZED_TUPLE_COERCION: &'static str =
- "Unsized tuple coercion is not stable enough for use and is subject to change";
+ "unsized tuple coercion is not stable enough for use and is subject to change";
pub const EXPLAIN_MACRO_AT_MOST_ONCE_REP: &'static str =
- "Using the `?` macro Kleene operator for \"at most one\" repetition is unstable";
+ "using the `?` macro Kleene operator for \"at most one\" repetition is unstable";
pub const EXPLAIN_MACROS_IN_EXTERN: &'static str =
- "Macro invocations in `extern {}` blocks are experimental.";
+ "macro invocations in `extern {}` blocks are experimental.";
// mention proc-macros when enabled
pub const EXPLAIN_PROC_MACROS_IN_EXTERN: &'static str =
- "Macro and proc-macro invocations in `extern {}` blocks are experimental.";
+ "macro and proc-macro invocations in `extern {}` blocks are experimental.";
struct PostExpansionVisitor<'a> {
context: &'a Context<'a>,
pattern.span,
"box pattern syntax is experimental");
}
- PatKind::Range(_, _, RangeEnd::Excluded) => {
+ PatKind::Range(_, _, Spanned { node: RangeEnd::Excluded, .. }) => {
gate_feature_post!(&self, exclusive_range_pattern, pattern.span,
"exclusive range pattern syntax is experimental");
}
}
PatKind::Box(inner) => PatKind::Box(folder.fold_pat(inner)),
PatKind::Ref(inner, mutbl) => PatKind::Ref(folder.fold_pat(inner), mutbl),
- PatKind::Range(e1, e2, end) => {
+ PatKind::Range(e1, e2, Spanned { span, node: end }) => {
PatKind::Range(folder.fold_expr(e1),
folder.fold_expr(e2),
- folder.fold_range_end(end))
+ Spanned { span, node: folder.fold_range_end(end) })
},
PatKind::Slice(before, slice, after) => {
PatKind::Slice(before.move_map(|x| folder.fold_pat(x)),
let mut lines: Vec<String> = Vec::new();
// Count the number of chars since the start of the line by rescanning.
- let mut src_index = rdr.src_index(rdr.filemap.line_begin_pos());
+ let mut src_index = rdr.src_index(rdr.filemap.line_begin_pos(rdr.pos));
let end_src_index = rdr.src_index(rdr.pos);
- assert!(src_index <= end_src_index);
+ assert!(src_index <= end_src_index,
+ "src_index={}, end_src_index={}, line_begin_pos={}",
+ src_index, end_src_index, rdr.filemap.line_begin_pos(rdr.pos).to_u32());
let mut n = 0;
while src_index < end_src_index {
let c = char_at(&rdr.src, src_index);
pub ch: Option<char>,
pub filemap: Lrc<syntax_pos::FileMap>,
/// Stop reading src at this index.
- end_src_index: usize,
- /// Whether to record new-lines and multibyte chars in filemap.
- /// This is only necessary the first time a filemap is lexed.
- /// If part of a filemap is being re-lexed, this should be set to false.
- save_new_lines_and_multibyte: bool,
+ pub end_src_index: usize,
// cached:
peek_tok: token::Token,
peek_span: Span,
ch: Some('\n'),
filemap,
end_src_index: src.len(),
- save_new_lines_and_multibyte: true,
// dummy values; not read
peek_tok: token::Eof,
peek_span: syntax_pos::DUMMY_SP,
let mut sr = StringReader::new_raw_internal(sess, begin.fm, None);
// Seek the lexer to the right byte range.
- sr.save_new_lines_and_multibyte = false;
sr.next_pos = span.lo();
sr.end_src_index = sr.src_index(span.hi());
/// Pushes a character to a message string for error reporting
fn push_escaped_char_for_msg(m: &mut String, c: char) {
match c {
- '\u{20}'...'\u{7e}' => {
+ '\u{20}'..='\u{7e}' => {
// Don't escape \, ' or " for user-facing messages
m.push(c);
}
let next_ch = char_at(&self.src, next_src_index);
let next_ch_len = next_ch.len_utf8();
- if self.ch.unwrap() == '\n' {
- if self.save_new_lines_and_multibyte {
- self.filemap.next_line(self.next_pos);
- }
- }
- if next_ch_len > 1 {
- if self.save_new_lines_and_multibyte {
- self.filemap.record_multibyte_char(self.next_pos, next_ch_len);
- }
- }
- self.filemap.record_width(self.next_pos, next_ch);
-
self.ch = Some(next_ch);
self.pos = self.next_pos;
self.next_pos = self.next_pos + Pos::from_usize(next_ch_len);
base = 16;
num_digits = self.scan_digits(16, 16);
}
- '0'...'9' | '_' | '.' | 'e' | 'E' => {
+ '0'..='9' | '_' | '.' | 'e' | 'E' => {
num_digits = self.scan_digits(10, 10) + 1;
}
_ => {
_ => panic!("can only parse `..`/`...`/`..=` for ranges \
(checked above)"),
};
+ let op_span = self.span;
// Parse range
let span = lo.to(self.prev_span);
let begin = self.mk_expr(span, ExprKind::Path(qself, path), ThinVec::new());
self.bump();
let end = self.parse_pat_range_end()?;
- pat = PatKind::Range(begin, end, end_kind);
+ let op = Spanned { span: op_span, node: end_kind };
+ pat = PatKind::Range(begin, end, op);
}
token::OpenDelim(token::Brace) => {
if qself.is_some() {
// Try to parse everything else as literal with optional minus
match self.parse_literal_maybe_minus() {
Ok(begin) => {
- if self.eat(&token::DotDotDot) {
+ let op_span = self.span;
+ if self.check(&token::DotDot) || self.check(&token::DotDotEq) ||
+ self.check(&token::DotDotDot) {
+ let end_kind = if self.eat(&token::DotDotDot) {
+ RangeEnd::Included(RangeSyntax::DotDotDot)
+ } else if self.eat(&token::DotDotEq) {
+ RangeEnd::Included(RangeSyntax::DotDotEq)
+ } else if self.eat(&token::DotDot) {
+ RangeEnd::Excluded
+ } else {
+ panic!("impossible case: we already matched \
+ on a range-operator token")
+ };
let end = self.parse_pat_range_end()?;
- pat = PatKind::Range(begin, end,
- RangeEnd::Included(RangeSyntax::DotDotDot));
- } else if self.eat(&token::DotDotEq) {
- let end = self.parse_pat_range_end()?;
- pat = PatKind::Range(begin, end,
- RangeEnd::Included(RangeSyntax::DotDotEq));
- } else if self.eat(&token::DotDot) {
- let end = self.parse_pat_range_end()?;
- pat = PatKind::Range(begin, end, RangeEnd::Excluded);
+ let op = Spanned { span: op_span, node: end_kind };
+ pat = PatKind::Range(begin, end, op);
} else {
pat = PatKind::Lit(begin);
}
if !allow_range_pat {
match pat.node {
- PatKind::Range(_, _, RangeEnd::Included(RangeSyntax::DotDotDot)) => {}
+ PatKind::Range(
+ _, _, Spanned { node: RangeEnd::Included(RangeSyntax::DotDotDot), .. }
+ ) => {},
PatKind::Range(..) => {
let mut err = self.struct_span_err(
pat.span,
fn err_dotdotdot_syntax(&self, span: Span) {
self.diagnostic().struct_span_err(span, {
- "`...` syntax cannot be used in expressions"
- }).help({
- "Use `..` if you need an exclusive range (a < b)"
- }).help({
- "or `..=` if you need an inclusive range (a <= b)"
- }).emit();
+ "unexpected token: `...`"
+ }).span_suggestion_with_applicability(
+ span, "use `..` for an exclusive range", "..".to_owned(),
+ Applicability::MaybeIncorrect
+ ).span_suggestion_with_applicability(
+ span, "or `..=` for an inclusive range", "..=".to_owned(),
+ Applicability::MaybeIncorrect
+ ).emit();
}
// Parse bounds of a type parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
match self.token {
token::Ident(ident, false) if ident.name == keywords::Underscore.name() => {
self.bump(); // `_`
- Ok(Some(Ident::new(ident.name.gensymed(), ident.span)))
+ Ok(Some(ident.gensym()))
}
_ => self.parse_ident().map(Some),
}
use ast::{Attribute, MacDelimiter, GenericArg};
use util::parser::{self, AssocOp, Fixity};
use attr;
-use codemap::{self, CodeMap};
+use codemap::{self, CodeMap, Spanned};
use syntax_pos::{self, BytePos};
use syntax_pos::hygiene::{Mark, SyntaxContext};
use parse::token::{self, BinOpToken, Token};
self.print_pat(inner)?;
}
PatKind::Lit(ref e) => self.print_expr(&**e)?,
- PatKind::Range(ref begin, ref end, ref end_kind) => {
+ PatKind::Range(ref begin, ref end, Spanned { node: ref end_kind, .. }) => {
self.print_expr(begin)?;
self.s.space()?;
match *end_kind {
format: MacroAttribute(Symbol::intern("std_inject")),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
sp.with_ctxt(SyntaxContext::empty().apply_mark(mark))
format: MacroAttribute(Symbol::intern("test")),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
let output = Arc::new(Mutex::new(Vec::new()));
let code_map = Lrc::new(CodeMap::new(FilePathMapping::empty()));
- code_map.new_filemap_and_lines(Path::new("test.rs"), &file_text);
+ code_map.new_filemap(Path::new("test.rs").to_owned().into(), file_text.to_owned());
let primary_span = make_span(&file_text, &span_labels[0].start, &span_labels[0].end);
let mut msp = MultiSpan::from_span(primary_span);
if let Start = state {
match c {
- '1'...'9' => {
+ '1'..='9' => {
let end = at_next_cp_while(next, is_digit);
match end.next_cp() {
// Yes, this *is* the parameter.
state = WidthArg;
move_to!(next);
},
- '1' ... '9' => {
+ '1' ..= '9' => {
let end = at_next_cp_while(next, is_digit);
state = Prec;
width = Some(Num::from_str(at.slice_between(end).unwrap(), None));
}
}
},
- '0' ... '9' => {
+ '0' ..= '9' => {
let end = at_next_cp_while(next, is_digit);
state = Length;
precision = Some(Num::from_str(at.slice_between(end).unwrap(), None));
fn is_digit(c: char) -> bool {
match c {
- '0' ... '9' => true,
+ '0' ..= '9' => true,
_ => false
}
}
let start = s.find('$')?;
match s[start+1..].chars().next()? {
'$' => return Some((Substitution::Escape, &s[start+2..])),
- c @ '0' ... '9' => {
+ c @ '0' ..= '9' => {
let n = (c as u8) - b'0';
return Some((Substitution::Ordinal(n), &s[start+2..]));
},
fn is_ident_head(c: char) -> bool {
match c {
- 'a' ... 'z' | 'A' ... 'Z' | '_' => true,
+ 'a' ..= 'z' | 'A' ..= 'Z' | '_' => true,
_ => false
}
}
fn is_ident_tail(c: char) -> bool {
match c {
- '0' ... '9' => true,
+ '0' ..= '9' => true,
c => is_ident_head(c)
}
}
def_info: None,
allow_internal_unstable: false,
allow_internal_unsafe: false,
+ local_inner_macros: false,
unstable_feature: None,
edition: hygiene::default_edition(),
});
def_info: None,
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
unstable_feature: None,
edition: hygiene::default_edition(),
});
format: MacroAttribute(Symbol::intern("proc_macro")),
allow_internal_unstable: true,
allow_internal_unsafe: false,
+ local_inner_macros: false,
edition: hygiene::default_edition(),
});
let span = DUMMY_SP.apply_mark(mark);
arena = { path = "../libarena" }
scoped-tls = { version = "0.1.1", features = ["nightly"] }
unicode-width = "0.1.4"
+cfg-if = "0.1.2"
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+use unicode_width::UnicodeWidthChar;
+use super::*;
+
+/// Find all newlines, multi-byte characters, and non-narrow characters in a
+/// FileMap.
+///
+/// This function will use an SSE2 enhanced implementation if hardware support
+/// is detected at runtime.
+///
+/// Returns `(lines, multi_byte_chars, non_narrow_chars)`. All positions in the
+/// returned vectors are absolute, i.e. offset by `filemap_start_pos` (see the
+/// `output_offset_all` test case below).
+pub fn analyze_filemap(
+    src: &str,
+    filemap_start_pos: BytePos)
+    -> (Vec<BytePos>, Vec<MultiByteChar>, Vec<NonNarrowChar>)
+{
+    // Every filemap has at least one line, starting at its start position.
+    let mut lines = vec![filemap_start_pos];
+    let mut multi_byte_chars = vec![];
+    let mut non_narrow_chars = vec![];
+
+    // Calls the right implementation, depending on hardware support available.
+    analyze_filemap_dispatch(src,
+                             filemap_start_pos,
+                             &mut lines,
+                             &mut multi_byte_chars,
+                             &mut non_narrow_chars);
+
+    // The code above optimistically registers a new line *after* each \n
+    // it encounters. If that point is already outside the filemap, remove
+    // it again.
+    if let Some(&last_line_start) = lines.last() {
+        let file_map_end = filemap_start_pos + BytePos::from_usize(src.len());
+        assert!(file_map_end >= last_line_start);
+        if last_line_start == file_map_end {
+            lines.pop();
+        }
+    }
+
+    (lines, multi_byte_chars, non_narrow_chars)
+}
+
+cfg_if! {
+    if #[cfg(all(any(target_arch = "x86", target_arch = "x86_64"),
+                 not(stage0)))] {
+        // Use the SSE2 implementation when the CPU supports it at runtime,
+        // otherwise fall back to the generic byte-by-byte scan.
+        fn analyze_filemap_dispatch(src: &str,
+                                    filemap_start_pos: BytePos,
+                                    lines: &mut Vec<BytePos>,
+                                    multi_byte_chars: &mut Vec<MultiByteChar>,
+                                    non_narrow_chars: &mut Vec<NonNarrowChar>) {
+            if is_x86_feature_detected!("sse2") {
+                unsafe {
+                    analyze_filemap_sse2(src,
+                                         filemap_start_pos,
+                                         lines,
+                                         multi_byte_chars,
+                                         non_narrow_chars);
+                }
+            } else {
+                analyze_filemap_generic(src,
+                                        src.len(),
+                                        filemap_start_pos,
+                                        lines,
+                                        multi_byte_chars,
+                                        non_narrow_chars);
+
+            }
+        }
+
+        /// Check 16 byte chunks of text at a time. If the chunk contains
+        /// something other than printable ASCII characters and newlines, the
+        /// function falls back to the generic implementation. Otherwise it uses
+        /// SSE2 intrinsics to quickly find all newlines.
+        #[target_feature(enable = "sse2")]
+        unsafe fn analyze_filemap_sse2(src: &str,
+                                       output_offset: BytePos,
+                                       lines: &mut Vec<BytePos>,
+                                       multi_byte_chars: &mut Vec<MultiByteChar>,
+                                       non_narrow_chars: &mut Vec<NonNarrowChar>) {
+            #[cfg(target_arch = "x86")]
+            use std::arch::x86::*;
+            #[cfg(target_arch = "x86_64")]
+            use std::arch::x86_64::*;
+
+            const CHUNK_SIZE: usize = 16;
+
+            let src_bytes = src.as_bytes();
+
+            let chunk_count = src.len() / CHUNK_SIZE;
+
+            // This variable keeps track of where we should start decoding a
+            // chunk. If a multi-byte character spans across chunk boundaries,
+            // we need to skip that part in the next chunk because we already
+            // handled it.
+            let mut intra_chunk_offset = 0;
+
+            for chunk_index in 0 .. chunk_count {
+                let ptr = src_bytes.as_ptr() as *const __m128i;
+                // We don't know if the pointer is aligned to 16 bytes, so we
+                // use `loadu`, which supports unaligned loading.
+                let chunk = _mm_loadu_si128(ptr.offset(chunk_index as isize));
+
+                // For each character in the chunk, see if its byte value is < 0
+                // (i.e. the high bit is set), which indicates that it's part of
+                // a multi-byte UTF-8 character.
+                let multibyte_test = _mm_cmplt_epi8(chunk, _mm_set1_epi8(0));
+                // Create a bit mask from the comparison results.
+                let multibyte_mask = _mm_movemask_epi8(multibyte_test);
+
+                // If the bit mask is all zero, we only have ASCII chars here:
+                if multibyte_mask == 0 {
+                    assert!(intra_chunk_offset == 0);
+
+                    // Check if there are any control characters in the chunk. All
+                    // control characters that we can encounter at this point have a
+                    // byte value less than 32 or ...
+                    let control_char_test0 = _mm_cmplt_epi8(chunk, _mm_set1_epi8(32));
+                    let control_char_mask0 = _mm_movemask_epi8(control_char_test0);
+
+                    // ... it's the ASCII 'DEL' character with a value of 127.
+                    let control_char_test1 = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(127));
+                    let control_char_mask1 = _mm_movemask_epi8(control_char_test1);
+
+                    let control_char_mask = control_char_mask0 | control_char_mask1;
+
+                    if control_char_mask != 0 {
+                        // Check for newlines in the chunk
+                        let newlines_test = _mm_cmpeq_epi8(chunk, _mm_set1_epi8(b'\n' as i8));
+                        let newlines_mask = _mm_movemask_epi8(newlines_test);
+
+                        if control_char_mask == newlines_mask {
+                            // All control characters are newlines, record them
+                            // Setting the upper 16 bits makes `trailing_zeros`
+                            // below yield >= CHUNK_SIZE once all newline bits
+                            // have been consumed, terminating the loop.
+                            let mut newlines_mask = 0xFFFF0000 | newlines_mask as u32;
+                            // The `+ 1` is because a line begins *after* its
+                            // preceding newline byte.
+                            let output_offset = output_offset +
+                                BytePos::from_usize(chunk_index * CHUNK_SIZE + 1);
+
+                            loop {
+                                let index = newlines_mask.trailing_zeros();
+
+                                if index >= CHUNK_SIZE as u32 {
+                                    // We have arrived at the end of the chunk.
+                                    break
+                                }
+
+                                lines.push(BytePos(index) + output_offset);
+
+                                // Clear the bit, so we can find the next one.
+                                newlines_mask &= (!1) << index;
+                            }
+
+                            // We are done for this chunk. All control characters were
+                            // newlines and we took care of those.
+                            continue
+                        } else {
+                            // Some of the control characters are not newlines,
+                            // fall through to the slow path below.
+                        }
+                    } else {
+                        // No control characters, nothing to record for this chunk
+                        continue
+                    }
+                }
+
+                // The slow path.
+                // There are control chars in here, fallback to generic decoding.
+                let scan_start = chunk_index * CHUNK_SIZE + intra_chunk_offset;
+                intra_chunk_offset = analyze_filemap_generic(
+                    &src[scan_start .. ],
+                    CHUNK_SIZE - intra_chunk_offset,
+                    BytePos::from_usize(scan_start) + output_offset,
+                    lines,
+                    multi_byte_chars,
+                    non_narrow_chars
+                );
+            }
+
+            // There might still be a tail left to analyze
+            let tail_start = chunk_count * CHUNK_SIZE + intra_chunk_offset;
+            if tail_start < src.len() {
+                analyze_filemap_generic(&src[tail_start as usize ..],
+                                        src.len() - tail_start,
+                                        output_offset + BytePos::from_usize(tail_start),
+                                        lines,
+                                        multi_byte_chars,
+                                        non_narrow_chars);
+            }
+        }
+    } else {
+
+        // The target (or compiler version) does not support SSE2 ...
+        fn analyze_filemap_dispatch(src: &str,
+                                    filemap_start_pos: BytePos,
+                                    lines: &mut Vec<BytePos>,
+                                    multi_byte_chars: &mut Vec<MultiByteChar>,
+                                    non_narrow_chars: &mut Vec<NonNarrowChar>) {
+            analyze_filemap_generic(src,
+                                    src.len(),
+                                    filemap_start_pos,
+                                    lines,
+                                    multi_byte_chars,
+                                    non_narrow_chars);
+        }
+    }
+}
+
+// `scan_len` determines the number of bytes in `src` to scan. Note that the
+// function can read past `scan_len` if a multi-byte character starts within
+// the range but extends past it. The overflow is returned by the function:
+// the result is the number of bytes past `scan_len` that were consumed
+// (zero if the last character ended exactly at the boundary).
+fn analyze_filemap_generic(src: &str,
+                           scan_len: usize,
+                           output_offset: BytePos,
+                           lines: &mut Vec<BytePos>,
+                           multi_byte_chars: &mut Vec<MultiByteChar>,
+                           non_narrow_chars: &mut Vec<NonNarrowChar>)
+    -> usize
+{
+    assert!(src.len() >= scan_len);
+    let mut i = 0;
+    let src_bytes = src.as_bytes();
+
+    while i < scan_len {
+        let byte = unsafe {
+            // We verified that i < scan_len <= src.len()
+            *src_bytes.get_unchecked(i as usize)
+        };
+
+        // How much to advance in order to get to the next UTF-8 char in the
+        // string.
+        let mut char_len = 1;
+
+        if byte < 32 {
+            // This is an ASCII control character, it could be one of the cases
+            // that are interesting to us.
+
+            let pos = BytePos::from_usize(i) + output_offset;
+
+            match byte {
+                b'\n' => {
+                    // A line begins *after* the newline byte, hence the `+ 1`.
+                    // (The caller removes a trailing entry that falls past the
+                    // end of the filemap.)
+                    lines.push(pos + BytePos(1));
+                }
+                b'\t' => {
+                    non_narrow_chars.push(NonNarrowChar::Tab(pos));
+                }
+                _ => {
+                    non_narrow_chars.push(NonNarrowChar::ZeroWidth(pos));
+                }
+            }
+        } else if byte >= 127 {
+            // The slow path:
+            // This is either ASCII control character "DEL" or the beginning of
+            // a multibyte char. Just decode to `char`.
+            let c = (&src[i..]).chars().next().unwrap();
+            char_len = c.len_utf8();
+
+            let pos = BytePos::from_usize(i) + output_offset;
+
+            if char_len > 1 {
+                assert!(char_len >=2 && char_len <= 4);
+                let mbc = MultiByteChar {
+                    pos,
+                    bytes: char_len as u8,
+                };
+                multi_byte_chars.push(mbc);
+            }
+
+            // Assume control characters are zero width.
+            // FIXME: How can we decide between `width` and `width_cjk`?
+            let char_width = UnicodeWidthChar::width(c).unwrap_or(0);
+
+            if char_width != 1 {
+                non_narrow_chars.push(NonNarrowChar::new(pos, char_width));
+            }
+        }
+
+        i += char_len;
+    }
+
+    // Overflow past `scan_len` (see the function comment above). At this
+    // point `i >= scan_len`, so the subtraction cannot underflow.
+    i - scan_len
+}
+
+
+
+// Defines one `#[test]` that runs `analyze_filemap` on `text` (starting at
+// `filemap_start_pos`) and compares the three result vectors against the
+// expected values. Expected `multi_byte_chars` are `(pos, byte-length)` pairs
+// and expected `non_narrow_chars` are `(pos, display-width)` pairs.
+macro_rules! test {
+    (case: $test_name:ident,
+     text: $text:expr,
+     filemap_start_pos: $filemap_start_pos:expr,
+     lines: $lines:expr,
+     multi_byte_chars: $multi_byte_chars:expr,
+     non_narrow_chars: $non_narrow_chars:expr,) => (
+
+    #[test]
+    fn $test_name() {
+
+        let (lines, multi_byte_chars, non_narrow_chars) =
+            analyze_filemap($text, BytePos($filemap_start_pos));
+
+        let expected_lines: Vec<BytePos> = $lines
+            .into_iter()
+            .map(|pos| BytePos(pos))
+            .collect();
+
+        assert_eq!(lines, expected_lines);
+
+        let expected_mbcs: Vec<MultiByteChar> = $multi_byte_chars
+            .into_iter()
+            .map(|(pos, bytes)| MultiByteChar {
+                pos: BytePos(pos),
+                bytes,
+            })
+            .collect();
+
+        assert_eq!(multi_byte_chars, expected_mbcs);
+
+        let expected_nncs: Vec<NonNarrowChar> = $non_narrow_chars
+            .into_iter()
+            .map(|(pos, width)| {
+                NonNarrowChar::new(BytePos(pos), width)
+            })
+            .collect();
+
+        assert_eq!(non_narrow_chars, expected_nncs);
+    })
+}
+
+// Test cases for `analyze_filemap`. Note that all positions in the expected
+// output are offset by `filemap_start_pos`, and that line beginnings are
+// recorded *after* each newline byte.
+test!(
+    case: empty_text,
+    text: "",
+    filemap_start_pos: 0,
+    lines: vec![],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: newlines_short,
+    text: "a\nc",
+    filemap_start_pos: 0,
+    lines: vec![0, 2],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: newlines_long,
+    text: "012345678\nabcdef012345678\na",
+    filemap_start_pos: 0,
+    lines: vec![0, 10, 26],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: newline_and_multi_byte_char_in_same_chunk,
+    text: "01234β789\nbcdef0123456789abcdef",
+    filemap_start_pos: 0,
+    lines: vec![0, 11],
+    multi_byte_chars: vec![(5, 2)],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: newline_and_control_char_in_same_chunk,
+    text: "01234\u{07}6789\nbcdef0123456789abcdef",
+    filemap_start_pos: 0,
+    lines: vec![0, 11],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![(5, 0)],
+);
+
+test!(
+    case: multi_byte_char_short,
+    text: "aβc",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![(1, 2)],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: multi_byte_char_long,
+    text: "0123456789abcΔf012345β",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![(13, 2), (22, 2)],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: multi_byte_char_across_chunk_boundary,
+    text: "0123456789abcdeΔ123456789abcdef01234",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![(15, 2)],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: multi_byte_char_across_chunk_boundary_tail,
+    text: "0123456789abcdeΔ....",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![(15, 2)],
+    non_narrow_chars: vec![],
+);
+
+test!(
+    case: non_narrow_short,
+    text: "0\t2",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![(1, 4)],
+);
+
+test!(
+    case: non_narrow_long,
+    text: "01\t3456789abcdef01234567\u{07}9",
+    filemap_start_pos: 0,
+    lines: vec![0],
+    multi_byte_chars: vec![],
+    non_narrow_chars: vec![(2, 4), (24, 0)],
+);
+
+test!(
+    case: output_offset_all,
+    text: "01\t345\n789abcΔf01234567\u{07}9\nbcΔf",
+    filemap_start_pos: 1000,
+    lines: vec![0 + 1000, 7 + 1000, 27 + 1000],
+    multi_byte_chars: vec![(13 + 1000, 2), (29 + 1000, 2)],
+    non_narrow_chars: vec![(2 + 1000, 4), (24 + 1000, 0)],
+);
use GLOBALS;
use Span;
use edition::Edition;
-use symbol::{Ident, Symbol};
+use symbol::Symbol;
use serialize::{Encodable, Decodable, Encoder, Decoder};
use std::collections::HashMap;
marks: Vec<MarkData>,
syntax_contexts: Vec<SyntaxContextData>,
markings: HashMap<(SyntaxContext, Mark), SyntaxContext>,
- gensym_to_ctxt: HashMap<Symbol, Span>,
default_edition: Edition,
}
modern: SyntaxContext(0),
}],
markings: HashMap::new(),
- gensym_to_ctxt: HashMap::new(),
default_edition: Edition::Edition2015,
}
}
/// Whether the macro is allowed to use `unsafe` internally
/// even if the user crate has `#![forbid(unsafe_code)]`.
pub allow_internal_unsafe: bool,
+ /// Enables the macro helper hack (`ident!(...)` -> `$crate::ident!(...)`)
+ /// for a given macro.
+ pub local_inner_macros: bool,
/// Edition of the crate in which the macro is defined.
pub edition: Edition,
}
CompilerDesugaringKind::DotFill => "...",
CompilerDesugaringKind::QuestionMark => "?",
CompilerDesugaringKind::Catch => "do catch",
- CompilerDesugaringKind::ExistentialReturnType => "existental type",
+ CompilerDesugaringKind::ExistentialReturnType => "existential type",
})
}
}
Ok(SyntaxContext::empty()) // FIXME(jseyfried) intercrate hygiene
}
}
-
-impl Symbol {
- pub fn from_ident(ident: Ident) -> Symbol {
- HygieneData::with(|data| {
- let gensym = ident.name.gensymed();
- data.gensym_to_ctxt.insert(gensym, ident.span);
- gensym
- })
- }
-
- pub fn to_ident(self) -> Ident {
- HygieneData::with(|data| {
- match data.gensym_to_ctxt.get(&self) {
- Some(&span) => Ident::new(self.interned(), span),
- None => Ident::with_empty_ctxt(self),
- }
- })
- }
-}
#![feature(optin_builtin_traits)]
#![allow(unused_attributes)]
#![feature(specialization)]
+#![feature(stdsimd)]
use std::borrow::Cow;
use std::cell::Cell;
extern crate serialize;
extern crate serialize as rustc_serialize; // used by deriving
+#[macro_use]
+extern crate cfg_if;
+
extern crate unicode_width;
pub mod edition;
pub mod symbol;
+mod analyze_filemap;
+
pub struct Globals {
symbol_interner: Lock<symbol::Interner>,
span_interner: Lock<span_encoding::SpanInterner>,
pub const NO_EXPANSION: SyntaxContext = SyntaxContext::empty();
/// Identifies an offset of a multi-byte character in a FileMap
-#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)]
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
pub struct MultiByteChar {
/// The absolute offset of the character in the CodeMap
pub pos: BytePos,
/// The number of bytes, >=2
- pub bytes: usize,
+ pub bytes: u8,
}
/// Identifies an offset of a non-narrow character in a FileMap
-#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq)]
+#[derive(Copy, Clone, RustcEncodable, RustcDecodable, Eq, PartialEq, Debug)]
pub enum NonNarrowChar {
/// Represents a zero-width character
ZeroWidth(BytePos),
/// The end position of this source in the CodeMap
pub end_pos: BytePos,
/// Locations of lines beginnings in the source code
- pub lines: Lock<Vec<BytePos>>,
+ pub lines: Vec<BytePos>,
/// Locations of multi-byte characters in the source code
- pub multibyte_chars: Lock<Vec<MultiByteChar>>,
+ pub multibyte_chars: Vec<MultiByteChar>,
/// Width of characters that are not narrow in the source code
- pub non_narrow_chars: Lock<Vec<NonNarrowChar>>,
+ pub non_narrow_chars: Vec<NonNarrowChar>,
/// A hash of the filename, used for speeding up the incr. comp. hashing.
pub name_hash: u128,
}
s.emit_struct_field("start_pos", 4, |s| self.start_pos.encode(s))?;
s.emit_struct_field("end_pos", 5, |s| self.end_pos.encode(s))?;
s.emit_struct_field("lines", 6, |s| {
- let lines = self.lines.borrow();
+ let lines = &self.lines[..];
// store the length
s.emit_u32(lines.len() as u32)?;
};
let bytes_per_diff: u8 = match max_line_length {
- 0 ... 0xFF => 1,
- 0x100 ... 0xFFFF => 2,
+ 0 ..= 0xFF => 1,
+ 0x100 ..= 0xFFFF => 2,
_ => 4
};
Ok(())
})?;
s.emit_struct_field("multibyte_chars", 7, |s| {
- (*self.multibyte_chars.borrow()).encode(s)
+ self.multibyte_chars.encode(s)
})?;
s.emit_struct_field("non_narrow_chars", 8, |s| {
- (*self.non_narrow_chars.borrow()).encode(s)
+ self.non_narrow_chars.encode(s)
})?;
s.emit_struct_field("name_hash", 9, |s| {
self.name_hash.encode(s)
src: None,
src_hash,
external_src: Lock::new(ExternalSource::AbsentOk),
- lines: Lock::new(lines),
- multibyte_chars: Lock::new(multibyte_chars),
- non_narrow_chars: Lock::new(non_narrow_chars),
+ lines,
+ multibyte_chars,
+ non_narrow_chars,
name_hash,
})
})
};
let end_pos = start_pos.to_usize() + src.len();
+ let (lines, multibyte_chars, non_narrow_chars) =
+ analyze_filemap::analyze_filemap(&src[..], start_pos);
+
FileMap {
name,
name_was_remapped,
external_src: Lock::new(ExternalSource::Unneeded),
start_pos,
end_pos: Pos::from_usize(end_pos),
- lines: Lock::new(Vec::new()),
- multibyte_chars: Lock::new(Vec::new()),
- non_narrow_chars: Lock::new(Vec::new()),
+ lines,
+ multibyte_chars,
+ non_narrow_chars,
name_hash,
}
}
- /// EFFECT: register a start-of-line offset in the
- /// table of line-beginnings.
- /// UNCHECKED INVARIANT: these offsets must be added in the right
- /// order and must be in the right places; there is shared knowledge
- /// about what ends a line between this file and parse.rs
- /// WARNING: pos param here is the offset relative to start of CodeMap,
- /// and CodeMap will append a newline when adding a filemap without a newline at the end,
- /// so the safe way to call this is with value calculated as
- /// filemap.start_pos + newline_offset_relative_to_the_start_of_filemap.
- pub fn next_line(&self, pos: BytePos) {
- // the new charpos must be > the last one (or it's the first one).
- let mut lines = self.lines.borrow_mut();
- let line_len = lines.len();
- assert!(line_len == 0 || ((*lines)[line_len - 1] < pos));
- lines.push(pos);
- }
-
/// Return the BytePos of the beginning of the current line.
- pub fn line_begin_pos(&self) -> BytePos {
- let lines = self.lines.borrow();
- match lines.last() {
- Some(&line_pos) => line_pos,
- None => self.start_pos,
- }
+ pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
+ let line_index = self.lookup_line(pos).unwrap();
+ self.lines[line_index]
}
/// Add externally loaded source.
}
let begin = {
- let lines = self.lines.borrow();
- let line = if let Some(line) = lines.get(line_number) {
+ let line = if let Some(line) = self.lines.get(line_number) {
line
} else {
return None;
}
}
- pub fn record_multibyte_char(&self, pos: BytePos, bytes: usize) {
- assert!(bytes >=2 && bytes <= 4);
- let mbc = MultiByteChar {
- pos,
- bytes,
- };
- self.multibyte_chars.borrow_mut().push(mbc);
- }
-
- #[inline]
- pub fn record_width(&self, pos: BytePos, ch: char) {
- let width = match ch {
- '\t' =>
- // Tabs will consume 4 columns.
- 4,
- '\n' =>
- // Make newlines take one column so that displayed spans can point them.
- 1,
- ch =>
- // Assume control characters are zero width.
- // FIXME: How can we decide between `width` and `width_cjk`?
- unicode_width::UnicodeWidthChar::width(ch).unwrap_or(0),
- };
- // Only record non-narrow characters.
- if width != 1 {
- self.non_narrow_chars.borrow_mut().push(NonNarrowChar::new(pos, width));
- }
- }
-
pub fn is_real_file(&self) -> bool {
self.name.is_real()
}
self.end_pos.0 - self.start_pos.0
}
pub fn count_lines(&self) -> usize {
- self.lines.borrow().len()
+ self.lines.len()
}
/// Find the line containing the given position. The return value is the
/// number. If the filemap is empty or the position is located before the
/// first line, None is returned.
pub fn lookup_line(&self, pos: BytePos) -> Option<usize> {
- let lines = self.lines.borrow();
- if lines.len() == 0 {
+ if self.lines.len() == 0 {
return None;
}
- let line_index = lookup_line(&lines[..], pos);
- assert!(line_index < lines.len() as isize);
+ let line_index = lookup_line(&self.lines[..], pos);
+ assert!(line_index < self.lines.len() as isize);
if line_index >= 0 {
Some(line_index as usize)
} else {
return (self.start_pos, self.end_pos);
}
- let lines = self.lines.borrow();
- assert!(line_index < lines.len());
- if line_index == (lines.len() - 1) {
- (lines[line_index], self.end_pos)
+ assert!(line_index < self.lines.len());
+ if line_index == (self.lines.len() - 1) {
+ (self.lines[line_index], self.end_pos)
} else {
- (lines[line_index], lines[line_index + 1])
+ (self.lines[line_index], self.lines[line_index + 1])
}
}
pub trait Pos {
fn from_usize(n: usize) -> Self;
fn to_usize(&self) -> usize;
+ fn from_u32(n: u32) -> Self;
+ fn to_u32(&self) -> u32;
}
/// A byte offset. Keep this small (currently 32-bits), as AST contains
fn from_usize(n: usize) -> BytePos { BytePos(n as u32) }
#[inline(always)]
- fn to_usize(&self) -> usize { let BytePos(n) = *self; n as usize }
+ fn to_usize(&self) -> usize { self.0 as usize }
+
+ #[inline(always)]
+ fn from_u32(n: u32) -> BytePos { BytePos(n) }
+
+ #[inline(always)]
+ fn to_u32(&self) -> u32 { self.0 }
}
impl Add for BytePos {
fn from_usize(n: usize) -> CharPos { CharPos(n) }
#[inline(always)]
- fn to_usize(&self) -> usize { let CharPos(n) = *self; n }
+ fn to_usize(&self) -> usize { self.0 }
+
+ #[inline(always)]
+ fn from_u32(n: u32) -> CharPos { CharPos(n as usize) }
+
+ #[inline(always)]
+    fn to_u32(&self) -> u32 { self.0 as u32 }
}
impl Add for CharPos {
Ident::new(Symbol::intern(self.as_str().trim_left_matches('\'')), self.span)
}
+ /// "Normalize" ident for use in comparisons using "item hygiene".
+ /// Identifiers with same string value become same if they came from the same "modern" macro
+ /// (e.g. `macro` item, but not `macro_rules` item) and stay different if they came from
+ /// different "modern" macros.
+ /// Technically, this operation strips all non-opaque marks from ident's syntactic context.
pub fn modern(self) -> Ident {
Ident::new(self.name, self.span.modern())
}
pub fn as_str(self) -> LocalInternedString {
self.name.as_str()
}
+
+ pub fn as_interned_str(self) -> InternedString {
+ self.name.as_interned_str()
+ }
}
impl PartialEq for Ident {
return Err("stack is empty".to_string());
}
}
- ':' | '#' | ' ' | '.' | '0'...'9' => {
+ ':' | '#' | ' ' | '.' | '0'..='9' => {
let mut flags = Flags::new();
let mut fstate = FormatStateFlags;
match cur {
'#' => flags.alternate = true,
' ' => flags.space = true,
'.' => fstate = FormatStatePrecision,
- '0'...'9' => {
+ '0'..='9' => {
flags.width = cur as usize - '0' as usize;
fstate = FormatStateWidth;
}
(FormatStateFlags, ' ') => {
flags.space = true;
}
- (FormatStateFlags, '0'...'9') => {
+ (FormatStateFlags, '0'..='9') => {
flags.width = cur as usize - '0' as usize;
*fstate = FormatStateWidth;
}
(FormatStateFlags, '.') => {
*fstate = FormatStatePrecision;
}
- (FormatStateWidth, '0'...'9') => {
+ (FormatStateWidth, '0'..='9') => {
let old = flags.width;
flags.width = flags.width * 10 + (cur as usize - '0' as usize);
if flags.width < old {
(FormatStateWidth, '.') => {
*fstate = FormatStatePrecision;
}
- (FormatStatePrecision, '0'...'9') => {
+ (FormatStatePrecision, '0'..='9') => {
let old = flags.precision;
flags.precision = flags.precision * 10 + (cur as usize - '0' as usize);
if flags.precision < old {
#[link(name = "rust_test_helpers", kind = "static")]
extern {
#[no_output]
- //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro and proc-macro invocations in `extern {}` blocks are experimental.
fn some_definitely_unknown_symbol_which_should_be_removed();
#[nop_attr]
- //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro and proc-macro invocations in `extern {}` blocks are experimental.
fn rust_get_test_int() -> isize;
emit_input!(fn rust_dbg_extern_identity_u32(arg: u32) -> u32;);
- //~^ ERROR Macro and proc-macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro and proc-macro invocations in `extern {}` blocks are experimental.
}
let _: <<A>::B>::C; //~ ERROR cannot find type `A` in this scope
let _ = <<A>::B>::C; //~ ERROR cannot find type `A` in this scope
let <<A>::B>::C; //~ ERROR cannot find type `A` in this scope
- let 0 ... <<A>::B>::C; //~ ERROR cannot find type `A` in this scope
+ let 0 ..= <<A>::B>::C; //~ ERROR cannot find type `A` in this scope
//~^ ERROR only char and numeric types are allowed in range patterns
<<A>::B>::C; //~ ERROR cannot find type `A` in this scope
}
#![deny(const_err)]
pub const A: i8 = -std::i8::MIN; //~ ERROR const_err
-//~^ ERROR this constant cannot be used
-//~| ERROR this expression will panic at runtime
pub const B: u8 = 200u8 + 200u8; //~ ERROR const_err
-//~^ ERROR this constant cannot be used
pub const C: u8 = 200u8 * 4; //~ ERROR const_err
-//~^ ERROR this constant cannot be used
pub const D: u8 = 42u8 - (42u8 + 1); //~ ERROR const_err
-//~^ ERROR this constant cannot be used
pub const E: u8 = [5u8][1]; //~ ERROR const_err
-//~| ERROR this constant cannot be used
fn main() {
let _a = A;
#![deny(const_err)]
pub const A: i8 = -std::i8::MIN;
-//~^ ERROR E0080
-//~| ERROR attempt to negate with overflow
-//~| ERROR this expression will panic at runtime
-//~| ERROR this constant cannot be used
+//~^ ERROR this constant cannot be used
pub const B: i8 = A;
//~^ ERROR const_err
//~| ERROR const_err
// Make sure that the two uses get two errors.
const FOO: u8 = [5u8][1];
//~^ ERROR constant evaluation error
-//~| ERROR constant evaluation error
//~| index out of bounds: the len is 1 but the index is 1
fn main() {
black_box((FOO, FOO));
+ //~^ ERROR referenced constant has errors
+ //~| ERROR could not evaluate constant
}
//~^ ERROR this constant cannot be used
(
i8::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_I16: (i16,) =
//~^ ERROR this constant cannot be used
(
i16::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_I32: (i32,) =
//~^ ERROR this constant cannot be used
(
i32::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_I64: (i64,) =
//~^ ERROR this constant cannot be used
(
i64::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_U8: (u8,) =
//~^ ERROR this constant cannot be used
(
u8::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_U16: (u16,) = (
//~^ ERROR this constant cannot be used
u16::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_U32: (u32,) = (
//~^ ERROR this constant cannot be used
u32::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
const VALS_U64: (u64,) =
//~^ ERROR this constant cannot be used
(
u64::MIN - 1,
- //~^ ERROR attempt to subtract with overflow
);
fn main() {
//~^ ERROR this constant cannot be used
(
i8::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_I16: (i16,) =
//~^ ERROR this constant cannot be used
(
i16::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_I32: (i32,) =
//~^ ERROR this constant cannot be used
(
i32::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_I64: (i64,) =
//~^ ERROR this constant cannot be used
(
i64::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_U8: (u8,) =
//~^ ERROR this constant cannot be used
(
u8::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_U16: (u16,) = (
//~^ ERROR this constant cannot be used
u16::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_U32: (u32,) = (
//~^ ERROR this constant cannot be used
u32::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
const VALS_U64: (u64,) =
//~^ ERROR this constant cannot be used
(
u64::MAX + 1,
- //~^ ERROR attempt to add with overflow
);
fn main() {
//~^ ERROR this constant cannot be used
(
i8::MIN * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_I16: (i16,) =
//~^ ERROR this constant cannot be used
(
i16::MIN * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_I32: (i32,) =
//~^ ERROR this constant cannot be used
(
i32::MIN * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_I64: (i64,) =
//~^ ERROR this constant cannot be used
(
i64::MIN * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_U8: (u8,) =
//~^ ERROR this constant cannot be used
(
u8::MAX * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_U16: (u16,) = (
//~^ ERROR this constant cannot be used
u16::MAX * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_U32: (u32,) = (
//~^ ERROR this constant cannot be used
u32::MAX * 2,
- //~^ ERROR attempt to multiply with overflow
);
const VALS_U64: (u64,) =
//~^ ERROR this constant cannot be used
(
u64::MAX * 2,
- //~^ ERROR attempt to multiply with overflow
);
fn main() {
pub fn main() {
let x: Box<Trait> = Box::new(Foo);
let _y: &Trait = x; //~ ERROR E0308
- //~| expected type `&Trait`
- //~| found type `std::boxed::Box<Trait>`
+ //~| expected type `&dyn Trait`
+ //~| found type `std::boxed::Box<dyn Trait>`
}
let &&x = &&(&1isize as &T);
// n == m
- let &x = &1isize as &T; //~ ERROR type `&T` cannot be dereferenced
- let &&x = &(&1isize as &T); //~ ERROR type `&T` cannot be dereferenced
- let box x = box 1isize as Box<T>; //~ ERROR type `std::boxed::Box<T>` cannot be dereferenced
+ let &x = &1isize as &T; //~ ERROR type `&dyn T` cannot be dereferenced
+ let &&x = &(&1isize as &T); //~ ERROR type `&dyn T` cannot be dereferenced
+ let box x = box 1isize as Box<T>; //~ ERROR type `std::boxed::Box<dyn T>` cannot be dereferenced
// n > m
let &&x = &1isize as &T;
//~^ ERROR mismatched types
- //~| expected type `T`
+ //~| expected type `dyn T`
//~| found type `&_`
//~| expected trait T, found reference
let &&&x = &(&1isize as &T);
//~^ ERROR mismatched types
- //~| expected type `T`
+ //~| expected type `dyn T`
//~| found type `&_`
//~| expected trait T, found reference
let box box x = box 1isize as Box<T>;
//~^ ERROR mismatched types
- //~| expected type `T`
+ //~| expected type `dyn T`
//~| found type `std::boxed::Box<_>`
}
let z: Box<ToBar> = Box::new(Bar1 {f: 36});
f5.2 = Bar1 {f: 36};
//~^ ERROR mismatched types
- //~| expected type `ToBar`
+ //~| expected type `dyn ToBar`
//~| found type `Bar1`
//~| expected trait ToBar, found struct `Bar1`
//~| ERROR the size for value values of type
let z: Box<ToBar> = Box::new(Bar1 {f: 36});
f5.ptr = Bar1 {f: 36};
//~^ ERROR mismatched types
- //~| expected type `ToBar`
+ //~| expected type `dyn ToBar`
//~| found type `Bar1`
//~| expected trait ToBar, found struct `Bar1`
//~| ERROR the size for value values of type
// except according to those terms.
// ignore-tidy-linelength
-// compile-flags: --edition=2015 -Zunstable-options
+// edition:2015
// tests that editions work with the tyvar warning-turned-error
// except according to those terms.
// ignore-tidy-linelength
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
// tests that editions work with the tyvar warning-turned-error
--- /dev/null
+// gate-test-irrefutable_let_patterns
+
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[allow(irrefutable_let_patterns)]
+fn main() {
+ if let _ = 5 {}
+ //~^ ERROR 15:12: 15:13: irrefutable if-let pattern [E0162]
+}
let _: () = (box |_: isize| {}) as Box<FnOnce(isize)>;
//~^ ERROR mismatched types
//~| expected type `()`
- //~| found type `std::boxed::Box<std::ops::FnOnce(isize)>`
+ //~| found type `std::boxed::Box<dyn std::ops::FnOnce(isize)>`
let _: () = (box |_: isize, isize| {}) as Box<Fn(isize, isize)>;
//~^ ERROR mismatched types
//~| expected type `()`
- //~| found type `std::boxed::Box<std::ops::Fn(isize, isize)>`
+ //~| found type `std::boxed::Box<dyn std::ops::Fn(isize, isize)>`
let _: () = (box || -> isize { unimplemented!() }) as Box<FnMut() -> isize>;
//~^ ERROR mismatched types
//~| expected type `()`
- //~| found type `std::boxed::Box<std::ops::FnMut() -> isize>`
+ //~| found type `std::boxed::Box<dyn std::ops::FnMut() -> isize>`
needs_fn(1);
//~^ ERROR : std::ops::Fn<(isize,)>`
impl Foo for Baz {
fn bar(&mut self, other: &Foo) {}
//~^ ERROR method `bar` has an incompatible type for trait
- //~| expected type `fn(&mut Baz, &mut Foo)`
- //~| found type `fn(&mut Baz, &Foo)`
+ //~| expected type `fn(&mut Baz, &mut dyn Foo)`
+ //~| found type `fn(&mut Baz, &dyn Foo)`
}
fn main() {}
trait Foo {}
impl<'a> Foo for Foo+'a {}
-//~^ ERROR the object type `Foo + 'a` automatically implements the trait `Foo`
+//~^ ERROR the object type `(dyn Foo + 'a)` automatically implements the trait `Foo`
fn main() {}
let index = 6;
match i {
- 0...index => println!("winner"),
+ 0..=index => println!("winner"),
//~^ ERROR runtime values cannot be referenced in patterns
_ => println!("hello"),
}
fn main() {
size_of_copy::<Misc+Copy>();
//~^ ERROR only auto traits can be used as additional traits in a trait object
- //~| ERROR the trait bound `Misc: std::marker::Copy` is not satisfied
+ //~| ERROR the trait bound `dyn Misc: std::marker::Copy` is not satisfied
}
fn main() {
let t : &Trait = &get_function()();
- //~^ ERROR cannot move a value of type Trait + 'static
+ //~^ ERROR cannot move a value of type (dyn Trait + 'static)
}
//~| WARNING hard error
//~| ERROR floating-point types cannot be used in patterns
//~| WARNING hard error
- 39.0 ... 70.0 => {}, //~ ERROR floating-point types cannot be used in patterns
+ 39.0 ..= 70.0 => {}, //~ ERROR floating-point types cannot be used in patterns
//~| WARNING hard error
//~| ERROR floating-point types cannot be used in patterns
//~| WARNING hard error
#![feature(fn_traits)]
trait CallSingle<A, B> {
- fn call(&self, a: A) -> B where Self: Fn(A) -> B;
+ fn call(&self, a: A) -> B where Self: Sized, Self: Fn(A) -> B;
}
impl<A, B, F: Fn(A) -> B> CallSingle<A, B> for F {
fn main() {
(&5isize as &Foo).foo();
- //~^ ERROR: no method named `foo` found for type `&Foo` in the current scope
+ //~^ ERROR: no method named `foo` found for type `&dyn Foo` in the current scope
}
fn object_ref_with_static_bound_not_ok() {
assert_send::<&'static (Dummy+'static)>();
- //~^ ERROR `Dummy + 'static` cannot be shared between threads safely [E0277]
+ //~^ ERROR `(dyn Dummy + 'static)` cannot be shared between threads safely [E0277]
}
fn box_object_with_no_bound_not_ok<'a>() {
assert_send::<Box<Dummy>>();
- //~^ ERROR `Dummy` cannot be sent between threads safely
+ //~^ ERROR `dyn Dummy` cannot be sent between threads safely
}
fn object_with_send_bound_ok() {
// careful with object types, who knows what they close over...
fn test51<'a>() {
assert_send::<&'a Dummy>();
- //~^ ERROR `Dummy + 'a` cannot be shared between threads safely [E0277]
+ //~^ ERROR `(dyn Dummy + 'a)` cannot be shared between threads safely [E0277]
}
fn test52<'a>() {
assert_send::<&'a (Dummy+Sync)>();
// them not ok
fn test_71<'a>() {
assert_send::<Box<Dummy+'a>>();
- //~^ ERROR `Dummy + 'a` cannot be sent between threads safely
+ //~^ ERROR `(dyn Dummy + 'a)` cannot be sent between threads safely
}
fn main() { }
fn test50() {
assert_send::<&'static Dummy>();
- //~^ ERROR `Dummy + 'static` cannot be shared between threads safely [E0277]
+ //~^ ERROR `(dyn Dummy + 'static)` cannot be shared between threads safely [E0277]
}
fn test53() {
assert_send::<Box<Dummy>>();
- //~^ ERROR `Dummy` cannot be sent between threads safely
+ //~^ ERROR `dyn Dummy` cannot be sent between threads safely
}
// ...unless they are properly bounded
#[link(name = "rust_test_helpers", kind = "static")]
extern {
returns_isize!(rust_get_test_int);
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
emits_nothing!();
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
}
let x: Box<HashMap<isize, isize>> = box HashMap::new();
let x: Box<Map<isize, isize>> = x;
let y: Box<Map<usize, isize>> = Box::new(x);
- //~^ ERROR `std::boxed::Box<Map<isize, isize>>: Map<usize, isize>` is not satisfied
+ //~^ ERROR `std::boxed::Box<dyn Map<isize, isize>>: Map<usize, isize>` is not satisfied
}
fn main() {
match 5 {
- 6 ... 1 => { }
+ 6 ..= 1 => { }
_ => { }
};
//~^^^ ERROR lower range bound must be less than or equal to upper
//~^^^ ERROR lower range bound must be less than upper
match 5u64 {
- 0xFFFF_FFFF_FFFF_FFFF ... 1 => { }
+ 0xFFFF_FFFF_FFFF_FFFF ..= 1 => { }
_ => { }
};
//~^^^ ERROR lower range bound must be less than or equal to upper
fn main() {
match "wow" {
- "bar" ... "foo" => { }
+ "bar" ..= "foo" => { }
};
//~^^ ERROR only char and numeric types are allowed in range
//~| start type: &'static str
//~| end type: &'static str
match "wow" {
- 10 ... "what" => ()
+ 10 ..= "what" => ()
};
//~^^ ERROR only char and numeric types are allowed in range
//~| start type: {integer}
//~| end type: &'static str
match 5 {
- 'c' ... 100 => { }
+ 'c' ..= 100 => { }
_ => { }
};
//~^^^ ERROR mismatched types
fn main() {
let x = 0;
match 1 {
- 0 ... x => {}
+ 0 ..= x => {}
//~^ ERROR runtime values cannot be referenced in patterns
};
}
pub unsafe fn test_Bar_load(p: &mut Bar, v: Bar) {
intrinsics::atomic_load(p);
- //~^ ERROR expected basic integer type, found `&std::ops::Fn()`
+ //~^ ERROR expected basic integer type, found `&dyn std::ops::Fn()`
}
pub unsafe fn test_Bar_store(p: &mut Bar, v: Bar) {
intrinsics::atomic_store(p, v);
- //~^ ERROR expected basic integer type, found `&std::ops::Fn()`
+ //~^ ERROR expected basic integer type, found `&dyn std::ops::Fn()`
}
pub unsafe fn test_Bar_xchg(p: &mut Bar, v: Bar) {
intrinsics::atomic_xchg(p, v);
- //~^ ERROR expected basic integer type, found `&std::ops::Fn()`
+ //~^ ERROR expected basic integer type, found `&dyn std::ops::Fn()`
}
pub unsafe fn test_Bar_cxchg(p: &mut Bar, v: Bar) {
intrinsics::atomic_cxchg(p, v, v);
- //~^ ERROR expected basic integer type, found `&std::ops::Fn()`
+ //~^ ERROR expected basic integer type, found `&dyn std::ops::Fn()`
}
pub unsafe fn test_Quux_load(p: &mut Quux, v: Quux) {
fn main() {
assert::<Rc<RefCell<i32>>>();
//~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
- //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+ //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
}
fn main() {
assert::<Arc<RefCell<i32>>>();
//~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
- //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+ //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
}
fn main() {
assert::<&RefCell<i32>>();
//~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
- //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+ //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
}
fn main() {
assert::<*mut RefCell<i32>>();
//~^ ERROR the type `std::cell::UnsafeCell<i32>` may contain interior mutability and a
- //~| ERROR the type `std::cell::UnsafeCell<usize>` may contain interior mutability and a
+ //~| ERROR the type `std::cell::UnsafeCell<isize>` may contain interior mutability and a
}
trait Foo {}
fn take_foo<F:Foo>(f: F) {}
fn take_object(f: Box<Foo>) { take_foo(f); }
-//~^ ERROR `std::boxed::Box<Foo>: Foo` is not satisfied
+//~^ ERROR `std::boxed::Box<dyn Foo>: Foo` is not satisfied
fn main() {}
}
fn use_bar(t: Box<Bar>) {
- t.bar() //~ ERROR cannot move a value of type Bar
+ t.bar() //~ ERROR cannot move a value of type (dyn Bar + 'static)
}
fn main() { }
fn foo(value: i32) -> Option<$name> {
match value {
$( $value => Some($name::$variant), )* // PatKind::Lit
- $( $value ... 42 => Some($name::$variant), )* // PatKind::Range
+ $( $value ..= 42 => Some($name::$variant), )* // PatKind::Range
_ => None
}
}
});
fn main() {}
-
pub macro mac1() {
let _: Box<PubTr<AssocTy = u8>>;
- //~^ ERROR type `priv_trait::PubTr<AssocTy=u8> + '<empty>` is private
- //~| ERROR type `priv_trait::PubTr<AssocTy=u8> + '<empty>` is private
+ //~^ ERROR type `(dyn priv_trait::PubTr<AssocTy=u8> + '<empty>)` is private
+ //~| ERROR type `(dyn priv_trait::PubTr<AssocTy=u8> + '<empty>)` is private
type InSignatureTy2 = Box<PubTr<AssocTy = u8>>;
- //~^ ERROR type `priv_trait::PubTr<AssocTy=u8> + 'static` is private
+ //~^ ERROR type `(dyn priv_trait::PubTr<AssocTy=u8> + 'static)` is private
trait InSignatureTr2: PubTr<AssocTy = u8> {}
//~^ ERROR trait `priv_trait::PrivTr` is private
}
pub macro mac2() {
let _: Box<PrivTr<AssocTy = u8>>;
- //~^ ERROR type `priv_trait::PrivTr<AssocTy=u8> + '<empty>` is private
- //~| ERROR type `priv_trait::PrivTr<AssocTy=u8> + '<empty>` is private
+ //~^ ERROR type `(dyn priv_trait::PrivTr<AssocTy=u8> + '<empty>)` is private
+ //~| ERROR type `(dyn priv_trait::PrivTr<AssocTy=u8> + '<empty>)` is private
type InSignatureTy1 = Box<PrivTr<AssocTy = u8>>;
- //~^ ERROR type `priv_trait::PrivTr<AssocTy=u8> + 'static` is private
+ //~^ ERROR type `(dyn priv_trait::PrivTr<AssocTy=u8> + 'static)` is private
trait InSignatureTr1: PrivTr<AssocTy = u8> {}
//~^ ERROR trait `priv_trait::PrivTr` is private
}
m::leak_anon2(); //~ ERROR type `m::Priv` is private
m::leak_anon3(); //~ ERROR type `m::Priv` is private
- m::leak_dyn1(); //~ ERROR type `m::Trait + 'static` is private
+ m::leak_dyn1(); //~ ERROR type `(dyn m::Trait + 'static)` is private
m::leak_dyn2(); //~ ERROR type `m::Priv` is private
m::leak_dyn3(); //~ ERROR type `m::Priv` is private
match 10 {
<S as Tr>::A::f::<u8> => {}
//~^ ERROR expected unit struct/variant or constant, found method `<<S as Tr>::A>::f<u8>`
- 0 ... <S as Tr>::A::f::<u8> => {} //~ ERROR only char and numeric types are allowed in range
+ 0 ..= <S as Tr>::A::f::<u8> => {} //~ ERROR only char and numeric types are allowed in range
}
}
// except according to those terms.
-fn func((1, (Some(1), 2...3)): (isize, (Option<isize>, isize))) { }
+fn func((1, (Some(1), 2..=3)): (isize, (Option<isize>, isize))) { }
//~^ ERROR refutable pattern in function argument: `(_, _)` not covered
fn main() {
- let (1, (Some(1), 2...3)) = (1, (None, 2));
+ let (1, (Some(1), 2..=3)) = (1, (None, 2));
//~^ ERROR refutable pattern in local binding: `(_, _)` not covered
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
#![feature(extern_absolute_paths)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
#![feature(extern_absolute_paths)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
#![feature(extern_absolute_paths)]
// except according to those terms.
// aux-build:xcrate.rs
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
#![feature(crate_in_paths)]
#![feature(extern_absolute_paths)]
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// should-fail-irrefutable_let_patterns
+fn main() {
+ if let _ = 5 {}
+ //~^ ERROR irrefutable if-let pattern [E0162]
+}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(irrefutable_let_patterns)]
+
+// should-fail-irrefutable_let_patterns_with_gate
+fn main() {
+ if let _ = 5 {}
+ //~^ ERROR irrefutable if-let pattern [irrefutable_let_patterns]
+}
// A, B, C are resolved as inherent items, their traits don't need to be in scope
C::A; //~ ERROR associated constant `A` is private
//~^ ERROR the trait `assoc_const::C` cannot be made into an object
- //~| ERROR the trait bound `assoc_const::C: assoc_const::A` is not satisfied
+ //~| ERROR the trait bound `dyn assoc_const::C: assoc_const::A` is not satisfied
C::B; // ERROR the trait `assoc_const::C` cannot be made into an object
- //~^ ERROR the trait bound `assoc_const::C: assoc_const::B` is not satisfied
+ //~^ ERROR the trait bound `dyn assoc_const::C: assoc_const::B` is not satisfied
C::C; // OK
}
impl CompareToInts for i64 { }
fn with_obj(c: &CompareToInts) -> bool {
- c.same_as(22) //~ ERROR `CompareToInts: CompareTo<i32>` is not satisfied
+ c.same_as(22) //~ ERROR `dyn CompareToInts: CompareTo<i32>` is not satisfied
}
fn with_trait<C:CompareToInts>(c: &C) -> bool {
}
fn with_ufcs1<C:CompareToInts>(c: &C) -> bool {
- CompareToInts::same_as(c, 22) //~ ERROR `CompareToInts: CompareTo<i32>` is not satisfied
+ CompareToInts::same_as(c, 22) //~ ERROR `dyn CompareToInts: CompareTo<i32>` is not satisfied
}
fn with_ufcs2<C:CompareToInts>(c: &C) -> bool {
// unsize trait
let x: &Bar = &Bar;
- let _ = x as &Foo; //~ERROR trivial cast: `&Bar` as `&Foo`
- let _ = x as *const Foo; //~ERROR trivial cast: `&Bar` as `*const Foo`
+ let _ = x as &Foo; //~ERROR trivial cast: `&Bar` as `&dyn Foo`
+ let _ = x as *const Foo; //~ERROR trivial cast: `&Bar` as `*const dyn Foo`
let _: &Foo = x;
let _: *const Foo = x;
let x: &mut Bar = &mut Bar;
- let _ = x as &mut Foo; //~ERROR trivial cast: `&mut Bar` as `&mut Foo`
- let _ = x as *mut Foo; //~ERROR trivial cast: `&mut Bar` as `*mut Foo`
+ let _ = x as &mut Foo; //~ERROR trivial cast: `&mut Bar` as `&mut dyn Foo`
+ let _ = x as *mut Foo; //~ERROR trivial cast: `&mut Bar` as `*mut dyn Foo`
let _: &mut Foo = x;
let _: *mut Foo = x;
let x: Box<Bar> = Box::new(Bar);
- let _ = x as Box<Foo>; //~ERROR trivial cast: `std::boxed::Box<Bar>` as `std::boxed::Box<Foo>`
+ let _ = x as Box<Foo>; //~ERROR `std::boxed::Box<Bar>` as `std::boxed::Box<dyn Foo>`
let x: Box<Bar> = Box::new(Bar);
let _: Box<Foo> = x;
// functions
fn baz(_x: i32) {}
- let _ = &baz as &Fn(i32); //~ERROR trivial cast: `&fn(i32) {main::baz}` as `&std::ops::Fn(i32)`
+ let _ = &baz as &Fn(i32); //~ERROR `&fn(i32) {main::baz}` as `&dyn std::ops::Fn(i32)`
let _: &Fn(i32) = &baz;
let x = |_x: i32| {};
let _ = &x as &Fn(i32); //~ERROR trivial cast
//~^ ERROR mismatched types
//~| Perhaps two different versions of crate `crate_a1`
//~| expected trait `main::a::Bar`
- //~| expected type `std::boxed::Box<main::a::Bar + 'static>`
- //~| found type `std::boxed::Box<main::a::Bar>`
+ //~| expected type `std::boxed::Box<(dyn main::a::Bar + 'static)>`
+ //~| found type `std::boxed::Box<dyn main::a::Bar>`
}
}
let x: i32 = 5;
let y = x as MyAdd<i32>;
//~^ ERROR E0038
- //~| ERROR cast to unsized type: `i32` as `MyAdd<i32>`
+ //~| ERROR cast to unsized type: `i32` as `dyn MyAdd<i32>`
}
struct Bar<T:Eq+?Sized> { value: Box<T> }
trait Foo {
- fn bar(&self) where Bar<Self>: Copy;
+ fn bar(&self) where Self: Sized, Bar<Self>: Copy;
//~^ ERROR E0277
//
// Here, Eq ought to be implemented.
// fn main::{{closure}}(_1: [closure@NodeId(22) d:&'19s D]) -> i32 {
// let mut _0: i32;
// ...
-// let _2: &'15_0rs D;
+// let _2: &'16_0rs D;
// ...
// let mut _3: i32;
// bb0: {
// StorageLive(_2);
-// _2 = &'15_0rs (*(_1.0: &'19s D));
+// _2 = &'16_0rs (*(_1.0: &'19s D));
// StorageLive(_3);
// _3 = ((*_2).0: i32);
// _0 = move _3;
// StorageDead(_3);
-// EndRegion('15_0rs);
+// EndRegion('16_0rs);
// StorageDead(_2);
// return;
// }
// fn main::{{closure}}(_1: [closure@NodeId(22) d:D]) -> i32 {
// let mut _0: i32;
// ...
-// let _2: &'15_0rs D;
+// let _2: &'16_0rs D;
// ...
// let mut _3: i32;
// bb0: {
// StorageLive(_2);
-// _2 = &'15_0rs (_1.0: D);
+// _2 = &'16_0rs (_1.0: D);
// StorageLive(_3);
// _3 = ((*_2).0: i32);
// _0 = move _3;
// StorageDead(_3);
-// EndRegion('15_0rs);
+// EndRegion('16_0rs);
// StorageDead(_2);
// drop(_1) -> [return: bb2, unwind: bb1];
// }
// bb0: {
// Validate(Acquire, [_1: &ReFree(DefId(0/1:11 ~ validate_1[317d]::main[0]::{{closure}}[0]), BrEnv) [closure@NodeId(50)], _2: &ReFree(DefId(0/1:11 ~ validate_1[317d]::main[0]::{{closure}}[0]), BrAnon(0)) mut i32]);
// StorageLive(_3);
-// Validate(Suspend(ReScope(Remainder(BlockRemainder { block: ItemLocalId(22), first_statement_index: 0 }))), [(*_2): i32]);
+// Validate(Suspend(ReScope(Remainder(BlockRemainder { block: ItemLocalId(25), first_statement_index: 0 }))), [(*_2): i32]);
// _3 = &ReErased (*_2);
-// Validate(Acquire, [(*_3): i32/ReScope(Remainder(BlockRemainder { block: ItemLocalId(22), first_statement_index: 0 })) (imm)]);
+// Validate(Acquire, [(*_3): i32/ReScope(Remainder(BlockRemainder { block: ItemLocalId(25), first_statement_index: 0 })) (imm)]);
// StorageLive(_4);
// _4 = (*_3);
// _0 = move _4;
// StorageDead(_4);
-// EndRegion(ReScope(Remainder(BlockRemainder { block: ItemLocalId(22), first_statement_index: 0 })));
+// EndRegion(ReScope(Remainder(BlockRemainder { block: ItemLocalId(25), first_statement_index: 0 })));
// StorageDead(_3);
// return;
// }
// StorageLive(_3);
// StorageLive(_4);
// StorageLive(_5);
-// Validate(Suspend(ReScope(Node(ItemLocalId(9)))), [(*_2): i32]);
+// Validate(Suspend(ReScope(Node(ItemLocalId(12)))), [(*_2): i32]);
// _5 = &ReErased mut (*_2);
-// Validate(Acquire, [(*_5): i32/ReScope(Node(ItemLocalId(9)))]);
+// Validate(Acquire, [(*_5): i32/ReScope(Node(ItemLocalId(12)))]);
// _4 = move _5 as *mut i32 (Misc);
// _3 = move _4;
-// EndRegion(ReScope(Node(ItemLocalId(9))));
+// EndRegion(ReScope(Node(ItemLocalId(12))));
// StorageDead(_4);
// StorageDead(_5);
// Validate(Release, [_0: bool, _3: *mut i32]);
use std::ops::RangeToInclusive;
fn return_range_to() -> RangeToInclusive<i32> {
- return ...1; //~ERROR `...` syntax cannot be used in expressions
- //~^HELP Use `..` if you need an exclusive range (a < b)
- //~^^HELP or `..=` if you need an inclusive range (a <= b)
+ return ...1; //~ERROR unexpected token: `...`
+ //~^HELP use `..` for an exclusive range
+ //~^^HELP or `..=` for an inclusive range
}
pub fn main() {
- let x = ...0; //~ERROR `...` syntax cannot be used in expressions
- //~^HELP Use `..` if you need an exclusive range (a < b)
- //~^^HELP or `..=` if you need an inclusive range (a <= b)
+ let x = ...0; //~ERROR unexpected token: `...`
+ //~^HELP use `..` for an exclusive range
+ //~^^HELP or `..=` for an inclusive range
- let x = 5...5; //~ERROR `...` syntax cannot be used in expressions
- //~^HELP Use `..` if you need an exclusive range (a < b)
- //~^^HELP or `..=` if you need an inclusive range (a <= b)
+ let x = 5...5; //~ERROR unexpected token: `...`
+ //~^HELP use `..` for an exclusive range
+ //~^^HELP or `..=` for an inclusive range
- for _ in 0...1 {} //~ERROR `...` syntax cannot be used in expressions
- //~^HELP Use `..` if you need an exclusive range (a < b)
- //~^^HELP or `..=` if you need an inclusive range (a <= b)
+ for _ in 0...1 {} //~ERROR unexpected token: `...`
+ //~^HELP use `..` for an exclusive range
+ //~^^HELP or `..=` for an inclusive range
}
-
--- /dev/null
+-include ../tools.mk
+
+ifdef IS_WINDOWS
+# Do nothing on MSVC.
+all:
+ exit 0
+else
+all:
+ $(RUSTC) --emit=obj app.rs
+ nm $(TMPDIR)/app.o | $(CGREP) rust_begin_unwind
+ nm $(TMPDIR)/app.o | $(CGREP) rust_eh_personality
+ nm $(TMPDIR)/app.o | $(CGREP) rust_oom
+endif
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![crate_type = "bin"]
+#![feature(lang_items)]
+#![feature(panic_implementation)]
+#![no_main]
+#![no_std]
+
+use core::panic::PanicInfo;
+
+#[panic_implementation]
+fn panic(_: &PanicInfo) -> ! {
+ loop {}
+}
+
+#[lang = "eh_personality"]
+fn eh() {}
+
+#[lang = "oom"]
+fn oom() {}
// FIXME: Allow attributes in pattern constexprs?
// would require parens in patterns to allow disambiguation...
+ // —which is now available under the `pattern_parentheses` feature gate
+ // (tracking issue #51087)
reject_expr_parse("match 0 {
- 0...#[attr] 10 => ()
+ 0..=#[attr] 10 => ()
}");
reject_expr_parse("match 0 {
- 0...#[attr] -10 => ()
+ 0..=#[attr] -10 => ()
}");
reject_expr_parse("match 0 {
- 0...-#[attr] 10 => ()
+ 0..=-#[attr] 10 => ()
}");
reject_expr_parse("match 0 {
- 0...#[attr] FOO => ()
+ 0..=#[attr] FOO => ()
}");
// make sure we don't catch this bug again...
def_info: None,
allow_internal_unstable: false,
allow_internal_unsafe: false,
+ local_inner_macros: false,
unstable_feature: None,
edition: hygiene::default_edition(),
});
fn encode_json<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
write!(wr, "{}", json::as_json(val));
}
-fn encode_opaque<T: Encodable>(val: &T, wr: &mut Cursor<Vec<u8>>) {
+fn encode_opaque<T: Encodable>(val: &T, wr: Vec<u8>) {
let mut encoder = opaque::Encoder::new(wr);
val.encode(&mut encoder);
}
pub fn main() {
let target = Foo{baz: false,};
- let mut wr = Cursor::new(Vec::new());
let proto = WireProtocol::JSON;
match proto {
- WireProtocol::JSON => encode_json(&target, &mut wr),
- WireProtocol::Opaque => encode_opaque(&target, &mut wr)
+ WireProtocol::JSON => encode_json(&target, &mut Cursor::new(Vec::new())),
+ WireProtocol::Opaque => encode_opaque(&target, Vec::new())
}
}
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(irrefutable_let_patterns)]
+
+// must-compile-successfully-irrefutable_let_patterns_with_gate
+#[allow(irrefutable_let_patterns)]
+fn main() {
+ if let _ = 5 {}
+
+ while let _ = 5 {
+ break;
+ }
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
#![feature(arbitrary_self_types, async_await, await_macro, futures_api, pin)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
#![feature(raw_identifiers)]
}
match 100 {
- b'a' ... b'z' => {},
+ b'a' ..= b'z' => {},
_ => panic!()
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2015.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2015.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
pub fn main() {
let x = 2;
let x_message = match x {
- 0 ... 1 => { "not many".to_string() }
+ 0 ..= 1 => { "not many".to_string() }
_ => { "lots".to_string() }
};
assert_eq!(x_message, "lots".to_string());
let y = 2;
let y_message = match y {
- 0 ... 1 => { "not many".to_string() }
+ 0 ..= 1 => { "not many".to_string() }
_ => { "lots".to_string() }
};
assert_eq!(y_message, "lots".to_string());
let z = 1u64;
let z_message = match z {
- 0 ... 1 => { "not many".to_string() }
+ 0 ..= 1 => { "not many".to_string() }
_ => { "lots".to_string() }
};
assert_eq!(z_message, "not many".to_string());
assert_eq!(3, match (x, y) {
(1, 1) => 1,
(2, 2) => 2,
- (1...2, 2) => 3,
+ (1..=2, 2) => 3,
_ => 4,
});
assert_eq!(3, match ((x, y),) {
((1, 1),) => 1,
((2, 2),) => 2,
- ((1...2, 2),) => 3,
+ ((1..=2, 2),) => 3,
_ => 4,
});
}
fn lit_shadow_range() {
assert_eq!(2, match 1 {
1 if false => 1,
- 1...2 => 2,
+ 1..=2 => 2,
_ => 3
});
assert_eq!(2, match x+1 {
0 => 0,
1 if false => 1,
- 1...2 => 2,
+ 1..=2 => 2,
_ => 3
});
assert_eq!(2, match val() {
1 if false => 1,
- 1...2 => 2,
+ 1..=2 => 2,
_ => 3
});
assert_eq!(2, match CONST {
0 => 0,
1 if false => 1,
- 1...2 => 2,
+ 1..=2 => 2,
_ => 3
});
// value is out of the range of second arm, should match wildcard pattern
assert_eq!(3, match 3 {
1 if false => 1,
- 1...2 => 2,
+ 1..=2 => 2,
_ => 3
});
}
fn range_shadow_lit() {
assert_eq!(2, match 1 {
- 1...2 if false => 1,
+ 1..=2 if false => 1,
1 => 2,
_ => 3
});
let x = 0;
assert_eq!(2, match x+1 {
0 => 0,
- 1...2 if false => 1,
+ 1..=2 if false => 1,
1 => 2,
_ => 3
});
assert_eq!(2, match val() {
- 1...2 if false => 1,
+ 1..=2 if false => 1,
1 => 2,
_ => 3
});
assert_eq!(2, match CONST {
0 => 0,
- 1...2 if false => 1,
+ 1..=2 if false => 1,
1 => 2,
_ => 3
});
// ditto
assert_eq!(3, match 3 {
- 1...2 if false => 1,
+ 1..=2 if false => 1,
1 => 2,
_ => 3
});
fn range_shadow_range() {
assert_eq!(2, match 1 {
- 0...2 if false => 1,
- 1...3 => 2,
+ 0..=2 if false => 1,
+ 1..=3 => 2,
_ => 3,
});
let x = 0;
assert_eq!(2, match x+1 {
100 => 0,
- 0...2 if false => 1,
- 1...3 => 2,
+ 0..=2 if false => 1,
+ 1..=3 => 2,
_ => 3,
});
assert_eq!(2, match val() {
- 0...2 if false => 1,
- 1...3 => 2,
+ 0..=2 if false => 1,
+ 1..=3 => 2,
_ => 3,
});
assert_eq!(2, match CONST {
100 => 0,
- 0...2 if false => 1,
- 1...3 => 2,
+ 0..=2 if false => 1,
+ 1..=3 => 2,
_ => 3,
});
// ditto
assert_eq!(3, match 5 {
- 0...2 if false => 1,
- 1...3 => 2,
+ 0..=2 if false => 1,
+ 1..=3 => 2,
_ => 3,
});
}
fn multi_pats_shadow_lit() {
assert_eq!(2, match 1 {
100 => 0,
- 0 | 1...10 if false => 1,
+ 0 | 1..=10 if false => 1,
1 => 2,
_ => 3,
});
fn multi_pats_shadow_range() {
assert_eq!(2, match 1 {
100 => 0,
- 0 | 1...10 if false => 1,
- 1...3 => 2,
+ 0 | 1..=10 if false => 1,
+ 1..=3 => 2,
_ => 3,
});
}
assert_eq!(2, match 1 {
100 => 0,
1 if false => 1,
- 0 | 1...10 => 2,
+ 0 | 1..=10 => 2,
_ => 3,
});
}
fn range_shadow_multi_pats() {
assert_eq!(2, match 1 {
100 => 0,
- 1...3 if false => 1,
- 0 | 1...10 => 2,
+ 1..=3 if false => 1,
+ 0 | 1..=10 => 2,
_ => 3,
});
}
fn main() {
let r = match (Foo::FooNullary, 'a') {
- (Foo::FooUint(..), 'a'...'z') => 1,
+ (Foo::FooUint(..), 'a'..='z') => 1,
(Foo::FooNullary, 'x') => 2,
_ => 0
};
assert_eq!(r, 0);
let r = match (Foo::FooUint(0), 'a') {
- (Foo::FooUint(1), 'a'...'z') => 1,
+ (Foo::FooUint(1), 'a'..='z') => 1,
(Foo::FooUint(..), 'x') => 2,
(Foo::FooNullary, 'a') => 3,
_ => 0
assert_eq!(r, 0);
let r = match ('a', Foo::FooUint(0)) {
- ('a'...'z', Foo::FooUint(1)) => 1,
+ ('a'..='z', Foo::FooUint(1)) => 1,
('x', Foo::FooUint(..)) => 2,
('a', Foo::FooNullary) => 3,
_ => 0
assert_eq!(r, 0);
let r = match ('a', 'a') {
- ('a'...'z', 'b') => 1,
- ('x', 'a'...'z') => 2,
+ ('a'..='z', 'b') => 1,
+ ('x', 'a'..='z') => 2,
_ => 0
};
assert_eq!(r, 0);
let r = match ('a', 'a') {
- ('a'...'z', 'b') => 1,
- ('x', 'a'...'z') => 2,
+ ('a'..='z', 'b') => 1,
+ ('x', 'a'..='z') => 2,
('a', 'a') => 3,
_ => 0
};
// Regression test for #18060: match arms were matching in the wrong order.
fn main() {
- assert_eq!(2, match (1, 3) { (0, 2...5) => 1, (1, 3) => 2, (_, 2...5) => 3, (_, _) => 4 });
- assert_eq!(2, match (1, 3) { (1, 3) => 2, (_, 2...5) => 3, (_, _) => 4 });
- assert_eq!(2, match (1, 7) { (0, 2...5) => 1, (1, 7) => 2, (_, 2...5) => 3, (_, _) => 4 });
+ assert_eq!(2, match (1, 3) { (0, 2..=5) => 1, (1, 3) => 2, (_, 2..=5) => 3, (_, _) => 4 });
+ assert_eq!(2, match (1, 3) { (1, 3) => 2, (_, 2..=5) => 3, (_, _) => 4 });
+ assert_eq!(2, match (1, 7) { (0, 2..=5) => 1, (1, 7) => 2, (_, 2..=5) => 3, (_, _) => 4 });
}
fn main() {
match '5' {
- LOW_RANGE...HIGH_RANGE => (),
+ LOW_RANGE..=HIGH_RANGE => (),
_ => ()
};
}
std::intrinsics::type_name::<NT>(),
// DST
std::intrinsics::type_name::<DST>()
- )}, ("[u8]", "str", "std::marker::Send", "NT", "DST"));
+ )}, ("[u8]", "str", "dyn std::marker::Send", "NT", "DST"));
}
fn main() {
match 42 {
- m::START...m::END => {},
- 0...m::END => {},
- m::START...59 => {},
+ m::START..=m::END => {},
+ 0..=m::END => {},
+ m::START..=59 => {},
_ => {},
}
}
+++ /dev/null
-// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we do not ICE when a default method implementation has
-// requirements (in this case, `Self : Baz`) that do not hold for some
-// specific impl (in this case, `Foo : Bar`). This causes problems
-// only when building a vtable, because that goes along and
-// instantiates all the methods, even those that could not otherwise
-// be called.
-
-// pretty-expanded FIXME #23616
-
-struct Foo {
- x: i32
-}
-
-trait Bar {
- fn bar(&self) where Self : Baz { self.baz(); }
-}
-
-trait Baz {
- fn baz(&self);
-}
-
-impl Bar for Foo {
-}
-
-fn main() {
- let x: &Bar = &Foo { x: 22 };
-}
let x = 'a';
let y = match x {
- 'a'...'b' if false => "one",
+ 'a'..='b' if false => "one",
'a' => "two",
- 'a'...'b' => "three",
+ 'a'..='b' => "three",
_ => panic!("what?"),
};
let x = 4;
match x {
ref r if *r < 0 => println!("got negative num {} < 0", r),
- e @ 1 ... 100 => println!("got number within range [1,100] {}", e),
+ e @ 1 ..= 100 => println!("got number within range [1,100] {}", e),
_ => println!("no"),
}
}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(dead_code)]
+
+const PATH_DOT: &[u8] = &[b'.'];
+
+fn match_slice(element: &[u8]) {
+ match element {
+ &[] => {}
+ PATH_DOT => {}
+ _ => {}
+ }
+}
+
+fn main() {}
const FOO: f64 = 10.0;
match 0.0 {
- 0.0 ... FOO => (),
+ 0.0 ..= FOO => (),
_ => ()
}
}
}
macro_rules! catch_range {
- ($s:literal ... $e:literal) => {
- &format!("macro caught literal: {} ... {}", $s, $e)
+ ($s:literal ..= $e:literal) => {
+ &format!("macro caught literal: {} ..= {}", $s, $e)
};
- (($s:expr) ... ($e:expr)) => { // Must use ')' before '...'
- &format!("macro caught expr: {} ... {}", $s, $e)
+ (($s:expr) ..= ($e:expr)) => { // Must use ')' before '..='
+ &format!("macro caught expr: {} ..= {}", $s, $e)
};
}
macro_rules! pat_match {
- ($s:literal ... $e:literal) => {
+ ($s:literal ..= $e:literal) => {
match 3 {
- $s ... $e => "literal, in range",
+ $s ..= $e => "literal, in range",
_ => "literal, other",
}
};
assert_eq!(mtester!('c'), "macro caught literal: c");
assert_eq!(mtester!(-1.2), "macro caught literal: -1.2");
assert_eq!(two_negative_literals!(-2 -3), "macro caught literals: -2, -3");
- assert_eq!(catch_range!(2 ... 3), "macro caught literal: 2 ... 3");
+ assert_eq!(catch_range!(2 ..= 3), "macro caught literal: 2 ..= 3");
assert_eq!(match_attr!(#[attr] 1), "attr matched literal");
assert_eq!(test_user!(10, 20), "literal");
assert_eq!(mtester!(false), "macro caught literal: false");
assert_eq!(mtester!(true), "macro caught literal: true");
match_produced_attr!("a");
let _a = LiteralProduced;
- assert_eq!(pat_match!(1 ... 3), "literal, in range");
- assert_eq!(pat_match!(4 ... 6), "literal, other");
+ assert_eq!(pat_match!(1 ..= 3), "literal, in range");
+ assert_eq!(pat_match!(4 ..= 6), "literal, other");
// Cases where 'expr' catches
assert_eq!(mtester!((-1.2)), "macro caught expr: -1.2");
assert_eq!(only_expr!(-1.2), "macro caught expr: -1.2");
assert_eq!(mtester!((1 + 3)), "macro caught expr: 4");
assert_eq!(mtester_dbg!(()), "macro caught expr: ()");
- assert_eq!(catch_range!((1 + 1) ... (2 + 2)), "macro caught expr: 2 ... 4");
+ assert_eq!(catch_range!((1 + 1) ..= (2 + 2)), "macro caught expr: 2 ..= 4");
assert_eq!(match_attr!(#[attr] (1 + 2)), "attr matched expr");
assert_eq!(test_user!(10, (20 + 2)), "expr");
pub fn main() {
match 1 {
- 1 ... 3 => {}
+ 1 ..= 3 => {}
_ => panic!("should match range")
}
match 1 {
- 1 ... 3u16 => {}
+ 1 ..= 3u16 => {}
_ => panic!("should match range with inferred start type")
}
match 1 {
- 1u16 ... 3 => {}
+ 1u16 ..= 3 => {}
_ => panic!("should match range with inferred end type")
}
}
pub fn main() {
match 7 {
- s...e => (),
+ s..=e => (),
_ => (),
}
}
pub fn main() {
match 5_usize {
- 1_usize...5_usize => {}
+ 1_usize..=5_usize => {}
_ => panic!("should match range"),
}
match 1_usize {
_ => panic!("should match range start"),
}
match 5_usize {
- 6_usize...7_usize => panic!("shouldn't match range"),
+ 6_usize..=7_usize => panic!("shouldn't match range"),
_ => {}
}
match 7_usize {
}
match 5_usize {
1_usize => panic!("should match non-first range"),
- 2_usize...6_usize => {}
+ 2_usize..=6_usize => {}
_ => panic!("math is broken")
}
match 'c' {
- 'a'...'z' => {}
+ 'a'..='z' => {}
_ => panic!("should suppport char ranges")
}
match -3 {
- -7...5 => {}
+ -7..=5 => {}
_ => panic!("should match signed range")
}
match 3.0f64 {
- 1.0...5.0 => {}
+ 1.0..=5.0 => {}
_ => panic!("should match float range")
}
match -1.5f64 {
- -3.6...3.6 => {}
+ -3.6..=3.6 => {}
_ => panic!("should match negative float range")
}
match 3.5 {
pub fn main() {
let i = 5;
match &&&&i {
- 1 ... 3 => panic!(),
- 3 ... 8 => {},
+ 1 ..= 3 => panic!(),
+ 3 ..= 8 => {},
_ => panic!(),
}
}
// except according to those terms.
// aux-build:xcrate.rs
-// compile-flags: --edition=2018 -Zunstable-options
+// edition:2018
#![feature(extern_absolute_paths)]
//
// Regression test for #47075.
-// compile-flags: --test --edition=2018 -Zunstable-options
+// edition:2018
+// compile-flags: --test
#![feature(extern_absolute_paths)]
--- /dev/null
+// Copyright 2015 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Tests that it is possible to create a global allocator in a submodule, rather than in the crate
+// root.
+
+#![feature(alloc, allocator_api, global_allocator)]
+
+extern crate alloc;
+
+use std::{
+ alloc::{GlobalAlloc, Layout},
+ ptr,
+};
+
+struct MyAlloc;
+
+unsafe impl GlobalAlloc for MyAlloc {
+ unsafe fn alloc(&self, layout: Layout) -> *mut u8 {
+ ptr::null_mut()
+ }
+
+ unsafe fn dealloc(&self, ptr: *mut u8, layout: Layout) {}
+}
+
+mod submod {
+ use super::MyAlloc;
+
+ #[global_allocator]
+ static MY_HEAP: MyAlloc = MyAlloc; //~ ERROR global_allocator
+}
+
+fn main() {}
--- /dev/null
+error: `global_allocator` cannot be used in submodules
+ --> $DIR/allocator-submodule.rs:37:5
+ |
+LL | static MY_HEAP: MyAlloc = MyAlloc; //~ ERROR global_allocator
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
LL | g1(|_: (), _: ()| {}); //~ ERROR type mismatch
| ^^ -------------- found signature of `fn((), ()) -> _`
| |
- | expected signature of `for<'r> fn(&'r (), std::boxed::Box<for<'s> std::ops::Fn(&'s ()) + 'static>) -> _`
+ | expected signature of `for<'r> fn(&'r (), std::boxed::Box<(dyn for<'s> std::ops::Fn(&'s ()) + 'static)>) -> _`
|
note: required by `g1`
--> $DIR/anonymous-higher-ranked-lifetime.rs:33:1
LL | g3(|_: (), _: ()| {}); //~ ERROR type mismatch
| ^^ -------------- found signature of `fn((), ()) -> _`
| |
- | expected signature of `for<'s> fn(&'s (), std::boxed::Box<for<'r> std::ops::Fn(&'r ()) + 'static>) -> _`
+ | expected signature of `for<'s> fn(&'s (), std::boxed::Box<(dyn for<'r> std::ops::Fn(&'r ()) + 'static)>) -> _`
|
note: required by `g3`
--> $DIR/anonymous-higher-ranked-lifetime.rs:35:1
LL | h1(|_: (), _: (), _: (), _: ()| {}); //~ ERROR type mismatch
| ^^ ---------------------------- found signature of `fn((), (), (), ()) -> _`
| |
- | expected signature of `for<'r, 's> fn(&'r (), std::boxed::Box<for<'t0> std::ops::Fn(&'t0 ()) + 'static>, &'s (), for<'t0, 't1> fn(&'t0 (), &'t1 ())) -> _`
+ | expected signature of `for<'r, 's> fn(&'r (), std::boxed::Box<(dyn for<'t0> std::ops::Fn(&'t0 ()) + 'static)>, &'s (), for<'t0, 't1> fn(&'t0 (), &'t1 ())) -> _`
|
note: required by `h1`
--> $DIR/anonymous-higher-ranked-lifetime.rs:39:1
LL | h2(|_: (), _: (), _: (), _: ()| {}); //~ ERROR type mismatch
| ^^ ---------------------------- found signature of `fn((), (), (), ()) -> _`
| |
- | expected signature of `for<'r, 't0> fn(&'r (), std::boxed::Box<for<'s> std::ops::Fn(&'s ()) + 'static>, &'t0 (), for<'s, 't1> fn(&'s (), &'t1 ())) -> _`
+ | expected signature of `for<'r, 't0> fn(&'r (), std::boxed::Box<(dyn for<'s> std::ops::Fn(&'s ()) + 'static)>, &'t0 (), for<'s, 't1> fn(&'s (), &'t1 ())) -> _`
|
note: required by `h2`
--> $DIR/anonymous-higher-ranked-lifetime.rs:40:1
| ^^^^^^^^^^^^^^^^ the trait `Foo` cannot be made into an object
|
= note: method `foo` has a non-standard `self` type
- = note: required because of the requirements on the impl of `std::ops::CoerceUnsized<std::boxed::Box<Foo>>` for `std::boxed::Box<usize>`
+ = note: required because of the requirements on the impl of `std::ops::CoerceUnsized<std::boxed::Box<dyn Foo>>` for `std::boxed::Box<usize>`
error: aborting due to 2 previous errors
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
#![feature(arbitrary_self_types, async_await, await_macro, futures_api, pin)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
#![feature(raw_identifiers)]
LL | let _y = a.y; //~ ERROR use of moved
| ^^^ value used here after move
|
- = note: move occurs because `a.y` has type `std::boxed::Box<isize>`, which does not implement the `Copy` trait
+ = note: move occurs because `a.x` has type `std::boxed::Box<isize>`, which does not implement the `Copy` trait
error[E0382]: use of moved value: `a.y`
--> $DIR/borrowck-box-insensitivity.rs:108:14
LL | let _y = a.y; //~ ERROR use of collaterally moved
| ^^^ value used here after move
|
- = note: move occurs because `a.y` has type `std::boxed::Box<isize>`, which does not implement the `Copy` trait
+ = note: move occurs because `a.x.x` has type `std::boxed::Box<isize>`, which does not implement the `Copy` trait
error: aborting due to 2 previous errors
//~| ERROR use of moved value: `(maybe as std::prelude::v1::Some).0` (Ast) [E0382]
//~| ERROR use of moved value: `maybe` (Mir) [E0382]
//~| ERROR use of moved value: `maybe` (Mir) [E0382]
- //~| ERROR use of moved value: `maybe.0` (Mir) [E0382]
+ //~| ERROR use of moved value (Mir) [E0382]
//~| ERROR borrow of moved value: `maybe` (Mir) [E0382]
}
}
LL | | }
| |_________^ value used here after move
|
- = note: move occurs because `maybe` has type `std::option::Option<std::vec::Vec<bool>>`, which does not implement the `Copy` trait
+ = note: move occurs because value has type `std::vec::Vec<bool>`, which does not implement the `Copy` trait
error[E0382]: borrow of moved value: `maybe` (Mir)
--> $DIR/issue-41962.rs:17:9
LL | | }
| |_________^ value borrowed here after move
|
- = note: move occurs because `maybe` has type `std::option::Option<std::vec::Vec<bool>>`, which does not implement the `Copy` trait
+ = note: move occurs because value has type `std::vec::Vec<bool>`, which does not implement the `Copy` trait
error[E0382]: use of moved value: `maybe` (Mir)
--> $DIR/issue-41962.rs:17:16
| | value moved here
| value used here after move
|
- = note: move occurs because `maybe` has type `std::option::Option<std::vec::Vec<bool>>`, which does not implement the `Copy` trait
+ = note: move occurs because value has type `std::vec::Vec<bool>`, which does not implement the `Copy` trait
-error[E0382]: use of moved value: `maybe.0` (Mir)
+error[E0382]: use of moved value (Mir)
--> $DIR/issue-41962.rs:17:21
|
LL | if let Some(thing) = maybe {
| ^^^^^ value moved here in previous iteration of loop
|
- = note: move occurs because `maybe.0` has type `std::vec::Vec<bool>`, which does not implement the `Copy` trait
+ = note: move occurs because value has type `std::vec::Vec<bool>`, which does not implement the `Copy` trait
error: aborting due to 6 previous errors
-error[E0620]: cast to unsized type: `&{integer}` as `std::marker::Send`
+error[E0620]: cast to unsized type: `&{integer}` as `dyn std::marker::Send`
--> $DIR/cast-to-unsized-trait-object-suggestion.rs:12:5
|
LL | &1 as Send; //~ ERROR cast to unsized
| |
| help: try casting to a reference instead: `&Send`
-error[E0620]: cast to unsized type: `std::boxed::Box<{integer}>` as `std::marker::Send`
+error[E0620]: cast to unsized type: `std::boxed::Box<{integer}>` as `dyn std::marker::Send`
--> $DIR/cast-to-unsized-trait-object-suggestion.rs:13:5
|
LL | Box::new(1) as Send; //~ ERROR cast to unsized
match 10 {
1..10 => {},
- 9...10 => {},
+ 9..=10 => {},
_ => {},
}
match 10 {
1..10 => {},
- 10...10 => {},
+ 10..=10 => {},
_ => {},
}
match 10 {
1..10 => {},
- 8...9 => {},
+ 8..=9 => {},
_ => {},
}
match 10 {
1..10 => {},
- 9...9 => {},
+ 9..=9 => {},
_ => {},
}
}
warning: unreachable pattern
--> $DIR/issue-43253.rs:45:9
|
-LL | 8...9 => {},
+LL | 8..=9 => {},
| ^^^^^
warning: unreachable pattern
--> $DIR/issue-43253.rs:51:9
|
-LL | 9...9 => {},
+LL | 9..=9 => {},
| ^^^^^
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ |_: [_; continue]| {}; //~ ERROR: `continue` outside of loop
+
+ while |_: [_; continue]| {} {} //~ ERROR: `break` or `continue` with no label
+
+ while |_: [_; break]| {} {} //~ ERROR: `break` or `continue` with no label
+}
--- /dev/null
+error[E0268]: `continue` outside of loop
+ --> $DIR/closure-array-break-length.rs:12:13
+ |
+LL | |_: [_; continue]| {}; //~ ERROR: `continue` outside of loop
+ | ^^^^^^^^ cannot break outside of a loop
+
+error[E0590]: `break` or `continue` with no label in the condition of a `while` loop
+ --> $DIR/closure-array-break-length.rs:14:19
+ |
+LL | while |_: [_; continue]| {} {} //~ ERROR: `break` or `continue` with no label
+ | ^^^^^^^^ unlabeled `continue` in the condition of a `while` loop
+
+error[E0590]: `break` or `continue` with no label in the condition of a `while` loop
+ --> $DIR/closure-array-break-length.rs:16:19
+ |
+LL | while |_: [_; break]| {} {} //~ ERROR: `break` or `continue` with no label
+ | ^^^^^ unlabeled `break` in the condition of a `while` loop
+
+error: aborting due to 3 previous errors
+
+Some errors occurred: E0268, E0590.
+For more information about an error, try `rustc --explain E0268`.
-warning: attempt to subtract with overflow
- --> $DIR/conditional_array_execution.rs:15:19
+warning: this constant cannot be used
+ --> $DIR/conditional_array_execution.rs:15:1
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
- | ^^^^^
+ | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | attempt to subtract with overflow
|
note: lint level defined here
--> $DIR/conditional_array_execution.rs:11:9
LL | #![warn(const_err)]
| ^^^^^^^^^
-warning: this constant cannot be used
- --> $DIR/conditional_array_execution.rs:15:1
- |
-LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
- | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
- | |
- | attempt to subtract with overflow
-
-warning: referenced constant
- --> $DIR/conditional_array_execution.rs:20:20
+warning: referenced constant has errors
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
| ----- attempt to subtract with overflow
| ^^^
warning: this expression will panic at runtime
- --> $DIR/conditional_array_execution.rs:20:20
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | println!("{}", FOO);
| ^^^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/conditional_array_execution.rs:20:5
+error[E0080]: referenced constant has errors
+ --> $DIR/conditional_array_execution.rs:19:5
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
| ----- attempt to subtract with overflow
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
error[E0080]: erroneous constant used
- --> $DIR/conditional_array_execution.rs:20:5
+ --> $DIR/conditional_array_execution.rs:19:5
|
LL | println!("{}", FOO);
| ^^^^^^^^^^^^^^^---^^
|
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-error[E0080]: referenced constant
- --> $DIR/conditional_array_execution.rs:20:20
+error[E0080]: referenced constant has errors
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
| ----- attempt to subtract with overflow
| ^^^
error[E0080]: erroneous constant used
- --> $DIR/conditional_array_execution.rs:20:20
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | println!("{}", FOO);
| ^^^ referenced constant has errors
const X: u32 = 5;
const Y: u32 = 6;
const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
-//~^ WARN attempt to subtract with overflow
-//~| WARN this constant cannot be used
+//~^ WARN this constant cannot be used
fn main() {
println!("{}", FOO);
-warning: attempt to subtract with overflow
- --> $DIR/conditional_array_execution.rs:15:19
+warning: this constant cannot be used
+ --> $DIR/conditional_array_execution.rs:15:1
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
- | ^^^^^
+ | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | attempt to subtract with overflow
|
note: lint level defined here
--> $DIR/conditional_array_execution.rs:11:9
LL | #![warn(const_err)]
| ^^^^^^^^^
-warning: this constant cannot be used
- --> $DIR/conditional_array_execution.rs:15:1
- |
-LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
- | ^^^^^^^^^^^^^^^^^^-----^^^^^^^^^^^^^^^^^^^^^^^^^^^
- | |
- | attempt to subtract with overflow
-
-warning: referenced constant
- --> $DIR/conditional_array_execution.rs:20:20
+warning: referenced constant has errors
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
| ----- attempt to subtract with overflow
| ^^^
warning: this expression will panic at runtime
- --> $DIR/conditional_array_execution.rs:20:20
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | println!("{}", FOO);
| ^^^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/conditional_array_execution.rs:20:20
+error[E0080]: referenced constant has errors
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | const FOO: u32 = [X - Y, Y - X][(X < Y) as usize];
| ----- attempt to subtract with overflow
| ^^^
error[E0080]: erroneous constant used
- --> $DIR/conditional_array_execution.rs:20:20
+ --> $DIR/conditional_array_execution.rs:19:20
|
LL | println!("{}", FOO);
| ^^^ referenced constant has errors
fn main() {
const MIN: i8 = -5;
match 5i8 {
- MIN...-1 => {},
+ MIN..=-1 => {},
_ => {},
}
}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+
+pub trait Nullable {
+ const NULL: Self;
+
+ fn is_null(&self) -> bool;
+}
+
+impl<T> Nullable for *const T {
+ const NULL: Self = 0 as *const T;
+
+ fn is_null(&self) -> bool {
+ *self == Self::NULL
+ }
+}
+
+fn main() {
+}
-warning: attempt to subtract with overflow
- --> $DIR/issue-43197.rs:20:20
- |
-LL | const X: u32 = 0-1;
- | ^^^
- |
-note: lint level defined here
- --> $DIR/issue-43197.rs:11:9
- |
-LL | #![warn(const_err)]
- | ^^^^^^^^^
-
warning: this constant cannot be used
--> $DIR/issue-43197.rs:20:5
|
| ^^^^^^^^^^^^^^^---^
| |
| attempt to subtract with overflow
-
-warning: attempt to subtract with overflow
- --> $DIR/issue-43197.rs:23:24
|
-LL | const Y: u32 = foo(0-1);
- | ^^^
+note: lint level defined here
+ --> $DIR/issue-43197.rs:11:9
+ |
+LL | #![warn(const_err)]
+ | ^^^^^^^^^
warning: this constant cannot be used
- --> $DIR/issue-43197.rs:23:5
+ --> $DIR/issue-43197.rs:22:5
|
LL | const Y: u32 = foo(0-1);
| ^^^^^^^^^^^^^^^^^^^---^^
| |
| attempt to subtract with overflow
-warning: referenced constant
- --> $DIR/issue-43197.rs:26:23
+warning: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:23
|
LL | const X: u32 = 0-1;
| --- attempt to subtract with overflow
| ^
warning: this expression will panic at runtime
- --> $DIR/issue-43197.rs:26:23
+ --> $DIR/issue-43197.rs:24:23
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-warning: referenced constant
- --> $DIR/issue-43197.rs:26:26
+warning: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:26
|
LL | const Y: u32 = foo(0-1);
| --- attempt to subtract with overflow
-...
+LL | //~^ WARN this constant cannot be used
LL | println!("{} {}", X, Y);
| ^
warning: this expression will panic at runtime
- --> $DIR/issue-43197.rs:26:26
+ --> $DIR/issue-43197.rs:24:26
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/issue-43197.rs:26:5
+error[E0080]: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:5
|
LL | const X: u32 = 0-1;
| --- attempt to subtract with overflow
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
error[E0080]: erroneous constant used
- --> $DIR/issue-43197.rs:26:5
+ --> $DIR/issue-43197.rs:24:5
|
LL | println!("{} {}", X, Y);
| ^^^^^^^^^^^^^^^^^^-^^^^^
|
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-error[E0080]: referenced constant
- --> $DIR/issue-43197.rs:26:26
+error[E0080]: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:26
|
LL | const Y: u32 = foo(0-1);
| --- attempt to subtract with overflow
-...
+LL | //~^ WARN this constant cannot be used
LL | println!("{} {}", X, Y);
| ^
error[E0080]: erroneous constant used
- --> $DIR/issue-43197.rs:26:26
+ --> $DIR/issue-43197.rs:24:26
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/issue-43197.rs:26:23
+error[E0080]: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:23
|
LL | const X: u32 = 0-1;
| --- attempt to subtract with overflow
| ^
error[E0080]: erroneous constant used
- --> $DIR/issue-43197.rs:26:23
+ --> $DIR/issue-43197.rs:24:23
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
fn main() {
const X: u32 = 0-1;
- //~^ WARN attempt to subtract with overflow
- //~| WARN this constant cannot be used
+ //~^ WARN this constant cannot be used
const Y: u32 = foo(0-1);
- //~^ WARN attempt to subtract with overflow
- //~| WARN this constant cannot be used
+ //~^ WARN this constant cannot be used
println!("{} {}", X, Y);
//~^ WARN this expression will panic at runtime
//~| WARN this expression will panic at runtime
-warning: attempt to subtract with overflow
- --> $DIR/issue-43197.rs:20:20
- |
-LL | const X: u32 = 0-1;
- | ^^^
- |
-note: lint level defined here
- --> $DIR/issue-43197.rs:11:9
- |
-LL | #![warn(const_err)]
- | ^^^^^^^^^
-
warning: this constant cannot be used
--> $DIR/issue-43197.rs:20:5
|
| ^^^^^^^^^^^^^^^---^
| |
| attempt to subtract with overflow
-
-warning: attempt to subtract with overflow
- --> $DIR/issue-43197.rs:23:24
|
-LL | const Y: u32 = foo(0-1);
- | ^^^
+note: lint level defined here
+ --> $DIR/issue-43197.rs:11:9
+ |
+LL | #![warn(const_err)]
+ | ^^^^^^^^^
warning: this constant cannot be used
- --> $DIR/issue-43197.rs:23:5
+ --> $DIR/issue-43197.rs:22:5
|
LL | const Y: u32 = foo(0-1);
| ^^^^^^^^^^^^^^^^^^^---^^
| |
| attempt to subtract with overflow
-warning: referenced constant
- --> $DIR/issue-43197.rs:26:23
+warning: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:23
|
LL | const X: u32 = 0-1;
| --- attempt to subtract with overflow
| ^
warning: this expression will panic at runtime
- --> $DIR/issue-43197.rs:26:23
+ --> $DIR/issue-43197.rs:24:23
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-warning: referenced constant
- --> $DIR/issue-43197.rs:26:26
+warning: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:26
|
LL | const Y: u32 = foo(0-1);
| --- attempt to subtract with overflow
-...
+LL | //~^ WARN this constant cannot be used
LL | println!("{} {}", X, Y);
| ^
warning: this expression will panic at runtime
- --> $DIR/issue-43197.rs:26:26
+ --> $DIR/issue-43197.rs:24:26
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/issue-43197.rs:26:26
+error[E0080]: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:26
|
LL | const Y: u32 = foo(0-1);
| --- attempt to subtract with overflow
-...
+LL | //~^ WARN this constant cannot be used
LL | println!("{} {}", X, Y);
| ^
error[E0080]: erroneous constant used
- --> $DIR/issue-43197.rs:26:26
+ --> $DIR/issue-43197.rs:24:26
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/issue-43197.rs:26:23
+error[E0080]: referenced constant has errors
+ --> $DIR/issue-43197.rs:24:23
|
LL | const X: u32 = 0-1;
| --- attempt to subtract with overflow
| ^
error[E0080]: erroneous constant used
- --> $DIR/issue-43197.rs:26:23
+ --> $DIR/issue-43197.rs:24:23
|
LL | println!("{} {}", X, Y);
| ^ referenced constant has errors
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
--> $DIR/issue-44578.rs:35:5
|
LL | const AMT: usize = [A::AMT][(A::AMT > B::AMT) as usize];
|
= note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
--> $DIR/issue-44578.rs:35:20
|
LL | const AMT: usize = [A::AMT][(A::AMT > B::AMT) as usize];
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
--> $DIR/issue-44578.rs:35:20
|
LL | const AMT: usize = [A::AMT][(A::AMT > B::AMT) as usize];
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
--> $DIR/issue-50814-2.rs:26:5
|
LL | const BAR: usize = [5, 6, 7][T::BOO];
-error[E0080]: referenced constant
+error[E0080]: referenced constant has errors
--> $DIR/issue-50814.rs:27:5
|
LL | const MAX: u8 = A::MAX + B::MAX;
#![crate_type = "lib"]
pub const Z: u32 = 0 - 1;
-//~^ WARN attempt to subtract with overflow
-//~| WARN this constant cannot be used
+//~^ WARN this constant cannot be used
pub type Foo = [i32; 0 - 1];
//~^ WARN attempt to subtract with overflow
-warning: attempt to subtract with overflow
- --> $DIR/pub_const_err.rs:16:20
+warning: this constant cannot be used
+ --> $DIR/pub_const_err.rs:16:1
|
LL | pub const Z: u32 = 0 - 1;
- | ^^^^^
+ | ^^^^^^^^^^^^^^^^^^^-----^
+ | |
+ | attempt to subtract with overflow
|
note: lint level defined here
--> $DIR/pub_const_err.rs:12:9
LL | #![warn(const_err)]
| ^^^^^^^^^
-warning: this constant cannot be used
- --> $DIR/pub_const_err.rs:16:1
- |
-LL | pub const Z: u32 = 0 - 1;
- | ^^^^^^^^^^^^^^^^^^^-----^
- | |
- | attempt to subtract with overflow
-
warning: attempt to subtract with overflow
- --> $DIR/pub_const_err.rs:20:22
+ --> $DIR/pub_const_err.rs:19:22
|
LL | pub type Foo = [i32; 0 - 1];
| ^^^^^
warning: this array length cannot be used
- --> $DIR/pub_const_err.rs:20:22
+ --> $DIR/pub_const_err.rs:19:22
|
LL | pub type Foo = [i32; 0 - 1];
| ^^^^^ attempt to subtract with overflow
#![warn(const_err)]
pub const Z: u32 = 0 - 1;
-//~^ WARN attempt to subtract with overflow
-//~| WARN this constant cannot be used
+//~^ WARN this constant cannot be used
pub type Foo = [i32; 0 - 1];
//~^ WARN attempt to subtract with overflow
-warning: attempt to subtract with overflow
- --> $DIR/pub_const_err_bin.rs:14:20
+warning: this constant cannot be used
+ --> $DIR/pub_const_err_bin.rs:14:1
|
LL | pub const Z: u32 = 0 - 1;
- | ^^^^^
+ | ^^^^^^^^^^^^^^^^^^^-----^
+ | |
+ | attempt to subtract with overflow
|
note: lint level defined here
--> $DIR/pub_const_err_bin.rs:12:9
LL | #![warn(const_err)]
| ^^^^^^^^^
-warning: this constant cannot be used
- --> $DIR/pub_const_err_bin.rs:14:1
- |
-LL | pub const Z: u32 = 0 - 1;
- | ^^^^^^^^^^^^^^^^^^^-----^
- | |
- | attempt to subtract with overflow
-
warning: attempt to subtract with overflow
- --> $DIR/pub_const_err_bin.rs:18:22
+ --> $DIR/pub_const_err_bin.rs:17:22
|
LL | pub type Foo = [i32; 0 - 1];
| ^^^^^
warning: this array length cannot be used
- --> $DIR/pub_const_err_bin.rs:18:22
+ --> $DIR/pub_const_err_bin.rs:17:22
|
LL | pub type Foo = [i32; 0 - 1];
| ^^^^^ attempt to subtract with overflow
fn main() {
let n: Int = 40;
match n {
- 0...10 => {},
- 10...BAR => {}, //~ ERROR lower range bound must be less than or equal to upper
+ 0..=10 => {},
+ 10..=BAR => {}, //~ ERROR lower range bound must be less than or equal to upper
_ => {},
}
}
error[E0030]: lower range bound must be less than or equal to upper
--> $DIR/ref_to_int_match.rs:15:9
|
-LL | 10...BAR => {}, //~ ERROR lower range bound must be less than or equal to upper
+LL | 10..=BAR => {}, //~ ERROR lower range bound must be less than or equal to upper
| ^^ lower bound larger than upper bound
error: aborting due to previous error
const ONE: usize = 1;
const TWO: usize = 2;
const LEN: usize = ONE - TWO;
-//~^ ERROR E0080
-//~| ERROR attempt to subtract with overflow
fn main() {
let a: [i8; LEN] = unimplemented!();
//~^ ERROR E0080
//~| ERROR E0080
+//~| ERROR E0080
+//~| ERROR E0080
}
-error: attempt to subtract with overflow
- --> $DIR/const-len-underflow-separate-spans.rs:17:20
+error[E0080]: referenced constant has errors
+ --> $DIR/const-len-underflow-separate-spans.rs:20:17
|
LL | const LEN: usize = ONE - TWO;
- | ^^^^^^^^^
- |
- = note: #[deny(const_err)] on by default
+ | --------- attempt to subtract with overflow
+...
+LL | let a: [i8; LEN] = unimplemented!();
+ | ^^^
-error[E0080]: constant evaluation error
- --> $DIR/const-len-underflow-separate-spans.rs:17:20
+error[E0080]: could not evaluate constant
+ --> $DIR/const-len-underflow-separate-spans.rs:20:17
|
-LL | const LEN: usize = ONE - TWO;
- | ^^^^^^^^^ attempt to subtract with overflow
+LL | let a: [i8; LEN] = unimplemented!();
+ | ^^^ referenced constant has errors
-error[E0080]: referenced constant
- --> $DIR/const-len-underflow-separate-spans.rs:22:12
+error[E0080]: referenced constant has errors
+ --> $DIR/const-len-underflow-separate-spans.rs:20:12
|
LL | const LEN: usize = ONE - TWO;
| --------- attempt to subtract with overflow
| ^^^^^^^^^
error[E0080]: could not evaluate constant expression
- --> $DIR/const-len-underflow-separate-spans.rs:22:12
+ --> $DIR/const-len-underflow-separate-spans.rs:20:12
|
LL | let a: [i8; LEN] = unimplemented!();
| ^^^^^---^
-error[E0277]: the size for value values of type `std::fmt::Debug + std::marker::Sync + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn std::fmt::Debug + std::marker::Sync + 'static)` cannot be known at compilation time
--> $DIR/const-unsized.rs:13:29
|
LL | const CONST_0: Debug+Sync = *(&0 as &(Debug+Sync));
| ^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
|
- = help: the trait `std::marker::Sized` is not implemented for `std::fmt::Debug + std::marker::Sync + 'static`
+ = help: the trait `std::marker::Sized` is not implemented for `(dyn std::fmt::Debug + std::marker::Sync + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: constant expressions must have a statically known size
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: constant expressions must have a statically known size
-error[E0277]: the size for value values of type `std::fmt::Debug + std::marker::Sync + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn std::fmt::Debug + std::marker::Sync + 'static)` cannot be known at compilation time
--> $DIR/const-unsized.rs:19:31
|
LL | static STATIC_1: Debug+Sync = *(&1 as &(Debug+Sync));
| ^^^^^^^^^^^^^^^^^^^^^^ doesn't have a size known at compile-time
|
- = help: the trait `std::marker::Sized` is not implemented for `std::fmt::Debug + std::marker::Sync + 'static`
+ = help: the trait `std::marker::Sized` is not implemented for `(dyn std::fmt::Debug + std::marker::Sync + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: constant expressions must have a statically known size
LL | type G = 'static + (Send)::AssocTy;
| ^^^^^^^^^^^^^^^^^^^^^^^^^ ambiguous associated type
|
- = note: specify the type using the syntax `<std::marker::Send + 'static as Trait>::AssocTy`
+ = note: specify the type using the syntax `<(dyn std::marker::Send + 'static) as Trait>::AssocTy`
error[E0223]: ambiguous associated type
--> $DIR/bad-assoc-ty.rs:43:10
LL | type H = Fn(u8) -> (u8)::Output;
| ^^^^^^^^^^^^^^^^^^^^^^ ambiguous associated type
|
- = note: specify the type using the syntax `<std::ops::Fn(u8) -> u8 + 'static as Trait>::Output`
+ = note: specify the type using the syntax `<(dyn std::ops::Fn(u8) -> u8 + 'static) as Trait>::Output`
error: aborting due to 15 previous errors
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2015.rs
// compile-pass
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2015.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2015.rs
// compile-pass
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2015.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
// aux-build:edition-kw-macro-2018.rs
#![feature(raw_identifiers)]
let s = "hoho";
match s {
- "hello" ... "world" => {}
+ "hello" ..= "world" => {}
//~^ ERROR only char and numeric types are allowed in range patterns
_ => {}
}
error[E0029]: only char and numeric types are allowed in range patterns
--> $DIR/E0029-teach.rs:17:9
|
-LL | "hello" ... "world" => {}
+LL | "hello" ..= "world" => {}
| ^^^^^^^^^^^^^^^^^^^ ranges require char or numeric types
|
= note: start type: &'static str
let s = "hoho";
match s {
- "hello" ... "world" => {}
+ "hello" ..= "world" => {}
//~^ ERROR only char and numeric types are allowed in range patterns
_ => {}
}
error[E0029]: only char and numeric types are allowed in range patterns
--> $DIR/E0029.rs:15:9
|
-LL | "hello" ... "world" => {}
+LL | "hello" ..= "world" => {}
| ^^^^^^^^^^^^^^^^^^^ ranges require char or numeric types
|
= note: start type: &'static str
fn main() {
match 5u32 {
- 1000 ... 5 => {}
+ 1000 ..= 5 => {}
//~^ ERROR lower range bound must be less than or equal to upper
}
}
error[E0030]: lower range bound must be less than or equal to upper
--> $DIR/E0030-teach.rs:15:9
|
-LL | 1000 ... 5 => {}
+LL | 1000 ..= 5 => {}
| ^^^^ lower bound larger than upper bound
|
= note: When matching against a range, the compiler verifies that the range is non-empty. Range patterns include both end-points, so this is equivalent to requiring the start of the range to be less than or equal to the end of the range.
fn main() {
match 5u32 {
- 1000 ... 5 => {}
+ 1000 ..= 5 => {}
//~^ ERROR lower range bound must be less than or equal to upper
}
}
error[E0030]: lower range bound must be less than or equal to upper
--> $DIR/E0030.rs:14:9
|
-LL | 1000 ... 5 => {}
+LL | 1000 ..= 5 => {}
| ^^^^ lower bound larger than upper bound
error: aborting due to previous error
|
= note: method `foo` has no receiver
-error[E0033]: type `&SomeTrait` cannot be dereferenced
+error[E0033]: type `&dyn SomeTrait` cannot be dereferenced
--> $DIR/E0033-teach.rs:23:9
|
LL | let &invalid = trait_obj;
- | ^^^^^^^^ type `&SomeTrait` cannot be dereferenced
+ | ^^^^^^^^ type `&dyn SomeTrait` cannot be dereferenced
|
= note: This error indicates that a pointer to a trait type cannot be implicitly dereferenced by a pattern. Every trait defines a type, but because the size of trait implementors isn't fixed, this type has no compile-time size. Therefore, all accesses to trait types must be through pointers. If you encounter this error you should try to avoid dereferencing the pointer.
|
= note: method `foo` has no receiver
-error[E0033]: type `&SomeTrait` cannot be dereferenced
+error[E0033]: type `&dyn SomeTrait` cannot be dereferenced
--> $DIR/E0033.rs:21:9
|
LL | let &invalid = trait_obj;
- | ^^^^^^^^ type `&SomeTrait` cannot be dereferenced
+ | ^^^^^^^^ type `&dyn SomeTrait` cannot be dereferenced
error: aborting due to 3 previous errors
fn main() {
let x = 1u8;
match x {
- 0u8...3i8 => (), //~ ERROR E0308
+ 0u8..=3i8 => (), //~ ERROR E0308
_ => ()
}
}
error[E0308]: mismatched types
--> $DIR/E0308-4.rs:14:9
|
-LL | 0u8...3i8 => (), //~ ERROR E0308
+LL | 0u8..=3i8 => (), //~ ERROR E0308
| ^^^^^^^^^ expected u8, found i8
error: aborting due to previous error
-error[E0558]: export_name attribute has invalid format
+error[E0558]: `export_name` attribute has invalid format
--> $DIR/E0558.rs:11:1
|
LL | #[export_name]
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[export_name="\0foo"] //~ ERROR E0648
+pub fn bar() {}
+
+fn main() {}
--- /dev/null
+error[E0648]: `export_name` may not contain null characters
+ --> $DIR/E0648.rs:11:1
+ |
+LL | #[export_name="\0foo"] //~ ERROR E0648
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0648`.
-> Box<for<'a> Id<impl Lt<'a>>>
//~^ ERROR `impl Trait` can only capture lifetimes bound at the fn or impl level [E0657]
{
- () //~ ERROR mismatched types
+ Box::new(())
}
struct Foo;
-> Box<for<'a> Id<impl Lt<'a>>>
//~^ ERROR `impl Trait` can only capture lifetimes bound at the fn or impl level
{
- () //~ ERROR mismatched types
+ Box::new(())
}
}
LL | -> Box<for<'a> Id<impl Lt<'a>>>
| ^^
-error[E0308]: mismatched types
- --> $DIR/E0657.rs:22:5
- |
-LL | () //~ ERROR mismatched types
- | ^^ expected struct `std::boxed::Box`, found ()
- |
- = note: expected type `std::boxed::Box<Id<_> + 'static>`
- found type `()`
-
-error[E0308]: mismatched types
- --> $DIR/E0657.rs:31:9
- |
-LL | () //~ ERROR mismatched types
- | ^^ expected struct `std::boxed::Box`, found ()
- |
- = note: expected type `std::boxed::Box<Id<_> + 'static>`
- found type `()`
-
-error: aborting due to 4 previous errors
+error: aborting due to 2 previous errors
-Some errors occurred: E0308, E0657.
-For more information about an error, try `rustc --explain E0308`.
+For more information about this error, try `rustc --explain E0657`.
LL | q as *const [i32]; //~ ERROR cannot cast
| ^^^^^^^^^^^^^^^^^
-error[E0606]: casting `usize` as `*mut Trait + 'static` is invalid
+error[E0606]: casting `usize` as `*mut (dyn Trait + 'static)` is invalid
--> $DIR/fat-ptr-cast.rs:32:37
|
LL | let t: *mut (Trait + 'static) = 0 as *mut _; //~ ERROR casting
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2015
+// edition:2015
#![feature(futures_api)]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
+
#![feature(futures_api)]
async fn foo() {} //~ ERROR async fn is unstable
error[E0658]: async fn is unstable (see issue #50547)
- --> $DIR/feature-gate-async-await.rs:14:1
+ --> $DIR/feature-gate-async-await.rs:15:1
|
LL | async fn foo() {} //~ ERROR async fn is unstable
| ^^^^^^^^^^^^^^^^^
= help: add #![feature(async_await)] to the crate attributes to enable
error[E0658]: async blocks are unstable (see issue #50547)
- --> $DIR/feature-gate-async-await.rs:17:13
+ --> $DIR/feature-gate-async-await.rs:18:13
|
LL | let _ = async {}; //~ ERROR async blocks are unstable
| ^^^^^^^^
= help: add #![feature(async_await)] to the crate attributes to enable
error[E0658]: async closures are unstable (see issue #50547)
- --> $DIR/feature-gate-async-await.rs:18:13
+ --> $DIR/feature-gate-async-await.rs:19:13
|
LL | let _ = async || {}; //~ ERROR async closures are unstable
| ^^^^^^^^^^^
// gate is not used.
macro_rules! m { ($(a)?) => {} }
-//~^ ERROR Using the `?` macro Kleene operator for "at most one" repetition is unstable
+//~^ ERROR using the `?` macro Kleene operator for "at most one" repetition is unstable
fn main() {
m!();
-error[E0658]: Using the `?` macro Kleene operator for "at most one" repetition is unstable (see issue #48075)
+error[E0658]: using the `?` macro Kleene operator for "at most one" repetition is unstable (see issue #48075)
--> $DIR/feature-gate-macro_at_most_once_rep.rs:14:20
|
LL | macro_rules! m { ($(a)?) => {} }
#[link(name = "rust_test_helpers", kind = "static")]
extern {
returns_isize!(rust_get_test_int);
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
emits_nothing!();
- //~^ ERROR Macro invocations in `extern {}` blocks are experimental.
+ //~^ ERROR macro invocations in `extern {}` blocks are experimental.
}
-error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+error[E0658]: macro invocations in `extern {}` blocks are experimental. (see issue #49476)
--> $DIR/feature-gate-macros_in_extern.rs:29:5
|
LL | returns_isize!(rust_get_test_int);
|
= help: add #![feature(macros_in_extern)] to the crate attributes to enable
-error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+error[E0658]: macro invocations in `extern {}` blocks are experimental. (see issue #49476)
--> $DIR/feature-gate-macros_in_extern.rs:31:5
|
LL | takes_u32_returns_u32!(rust_dbg_extern_identity_u32);
|
= help: add #![feature(macros_in_extern)] to the crate attributes to enable
-error[E0658]: Macro invocations in `extern {}` blocks are experimental. (see issue #49476)
+error[E0658]: macro invocations in `extern {}` blocks are experimental. (see issue #49476)
--> $DIR/feature-gate-macros_in_extern.rs:33:5
|
LL | emits_nothing!();
= help: see issue #48214
= help: add #![feature(trivial_bounds)] to the crate attributes to enable
-error[E0277]: the size for value values of type `A + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn A + 'static)` cannot be known at compilation time
--> $DIR/feature-gate-trivial_bounds.rs:65:1
|
LL | / fn unsized_local() where Dst<A>: Sized { //~ ERROR
LL | | }
| |_^ doesn't have a size known at compile-time
|
- = help: within `Dst<A + 'static>`, the trait `std::marker::Sized` is not implemented for `A + 'static`
+ = help: within `Dst<(dyn A + 'static)>`, the trait `std::marker::Sized` is not implemented for `(dyn A + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
- = note: required because it appears within the type `Dst<A + 'static>`
+ = note: required because it appears within the type `Dst<(dyn A + 'static)>`
= help: see issue #48214
= help: add #![feature(trivial_bounds)] to the crate attributes to enable
fn main() {
let _ : &(Send,) = &((),);
- //~^ ERROR Unsized tuple coercion is not stable enough
+ //~^ ERROR unsized tuple coercion is not stable enough
}
-error[E0658]: Unsized tuple coercion is not stable enough for use and is subject to change (see issue #42877)
+error[E0658]: unsized tuple coercion is not stable enough for use and is subject to change (see issue #42877)
--> $DIR/feature-gate-unsized_tuple_coercion.rs:12:24
|
LL | let _ : &(Send,) = &((),);
-error[E0277]: `*mut std::ops::Fn() + 'static` cannot be shared between threads safely
+error[E0277]: `*mut (dyn std::ops::Fn() + 'static)` cannot be shared between threads safely
--> $DIR/send-sync.rs:18:5
|
LL | send(format_args!("{:?}", c)); //~ ERROR E0277
- | ^^^^ `*mut std::ops::Fn() + 'static` cannot be shared between threads safely
+ | ^^^^ `*mut (dyn std::ops::Fn() + 'static)` cannot be shared between threads safely
|
- = help: within `[std::fmt::ArgumentV1<'_>]`, the trait `std::marker::Sync` is not implemented for `*mut std::ops::Fn() + 'static`
- = note: required because it appears within the type `std::marker::PhantomData<*mut std::ops::Fn() + 'static>`
+ = help: within `[std::fmt::ArgumentV1<'_>]`, the trait `std::marker::Sync` is not implemented for `*mut (dyn std::ops::Fn() + 'static)`
+ = note: required because it appears within the type `std::marker::PhantomData<*mut (dyn std::ops::Fn() + 'static)>`
= note: required because it appears within the type `core::fmt::Void`
= note: required because it appears within the type `&core::fmt::Void`
= note: required because it appears within the type `std::fmt::ArgumentV1<'_>`
LL | fn send<T: Send>(_: T) {}
| ^^^^^^^^^^^^^^^^^^^^^^
-error[E0277]: `*mut std::ops::Fn() + 'static` cannot be shared between threads safely
+error[E0277]: `*mut (dyn std::ops::Fn() + 'static)` cannot be shared between threads safely
--> $DIR/send-sync.rs:19:5
|
LL | sync(format_args!("{:?}", c)); //~ ERROR E0277
- | ^^^^ `*mut std::ops::Fn() + 'static` cannot be shared between threads safely
+ | ^^^^ `*mut (dyn std::ops::Fn() + 'static)` cannot be shared between threads safely
|
- = help: within `std::fmt::Arguments<'_>`, the trait `std::marker::Sync` is not implemented for `*mut std::ops::Fn() + 'static`
- = note: required because it appears within the type `std::marker::PhantomData<*mut std::ops::Fn() + 'static>`
+ = help: within `std::fmt::Arguments<'_>`, the trait `std::marker::Sync` is not implemented for `*mut (dyn std::ops::Fn() + 'static)`
+ = note: required because it appears within the type `std::marker::PhantomData<*mut (dyn std::ops::Fn() + 'static)>`
= note: required because it appears within the type `core::fmt::Void`
= note: required because it appears within the type `&core::fmt::Void`
= note: required because it appears within the type `std::fmt::ArgumentV1<'_>`
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#[macro_export]
+macro_rules! helper1 {
+ () => ( struct S; )
+}
+
+#[macro_export(local_inner_macros)]
+macro_rules! helper2 {
+ () => ( helper1!(); )
+}
+
+#[macro_export(local_inner_macros)]
+macro_rules! public_macro {
+ () => ( helper2!(); )
+}
+
+#[macro_export(local_inner_macros)]
+macro_rules! public_macro_dynamic {
+ ($helper: ident) => ( $helper!(); )
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+// aux-build:local_inner_macros.rs
+
+#![feature(use_extern_macros)]
+
+extern crate local_inner_macros;
+
+use local_inner_macros::{public_macro, public_macro_dynamic};
+
+public_macro!();
+
+macro_rules! local_helper {
+ () => ( struct Z; )
+}
+
+public_macro_dynamic!(local_helper);
+
+fn main() {
+ let s = S;
+ let z = Z;
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// `local_inner_macros` has no effect if `feature(use_extern_macros)` is not enabled
+
+// aux-build:local_inner_macros.rs
+// error-pattern: cannot find macro `helper2!` in this scope
+
+#[macro_use(public_macro)]
+extern crate local_inner_macros;
+
+public_macro!();
+
+fn main() {}
--- /dev/null
+error: cannot find macro `helper2!` in this scope
+ --> $DIR/local_inner_macros_disabled.rs:19:1
+ |
+LL | public_macro!();
+ | ^^^^^^^^^^^^^^^^
+ |
+ = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
+
+error: aborting due to previous error
+
// return type, which can't depend on the obligation.
fn cycle1() -> impl Clone {
//~^ ERROR cycle detected
- //~| ERROR cycle detected
send(cycle2().clone());
- //~^ ERROR `std::rc::Rc<std::string::String>` cannot be sent between threads safely
Rc::new(Cell::new(5))
}
-error[E0391]: cycle detected when processing `cycle1::{{exist-impl-Trait}}`
- --> $DIR/auto-trait-leak.rs:24:16
- |
-LL | fn cycle1() -> impl Clone {
- | ^^^^^^^^^^
- |
-note: ...which requires processing `cycle1`...
+error[E0391]: cycle detected when processing `cycle1`
--> $DIR/auto-trait-leak.rs:24:1
|
LL | fn cycle1() -> impl Clone {
| ^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
note: ...which requires evaluating trait selection obligation `impl std::clone::Clone: std::marker::Send`...
note: ...which requires processing `cycle2::{{exist-impl-Trait}}`...
- --> $DIR/auto-trait-leak.rs:33:16
+ --> $DIR/auto-trait-leak.rs:31:16
|
LL | fn cycle2() -> impl Clone {
| ^^^^^^^^^^
note: ...which requires processing `cycle2`...
- --> $DIR/auto-trait-leak.rs:33:1
+ --> $DIR/auto-trait-leak.rs:31:1
|
LL | fn cycle2() -> impl Clone {
| ^^^^^^^^^^^^^^^^^^^^^^^^^
note: ...which requires evaluating trait selection obligation `impl std::clone::Clone: std::marker::Send`...
- = note: ...which again requires processing `cycle1::{{exist-impl-Trait}}`, completing the cycle
-
-error[E0391]: cycle detected when processing `cycle1::{{exist-impl-Trait}}`
+note: ...which requires processing `cycle1::{{exist-impl-Trait}}`...
--> $DIR/auto-trait-leak.rs:24:16
|
LL | fn cycle1() -> impl Clone {
| ^^^^^^^^^^
- |
-note: ...which requires processing `cycle1`...
- --> $DIR/auto-trait-leak.rs:24:1
- |
-LL | fn cycle1() -> impl Clone {
- | ^^^^^^^^^^^^^^^^^^^^^^^^^
-note: ...which requires evaluating trait selection obligation `impl std::clone::Clone: std::marker::Send`...
-note: ...which requires processing `cycle2::{{exist-impl-Trait}}`...
- --> $DIR/auto-trait-leak.rs:33:16
- |
-LL | fn cycle2() -> impl Clone {
- | ^^^^^^^^^^
-note: ...which requires processing `cycle2`...
- --> $DIR/auto-trait-leak.rs:33:1
- |
-LL | fn cycle2() -> impl Clone {
- | ^^^^^^^^^^^^^^^^^^^^^^^^^
- = note: ...which again requires processing `cycle1::{{exist-impl-Trait}}`, completing the cycle
-
-error[E0277]: `std::rc::Rc<std::string::String>` cannot be sent between threads safely
- --> $DIR/auto-trait-leak.rs:27:5
- |
-LL | send(cycle2().clone());
- | ^^^^ `std::rc::Rc<std::string::String>` cannot be sent between threads safely
- |
- = help: within `impl std::clone::Clone`, the trait `std::marker::Send` is not implemented for `std::rc::Rc<std::string::String>`
- = note: required because it appears within the type `impl std::clone::Clone`
-note: required by `send`
- --> $DIR/auto-trait-leak.rs:16:1
- |
-LL | fn send<T: Send>(_: T) {}
- | ^^^^^^^^^^^^^^^^^^^^^^
+ = note: ...which again requires processing `cycle1`, completing the cycle
+note: cycle used when type-checking all item bodies
-error: aborting due to 3 previous errors
+error: aborting due to previous error
-Some errors occurred: E0277, E0391.
-For more information about an error, try `rustc --explain E0277`.
+For more information about this error, try `rustc --explain E0391`.
--- /dev/null
+warning: not reporting region error due to nll
+ --> $DIR/static-return-lifetime-infered.rs:17:16
+ |
+LL | self.x.iter().map(|a| a.0)
+ | ^^^^
+
+warning: not reporting region error due to nll
+ --> $DIR/static-return-lifetime-infered.rs:21:16
+ |
+LL | self.x.iter().map(|a| a.0)
+ | ^^^^
+
+error: free region `` does not outlive free region `'static`
+ --> $DIR/static-return-lifetime-infered.rs:17:9
+ |
+LL | self.x.iter().map(|a| a.0)
+ | ^^^^^^^^^^^^^
+
+error: free region `'a` does not outlive free region `'static`
+ --> $DIR/static-return-lifetime-infered.rs:21:9
+ |
+LL | self.x.iter().map(|a| a.0)
+ | ^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+struct A {
+ x: [(u32, u32); 10]
+}
+
+impl A {
+ fn iter_values_anon(&self) -> impl Iterator<Item=u32> {
+ self.x.iter().map(|a| a.0)
+ }
+ //~^^ ERROR cannot infer an appropriate lifetime
+ fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> {
+ self.x.iter().map(|a| a.0)
+ }
+ //~^^ ERROR cannot infer an appropriate lifetime
+}
+
+fn main() {}
--- /dev/null
+error: cannot infer an appropriate lifetime
+ --> $DIR/static-return-lifetime-infered.rs:17:16
+ |
+LL | fn iter_values_anon(&self) -> impl Iterator<Item=u32> {
+ | ----------------------- this return type evaluates to the `'static` lifetime...
+LL | self.x.iter().map(|a| a.0)
+ | ------ ^^^^
+ | |
+ | ...but this borrow...
+ |
+note: ...can't outlive the anonymous lifetime #1 defined on the method body at 16:5
+ --> $DIR/static-return-lifetime-infered.rs:16:5
+ |
+LL | / fn iter_values_anon(&self) -> impl Iterator<Item=u32> {
+LL | | self.x.iter().map(|a| a.0)
+LL | | }
+ | |_____^
+help: you can add a constraint to the return type to make it last less than `'static` and match the anonymous lifetime #1 defined on the method body at 16:5
+ |
+LL | fn iter_values_anon(&self) -> impl Iterator<Item=u32> + '_ {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: cannot infer an appropriate lifetime
+ --> $DIR/static-return-lifetime-infered.rs:21:16
+ |
+LL | fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> {
+ | ----------------------- this return type evaluates to the `'static` lifetime...
+LL | self.x.iter().map(|a| a.0)
+ | ------ ^^^^
+ | |
+ | ...but this borrow...
+ |
+note: ...can't outlive the lifetime 'a as defined on the method body at 20:5
+ --> $DIR/static-return-lifetime-infered.rs:20:5
+ |
+LL | fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+help: you can add a constraint to the return type to make it last less than `'static` and match the lifetime 'a as defined on the method body at 20:5
+ |
+LL | fn iter_values<'a>(&'a self) -> impl Iterator<Item=u32> + 'a {
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to 2 previous errors
+
fn projection_with_named_trait_inside_path_is_disallowed()
-> <::std::ops::Range<impl Debug> as Iterator>::Item
//~^ ERROR `impl Trait` is not allowed in path parameters
-//~| ERROR trait bound `impl std::fmt::Debug: std::iter::Step` is not satisfied
-{ //~ ERROR trait bound `impl std::fmt::Debug: std::iter::Step` is not satisfied
- (1i32..100).next().unwrap() //~ ERROR mismatched types
+{
+ (1i32..100).next().unwrap()
}
fn projection_from_impl_trait_inside_dyn_trait_is_disallowed()
| ^^^^^^^^^^
error[E0667]: `impl Trait` is not allowed in path parameters
- --> $DIR/impl_trait_projections.rs:43:29
+ --> $DIR/impl_trait_projections.rs:42:29
|
LL | -> <dyn Iterator<Item = impl Debug> as Iterator>::Item
| ^^^^^^^^^^
|
= note: specify the type using the syntax `<impl std::iter::Iterator as Trait>::Item`
-error[E0277]: the trait bound `impl std::fmt::Debug: std::iter::Step` is not satisfied
- --> $DIR/impl_trait_projections.rs:38:1
- |
-LL | / { //~ ERROR trait bound `impl std::fmt::Debug: std::iter::Step` is not satisfied
-LL | | (1i32..100).next().unwrap() //~ ERROR mismatched types
-LL | | }
- | |_^ the trait `std::iter::Step` is not implemented for `impl std::fmt::Debug`
- |
- = note: required because of the requirements on the impl of `std::iter::Iterator` for `std::ops::Range<impl std::fmt::Debug>`
-
-error[E0308]: mismatched types
- --> $DIR/impl_trait_projections.rs:39:5
- |
-LL | (1i32..100).next().unwrap() //~ ERROR mismatched types
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected anonymized type, found i32
- |
- = note: expected type `impl std::fmt::Debug`
- found type `i32`
-
-error[E0277]: the trait bound `impl std::fmt::Debug: std::iter::Step` is not satisfied
- --> $DIR/impl_trait_projections.rs:35:8
- |
-LL | -> <::std::ops::Range<impl Debug> as Iterator>::Item
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `std::iter::Step` is not implemented for `impl std::fmt::Debug`
- |
- = note: required because of the requirements on the impl of `std::iter::Iterator` for `std::ops::Range<impl std::fmt::Debug>`
-
-error: aborting due to 8 previous errors
+error: aborting due to 5 previous errors
-Some errors occurred: E0223, E0277, E0308, E0667.
+Some errors occurred: E0223, E0667.
For more information about an error, try `rustc --explain E0223`.
LL | fn with_dyn_debug_static<'a>(x: Box<dyn Debug + 'a>) {
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: ...so that the expression is assignable:
- expected std::boxed::Box<std::fmt::Debug>
- found std::boxed::Box<std::fmt::Debug + 'a>
+ expected std::boxed::Box<dyn std::fmt::Debug>
+ found std::boxed::Box<(dyn std::fmt::Debug + 'a)>
= note: but, the lifetime must be valid for the static lifetime...
= note: ...so that the types are compatible:
expected StaticTrait
| |_____^
= note: ...but the lifetime must also be valid for the static lifetime...
= note: ...so that the method type is compatible with trait:
- expected fn(&Struct) -> &Trait + 'static
- found fn(&Struct) -> &Trait
+ expected fn(&Struct) -> &(dyn Trait + 'static)
+ found fn(&Struct) -> &dyn Trait
error: aborting due to previous error
//~^ ERROR cast to unsized type: `&[usize; 2]` as `[usize]`
let _bar = Box::new(1_usize) as std::fmt::Debug;
- //~^ ERROR cast to unsized type: `std::boxed::Box<usize>` as `std::fmt::Debug`
+ //~^ ERROR cast to unsized type: `std::boxed::Box<usize>` as `dyn std::fmt::Debug`
let _baz = 1_usize as std::fmt::Debug;
- //~^ ERROR cast to unsized type: `usize` as `std::fmt::Debug`
+ //~^ ERROR cast to unsized type: `usize` as `dyn std::fmt::Debug`
let _quux = [1_usize, 2] as [usize];
//~^ ERROR cast to unsized type: `[usize; 2]` as `[usize]`
LL | let _foo = &[1_usize, 2] as [usize];
| ^^^^^^^^^^^^^^^^^^^^^^^^
-error[E0620]: cast to unsized type: `std::boxed::Box<usize>` as `std::fmt::Debug`
+error[E0620]: cast to unsized type: `std::boxed::Box<usize>` as `dyn std::fmt::Debug`
--> $DIR/issue-17441.rs:15:16
|
LL | let _bar = Box::new(1_usize) as std::fmt::Debug;
| |
| help: try casting to a `Box` instead: `Box<std::fmt::Debug>`
-error[E0620]: cast to unsized type: `usize` as `std::fmt::Debug`
+error[E0620]: cast to unsized type: `usize` as `dyn std::fmt::Debug`
--> $DIR/issue-17441.rs:18:16
|
LL | let _baz = 1_usize as std::fmt::Debug;
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// run-pass
+// run-rustfix
+
+#![allow(non_snake_case)]
+#![allow(dead_code)]
+#![allow(unused_variables)]
+
+#[derive(Copy, Clone)]
+enum Foo {
+ Bar,
+ Baz
+}
+
+impl Foo {
+ fn foo(&self) {
+ match self {
+ &
+Foo::Bar if true
+//~^ WARN pattern binding `Bar` is named the same as one of the variants of the type `Foo`
+=> println!("bar"),
+ &
+Foo::Baz if false
+//~^ WARN pattern binding `Baz` is named the same as one of the variants of the type `Foo`
+=> println!("baz"),
+_ => ()
+ }
+ }
+}
+
+fn main() {}
// except according to those terms.
// run-pass
+// run-rustfix
#![allow(non_snake_case)]
#![allow(dead_code)]
warning[E0170]: pattern binding `Bar` is named the same as one of the variants of the type `Foo`
- --> $DIR/issue-19100.rs:27:1
+ --> $DIR/issue-19100.rs:28:1
|
LL | Bar if true
- | ^^^
- |
- = help: if you meant to match on a variant, consider making the path in the pattern qualified: `Foo::Bar`
+ | ^^^ help: to match on the variant, qualify the path: `Foo::Bar`
warning[E0170]: pattern binding `Baz` is named the same as one of the variants of the type `Foo`
- --> $DIR/issue-19100.rs:31:1
+ --> $DIR/issue-19100.rs:32:1
|
LL | Baz if false
- | ^^^
- |
- = help: if you meant to match on a variant, consider making the path in the pattern qualified: `Foo::Baz`
+ | ^^^ help: to match on the variant, qualify the path: `Foo::Baz`
| ^ the trait `Array` cannot be made into an object
|
= note: the trait cannot require that `Self : Sized`
- = note: required because of the requirements on the impl of `std::ops::CoerceUnsized<&Array>` for `&T`
+ = note: required because of the requirements on the impl of `std::ops::CoerceUnsized<&dyn Array>` for `&T`
error: aborting due to 2 previous errors
--> $DIR/issue-30302.rs:23:9
|
LL | Nil => true,
- | ^^^
- |
- = help: if you meant to match on a variant, consider making the path in the pattern qualified: `Stack::Nil`
+ | ^^^ help: to match on the variant, qualify the path: `Stack::Nil`
error: unreachable pattern
--> $DIR/issue-30302.rs:25:9
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ enum Foo {
+ Drop = assert_eq!(1, 1)
+ }
+}
--- /dev/null
+error[E0317]: if may be missing an else clause
+ --> $DIR/issue-50577.rs:13:16
+ |
+LL | Drop = assert_eq!(1, 1)
+ | ^^^^^^^^^^^^^^^^ expected (), found isize
+ |
+ = note: expected type `()`
+ found type `isize`
+ = note: this error originates in a macro outside of the current crate (in Nightly builds, run with -Z external-macro-backtrace for more info)
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0317`.
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(where_clauses_object_safety)]
+
+trait Trait {}
+
+trait X {
+ fn foo(&self) where Self: Trait; //~ ERROR the trait `X` cannot be made into an object
+ //~^ WARN this was previously accepted by the compiler but is being phased out
+}
+
+impl X for () {
+ fn foo(&self) {}
+}
+
+impl Trait for dyn X {}
+
+pub fn main() {
+ // Check that this does not segfault.
+ <X as X>::foo(&());
+}
--- /dev/null
+error: the trait `X` cannot be made into an object
+ --> $DIR/issue-50781.rs:16:5
+ |
+LL | fn foo(&self) where Self: Trait; //~ ERROR the trait `X` cannot be made into an object
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ |
+note: lint level defined here
+ --> $DIR/issue-50781.rs:11:9
+ |
+LL | #![deny(where_clauses_object_safety)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ = warning: this was previously accepted by the compiler but is being phased out; it will become a hard error in a future release!
+ = note: for more information, see issue #51443 <https://github.com/rust-lang/rust/issues/51443>
+ = note: method `foo` references the `Self` type in where clauses
+
+error: aborting due to previous error
+
// tests that the anonymous_parameters lint is warn-by-default on the 2018 edition
// compile-pass
-// compile-flags: --edition=2018
+// edition:2018
// run-rustfix
trait Foo {
// tests that the anonymous_parameters lint is warn-by-default on the 2018 edition
// compile-pass
-// compile-flags: --edition=2018
+// edition:2018
// run-rustfix
trait Foo {
pub fn char_type(p: char); //~ ERROR uses type `char`
pub fn i128_type(p: i128); //~ ERROR uses type `i128`
pub fn u128_type(p: u128); //~ ERROR uses type `u128`
- pub fn trait_type(p: &Clone); //~ ERROR uses type `std::clone::Clone`
+ pub fn trait_type(p: &Clone); //~ ERROR uses type `dyn std::clone::Clone`
pub fn tuple_type(p: (i32, i32)); //~ ERROR uses type `(i32, i32)`
pub fn tuple_type2(p: I32Pair); //~ ERROR uses type `(i32, i32)`
pub fn zero_size(p: ZeroSize); //~ ERROR struct has no fields
LL | pub fn u128_type(p: u128); //~ ERROR uses type `u128`
| ^^^^
-error: `extern` block uses type `std::clone::Clone` which is not FFI-safe: trait objects have no C equivalent
+error: `extern` block uses type `dyn std::clone::Clone` which is not FFI-safe: trait objects have no C equivalent
--> $DIR/lint-ctypes.rs:62:26
|
-LL | pub fn trait_type(p: &Clone); //~ ERROR uses type `std::clone::Clone`
+LL | pub fn trait_type(p: &Clone); //~ ERROR uses type `dyn std::clone::Clone`
| ^^^^^^
error: `extern` block uses type `(i32, i32)` which is not FFI-safe: tuples have unspecified layout
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+// run-rustfix
+
+#![warn(ellipsis_inclusive_range_patterns)]
+
+fn main() {
+ let despondency = 2;
+ match despondency {
+ 1..=2 => {}
+ //~^ WARN `...` range patterns are deprecated
+ _ => {}
+ }
+}
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// compile-pass
+// run-rustfix
+
+#![warn(ellipsis_inclusive_range_patterns)]
+
+fn main() {
+ let despondency = 2;
+ match despondency {
+ 1...2 => {}
+ //~^ WARN `...` range patterns are deprecated
+ _ => {}
+ }
+}
--- /dev/null
+warning: `...` range patterns are deprecated
+ --> $DIR/inclusive-range-pattern-syntax.rs:19:10
+ |
+LL | 1...2 => {}
+ | ^^^ help: use `..=` for an inclusive range
+ |
+note: lint level defined here
+ --> $DIR/inclusive-range-pattern-syntax.rs:14:9
+ |
+LL | #![warn(ellipsis_inclusive_range_patterns)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(trivial_casts, trivial_numeric_casts)]
+#![feature(type_ascription)]
+
+fn main() {
+ let lugubrious = 12i32 as i32;
+ //~^ ERROR trivial numeric cast
+ let haunted: &u32 = &99;
+ let _ = haunted as *const u32;
+ //~^ ERROR trivial cast
+}
--- /dev/null
+error: trivial numeric cast: `i32` as `i32`
+ --> $DIR/trivial-casts-featuring-type-ascription.rs:15:22
+ |
+LL | let lugubrious = 12i32 as i32;
+ | ^^^^^^^^^^^^
+ |
+note: lint level defined here
+ --> $DIR/trivial-casts-featuring-type-ascription.rs:11:24
+ |
+LL | #![deny(trivial_casts, trivial_numeric_casts)]
+ | ^^^^^^^^^^^^^^^^^^^^^
+ = help: cast can be replaced by coercion; this might require type ascription or a temporary variable
+
+error: trivial cast: `&u32` as `*const u32`
+ --> $DIR/trivial-casts-featuring-type-ascription.rs:18:13
+ |
+LL | let _ = haunted as *const u32;
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+note: lint level defined here
+ --> $DIR/trivial-casts-featuring-type-ascription.rs:11:9
+ |
+LL | #![deny(trivial_casts, trivial_numeric_casts)]
+ | ^^^^^^^^^^^^^
+ = help: cast can be replaced by coercion; this might require type ascription or a temporary variable
+
+error: aborting due to 2 previous errors
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(trivial_casts, trivial_numeric_casts)]
+
+fn main() {
+ let lugubrious = 12i32 as i32;
+ //~^ ERROR trivial numeric cast
+ let haunted: &u32 = &99;
+ let _ = haunted as *const u32;
+ //~^ ERROR trivial cast
+}
--- /dev/null
+error: trivial numeric cast: `i32` as `i32`
+ --> $DIR/trivial-casts.rs:14:22
+ |
+LL | let lugubrious = 12i32 as i32;
+ | ^^^^^^^^^^^^
+ |
+note: lint level defined here
+ --> $DIR/trivial-casts.rs:11:24
+ |
+LL | #![deny(trivial_casts, trivial_numeric_casts)]
+ | ^^^^^^^^^^^^^^^^^^^^^
+ = help: cast can be replaced by coercion; this might require a temporary variable
+
+error: trivial cast: `&u32` as `*const u32`
+ --> $DIR/trivial-casts.rs:17:13
+ |
+LL | let _ = haunted as *const u32;
+ | ^^^^^^^^^^^^^^^^^^^^^
+ |
+note: lint level defined here
+ --> $DIR/trivial-casts.rs:11:9
+ |
+LL | #![deny(trivial_casts, trivial_numeric_casts)]
+ | ^^^^^^^^^^^^^
+ = help: cast can be replaced by coercion; this might require a temporary variable
+
+error: aborting due to 2 previous errors
+
LL | | };
| |_____^ expected bound lifetime parameter 'a, found concrete lifetime
|
- = note: expected type `&for<'a, 'b> Foo<&'a u8, &'b u8>`
- found type `&for<'a> Foo<&'a u8, &'a u8>`
+ = note: expected type `&dyn for<'a, 'b> Foo<&'a u8, &'b u8>`
+ found type `&dyn for<'a> Foo<&'a u8, &'a u8>`
= note: this was previously accepted by the compiler but has been phased out
= note: for more information, see https://github.com/rust-lang/rust/issues/45852
LL | let _ = v as *const [u8]; //~ ERROR cannot cast
| ^^^^^^^^^^^^^^^^
-error[E0606]: casting `&Foo` as `*const str` is invalid
+error[E0606]: casting `&dyn Foo` as `*const str` is invalid
--> $DIR/cast-rfc0401.rs:64:13
|
LL | let _ = foo as *const str; //~ ERROR is invalid
| ^^^^^^^^^^^^^^^^^
-error[E0606]: casting `&Foo` as `*mut str` is invalid
+error[E0606]: casting `&dyn Foo` as `*mut str` is invalid
--> $DIR/cast-rfc0401.rs:65:13
|
LL | let _ = foo as *mut str; //~ ERROR is invalid
|
= help: cast through a thin pointer first
-error[E0606]: casting `*const Foo` as `*const [u16]` is invalid
+error[E0606]: casting `*const dyn Foo` as `*const [u16]` is invalid
--> $DIR/cast-rfc0401.rs:78:13
|
LL | let _ = cf as *const [u16]; //~ ERROR is invalid
|
= note: vtable kinds may not match
-error[E0606]: casting `*const Foo` as `*const Bar` is invalid
+error[E0606]: casting `*const dyn Foo` as `*const dyn Bar` is invalid
--> $DIR/cast-rfc0401.rs:79:13
|
LL | let _ = cf as *const Bar; //~ ERROR is invalid
|
= help: the trait `std::marker::Sized` is not implemented for `[u8]`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
- = note: required for the cast to the object type `Foo`
+ = note: required for the cast to the object type `dyn Foo`
error[E0277]: the size for value values of type `str` cannot be known at compilation time
--> $DIR/cast-rfc0401.rs:72:13
|
= help: the trait `std::marker::Sized` is not implemented for `str`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
- = note: required for the cast to the object type `Foo`
+ = note: required for the cast to the object type `dyn Foo`
error[E0606]: casting `&{float}` as `f32` is invalid
--> $DIR/cast-rfc0401.rs:81:30
--> $DIR/issue-19109.rs:14:5
|
LL | fn function(t: &mut Trait) {
- | - help: try adding a return type: `-> *mut Trait`
+ | - help: try adding a return type: `-> *mut dyn Trait`
LL | t as *mut Trait
| ^^^^^^^^^^^^^^^ expected (), found *-ptr
|
= note: expected type `()`
- found type `*mut Trait`
+ found type `*mut dyn Trait`
error: aborting due to previous error
LL | a(x); //~ ERROR mismatched types [E0308]
| ^ expected trait `Foo + std::marker::Send`, found trait `Foo`
|
- = note: expected type `std::boxed::Box<Foo + std::marker::Send + 'static>`
- found type `std::boxed::Box<Foo + 'static>`
+ = note: expected type `std::boxed::Box<(dyn Foo + std::marker::Send + 'static)>`
+ found type `std::boxed::Box<(dyn Foo + 'static)>`
error: aborting due to previous error
LL | touch(&x); //~ ERROR use of partially moved value: `x`
| ^^ value borrowed here after move
|
- = note: move occurs because `x` has type `Foo<std::string::String>`, which does not implement the `Copy` trait
+ = note: move occurs because `x.f` has type `std::string::String`, which does not implement the `Copy` trait
error: aborting due to previous error
= note: where '_#1r: '_#0r
error: free region `ReFree(DefId(0/0:6 ~ propagate_approximated_shorter_to_static_no_bound[317d]::supply[0]), BrNamed(crate0:DefIndex(1:16), 'a))` does not outlive free region `ReStatic`
- --> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:45:5
+ --> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:45:47
|
-LL | / establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
+LL | establish_relationships(&cell_a, &cell_b, |_outlives, x, y| {
+ | _______________________________________________^
LL | | //~^ ERROR does not outlive free region
LL | |
LL | | // Only works if 'x: 'y:
LL | | demand_y(x, y, x.get()) //~ WARNING not reporting region error due to nll
LL | | });
- | |______^
+ | |_____^
note: No external requirements
--> $DIR/propagate-approximated-shorter-to-static-no-bound.rs:44:1
= note: where '_#1r: '_#0r
error: free region `ReFree(DefId(0/0:6 ~ propagate_approximated_shorter_to_static_wrong_bound[317d]::supply[0]), BrNamed(crate0:DefIndex(1:16), 'a))` does not outlive free region `ReStatic`
- --> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:48:5
+ --> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:48:47
|
-LL | / establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
+LL | establish_relationships(&cell_a, &cell_b, |_outlives1, _outlives2, x, y| {
+ | _______________________________________________^
LL | | //~^ ERROR does not outlive free region
LL | | // Only works if 'x: 'y:
LL | | demand_y(x, y, x.get())
LL | | //~^ WARNING not reporting region error due to nll
LL | | });
- | |______^
+ | |_____^
note: No external requirements
--> $DIR/propagate-approximated-shorter-to-static-wrong-bound.rs:47:1
--- /dev/null
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+//
+// An additional regression test for the issue #50716 “NLL ignores lifetimes
+// bounds derived from `Sized` requirements” that checks that the fixed compiler
+// accepts this code fragment with both AST and MIR borrow checkers.
+//
+// revisions: ast mir
+//
+// compile-pass
+
+#![cfg_attr(mir, feature(nll))]
+
+struct Qey<Q: ?Sized>(Q);
+
+fn main() {}
--- /dev/null
+// Copyright 2012 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+//
+// Regression test for the issue #50716: NLL ignores lifetimes bounds
+// derived from `Sized` requirements
+
+#![feature(nll)]
+
+trait A {
+ type X: ?Sized;
+}
+
+fn foo<'a, T: 'static>(s: Box<<&'a T as A>::X>)
+where
+ for<'b> &'b T: A,
+ <&'static T as A>::X: Sized
+{
+ let _x = *s; //~ ERROR free region `'a` does not outlive free region `'static`
+}
+
+fn main() {}
--- /dev/null
+error: free region `'a` does not outlive free region `'static`
+ --> $DIR/issue-50716.rs:25:14
+ |
+LL | let _x = *s; //~ ERROR free region `'a` does not outlive free region `'static`
+ | ^^
+
+error: aborting due to previous error
+
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![allow(warnings)]
+#![feature(nll)]
+
+fn main() {
+ let range = 0..1;
+ let r = range;
+ let x = range.start;
+ //~^ ERROR use of moved value: `range.start` [E0382]
+}
--- /dev/null
+error[E0382]: use of moved value: `range.start`
+ --> $DIR/issue-51512.rs:17:13
+ |
+LL | let r = range;
+ | ----- value moved here
+LL | let x = range.start;
+ | ^^^^^^^^^^^ value used here after move
+ |
+ = note: move occurs because `range` has type `std::ops::Range<i32>`, which does not implement the `Copy` trait
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0382`.
'_#1r,
T,
i32,
- extern "rust-call" fn((std::boxed::Box<T>,)) -> std::boxed::Box<Anything + '_#2r>
+ extern "rust-call" fn((std::boxed::Box<T>,)) -> std::boxed::Box<(dyn Anything + '_#2r)>
]
= note: number of external vids: 3
= note: where <T as std::iter::Iterator>::Item: '_#2r
'_#1r,
T,
i32,
- extern "rust-call" fn((std::boxed::Box<T>,)) -> std::boxed::Box<Anything + '_#2r>
+ extern "rust-call" fn((std::boxed::Box<T>,)) -> std::boxed::Box<(dyn Anything + '_#2r)>
]
= note: number of external vids: 3
= note: where <T as std::iter::Iterator>::Item: '_#2r
'_#2r,
T,
i32,
- extern "rust-call" fn((std::boxed::Box<T>,)) -> std::boxed::Box<Anything + '_#3r>
+ extern "rust-call" fn((std::boxed::Box<T>,)) -> std::boxed::Box<(dyn Anything + '_#3r)>
]
= note: number of external vids: 4
= note: where <T as std::iter::Iterator>::Item: '_#3r
'_#2r,
T,
i32,
- extern "rust-call" fn((std::boxed::Box<T>,)) -> std::boxed::Box<Anything + '_#3r>
+ extern "rust-call" fn((std::boxed::Box<T>,)) -> std::boxed::Box<(dyn Anything + '_#3r)>
]
= note: number of external vids: 4
= note: where <T as std::iter::Iterator>::Item: '_#3r
'_#1r,
T,
i32,
- extern "rust-call" fn((std::boxed::Box<T>,)) -> std::boxed::Box<std::fmt::Debug + '_#2r>
+ extern "rust-call" fn((std::boxed::Box<T>,)) -> std::boxed::Box<(dyn std::fmt::Debug + '_#2r)>
]
= note: number of external vids: 3
= note: where T: '_#2r
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// compile-flags: --edition=2018
+// edition:2018
#![feature(arbitrary_self_types, async_await, await_macro, futures_api, pin)]
// older ... syntax is still allowed as a stability guarantee.
#![feature(box_patterns)]
+#![warn(ellipsis_inclusive_range_patterns)]
+
pub fn main() {
match &12 {
&0...9 => {}
+ //~^ WARN `...` range patterns are deprecated
+ //~| HELP use `..=` for an inclusive range
&10..=15 => {}
//~^ ERROR the range pattern here has ambiguous interpretation
//~^^ HELP add parentheses to clarify the precedence
match Box::new(12) {
box 0...9 => {}
+ //~^ WARN `...` range patterns are deprecated
+ //~| HELP use `..=` for an inclusive range
box 10..=15 => {}
//~^ ERROR the range pattern here has ambiguous interpretation
//~^^ HELP add parentheses to clarify the precedence
error: the range pattern here has ambiguous interpretation
- --> $DIR/range-inclusive-pattern-precedence.rs:23:10
+ --> $DIR/range-inclusive-pattern-precedence.rs:27:10
|
LL | &10..=15 => {}
| ^^^^^^^ help: add parentheses to clarify the precedence: `(10 ..=15)`
error: the range pattern here has ambiguous interpretation
- --> $DIR/range-inclusive-pattern-precedence.rs:32:13
+ --> $DIR/range-inclusive-pattern-precedence.rs:38:13
|
LL | box 10..=15 => {}
| ^^^^^^^ help: add parentheses to clarify the precedence: `(10 ..=15)`
+warning: `...` range patterns are deprecated
+ --> $DIR/range-inclusive-pattern-precedence.rs:24:11
+ |
+LL | &0...9 => {}
+ | ^^^ help: use `..=` for an inclusive range
+ |
+note: lint level defined here
+ --> $DIR/range-inclusive-pattern-precedence.rs:19:9
+ |
+LL | #![warn(ellipsis_inclusive_range_patterns)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+warning: `...` range patterns are deprecated
+ --> $DIR/range-inclusive-pattern-precedence.rs:35:14
+ |
+LL | box 0...9 => {}
+ | ^^^ help: use `..=` for an inclusive range
+
error: aborting due to 2 previous errors
-error[E0277]: the size for value values of type `I + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn I + 'static)` cannot be known at compilation time
--> $DIR/issue-5035-2.rs:14:8
|
LL | fn foo(_x: K) {}
| ^^ doesn't have a size known at compile-time
|
- = help: the trait `std::marker::Sized` is not implemented for `I + 'static`
+ = help: the trait `std::marker::Sized` is not implemented for `(dyn I + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: all local variables must have a statically known size
--> $DIR/borrowck-object-mutability.rs:19:5
|
LL | fn borrowed_receiver(x: &Foo) {
- | ---- help: consider changing this to be a mutable reference: `&mut Foo`
+ | ---- help: consider changing this to be a mutable reference: `&mut dyn Foo`
LL | x.borrowed();
LL | x.borrowed_mut(); //~ ERROR cannot borrow
| ^ `x` is a `&` reference, so the data it refers to cannot be borrowed as mutable
|
= help: did you mean to write `w.wrap.not_closure` instead of `w.wrap.not_closure(...)`?
-error[E0599]: no method named `closure` found for type `Obj<std::boxed::Box<std::boxed::FnBox<(), Output=u32> + 'static>>` in the current scope
+error[E0599]: no method named `closure` found for type `Obj<std::boxed::Box<(dyn std::boxed::FnBox<(), Output=u32> + 'static)>>` in the current scope
--> $DIR/issue-2392.rs:72:24
|
LL | struct Obj<F> where F: FnOnce() -> u32 {
--- /dev/null
+// Copyright 2018 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() {
+ let _redemptive = 1...21;
+ //~^ ERROR unexpected token
+}
--- /dev/null
+error: unexpected token: `...`
+ --> $DIR/dotdotdot-expr.rs:12:24
+ |
+LL | let _redemptive = 1...21;
+ | ^^^
+help: use `..` for an exclusive range
+ |
+LL | let _redemptive = 1..21;
+ | ^^
+help: or `..=` for an inclusive range
+ |
+LL | let _redemptive = 1..=21;
+ | ^^^
+
+error: aborting due to previous error
+
|
= note: #[warn(trivial_bounds)] on by default
-warning: Trait bound for<'a> T<A + 'a>: std::marker::Sized does not depend on any type or lifetime parameters
+warning: Trait bound for<'a> T<(dyn A + 'a)>: std::marker::Sized does not depend on any type or lifetime parameters
--> $DIR/trivial-bounds-inconsistent-sized.rs:26:1
|
LL | / fn unsized_local() where for<'a> T<A + 'a>: Sized {
LL | struct TwoStrs(str, str) where str: Sized;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-warning: Trait bound for<'a> Dst<A + 'a>: std::marker::Sized does not depend on any type or lifetime parameters
+warning: Trait bound for<'a> Dst<(dyn A + 'a)>: std::marker::Sized does not depend on any type or lifetime parameters
--> $DIR/trivial-bounds-inconsistent.rs:65:1
|
LL | / fn unsized_local() where for<'a> Dst<A + 'a>: Sized {
| ^^^^^
= note: but, the lifetime must be valid for the static lifetime...
= note: ...so that the expression is assignable:
- expected std::boxed::Box<std::iter::Iterator<Item=&T> + 'static>
- found std::boxed::Box<std::iter::Iterator<Item=&T>>
+ expected std::boxed::Box<(dyn std::iter::Iterator<Item=&T> + 'static)>
+ found std::boxed::Box<dyn std::iter::Iterator<Item=&T>>
error: aborting due to previous error
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: no field of an enum variant may have a dynamically sized type
-error[E0277]: the size for value values of type `Foo + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn Foo + 'static)` cannot be known at compilation time
--> $DIR/unsized-enum2.rs:63:8
|
LL | VM(Foo),
| ^^^ doesn't have a size known at compile-time
|
- = help: the trait `std::marker::Sized` is not implemented for `Foo + 'static`
+ = help: the trait `std::marker::Sized` is not implemented for `(dyn Foo + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: no field of an enum variant may have a dynamically sized type
-error[E0277]: the size for value values of type `Bar + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn Bar + 'static)` cannot be known at compilation time
--> $DIR/unsized-enum2.rs:65:8
|
LL | VN{x: Bar},
| ^^^^^^ doesn't have a size known at compile-time
|
- = help: the trait `std::marker::Sized` is not implemented for `Bar + 'static`
+ = help: the trait `std::marker::Sized` is not implemented for `(dyn Bar + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: no field of an enum variant may have a dynamically sized type
-error[E0277]: the size for value values of type `FooBar + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn FooBar + 'static)` cannot be known at compilation time
--> $DIR/unsized-enum2.rs:67:15
|
LL | VO(isize, FooBar),
| ^^^^^^ doesn't have a size known at compile-time
|
- = help: the trait `std::marker::Sized` is not implemented for `FooBar + 'static`
+ = help: the trait `std::marker::Sized` is not implemented for `(dyn FooBar + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: no field of an enum variant may have a dynamically sized type
-error[E0277]: the size for value values of type `BarFoo + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn BarFoo + 'static)` cannot be known at compilation time
--> $DIR/unsized-enum2.rs:69:18
|
LL | VP{u: isize, x: BarFoo},
| ^^^^^^^^^ doesn't have a size known at compile-time
|
- = help: the trait `std::marker::Sized` is not implemented for `BarFoo + 'static`
+ = help: the trait `std::marker::Sized` is not implemented for `(dyn BarFoo + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: no field of an enum variant may have a dynamically sized type
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: no field of an enum variant may have a dynamically sized type
-error[E0277]: the size for value values of type `PathHelper1 + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn PathHelper1 + 'static)` cannot be known at compilation time
--> $DIR/unsized-enum2.rs:53:8
|
LL | VI(Path1),
| ^^^^^ doesn't have a size known at compile-time
|
- = help: within `Path1`, the trait `std::marker::Sized` is not implemented for `PathHelper1 + 'static`
+ = help: within `Path1`, the trait `std::marker::Sized` is not implemented for `(dyn PathHelper1 + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: required because it appears within the type `Path1`
= note: no field of an enum variant may have a dynamically sized type
-error[E0277]: the size for value values of type `PathHelper2 + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn PathHelper2 + 'static)` cannot be known at compilation time
--> $DIR/unsized-enum2.rs:55:8
|
LL | VJ{x: Path2},
| ^^^^^^^^ doesn't have a size known at compile-time
|
- = help: within `Path2`, the trait `std::marker::Sized` is not implemented for `PathHelper2 + 'static`
+ = help: within `Path2`, the trait `std::marker::Sized` is not implemented for `(dyn PathHelper2 + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: required because it appears within the type `Path2`
= note: no field of an enum variant may have a dynamically sized type
-error[E0277]: the size for value values of type `PathHelper3 + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn PathHelper3 + 'static)` cannot be known at compilation time
--> $DIR/unsized-enum2.rs:57:15
|
LL | VK(isize, Path3),
| ^^^^^ doesn't have a size known at compile-time
|
- = help: within `Path3`, the trait `std::marker::Sized` is not implemented for `PathHelper3 + 'static`
+ = help: within `Path3`, the trait `std::marker::Sized` is not implemented for `(dyn PathHelper3 + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: required because it appears within the type `Path3`
= note: no field of an enum variant may have a dynamically sized type
-error[E0277]: the size for value values of type `PathHelper4 + 'static` cannot be known at compilation time
+error[E0277]: the size for value values of type `(dyn PathHelper4 + 'static)` cannot be known at compilation time
--> $DIR/unsized-enum2.rs:59:18
|
LL | VL{u: isize, x: Path4},
| ^^^^^^^^ doesn't have a size known at compile-time
|
- = help: within `Path4`, the trait `std::marker::Sized` is not implemented for `PathHelper4 + 'static`
+ = help: within `Path4`, the trait `std::marker::Sized` is not implemented for `(dyn PathHelper4 + 'static)`
= note: to learn more, visit <https://doc.rust-lang.org/book/second-edition/ch19-04-advanced-types.html#dynamically-sized-types--sized>
= note: required because it appears within the type `Path4`
= note: no field of an enum variant may have a dynamically sized type
cargo_release: String,
rls_release: String,
rustfmt_release: String,
+ llvm_tools_release: String,
input: PathBuf,
output: PathBuf,
cargo_version: Option<String>,
rls_version: Option<String>,
rustfmt_version: Option<String>,
+ llvm_tools_version: Option<String>,
rust_git_commit_hash: Option<String>,
cargo_git_commit_hash: Option<String>,
rls_git_commit_hash: Option<String>,
rustfmt_git_commit_hash: Option<String>,
+ llvm_tools_git_commit_hash: Option<String>,
}
fn main() {
let cargo_release = args.next().unwrap();
let rls_release = args.next().unwrap();
let rustfmt_release = args.next().unwrap();
- let _llvm_tools_vers = args.next().unwrap(); // FIXME do something with it?
+ let llvm_tools_release = args.next().unwrap();
let s3_address = args.next().unwrap();
let mut passphrase = String::new();
t!(io::stdin().read_to_string(&mut passphrase));
cargo_release,
rls_release,
rustfmt_release,
+ llvm_tools_release,
input,
output,
cargo_version: None,
rls_version: None,
rustfmt_version: None,
+ llvm_tools_version: None,
rust_git_commit_hash: None,
cargo_git_commit_hash: None,
rls_git_commit_hash: None,
rustfmt_git_commit_hash: None,
+ llvm_tools_git_commit_hash: None,
}.build();
}
self.cargo_version = self.version("cargo", "x86_64-unknown-linux-gnu");
self.rls_version = self.version("rls", "x86_64-unknown-linux-gnu");
self.rustfmt_version = self.version("rustfmt", "x86_64-unknown-linux-gnu");
+ self.llvm_tools_version = self.version("llvm-tools", "x86_64-unknown-linux-gnu");
self.rust_git_commit_hash = self.git_commit_hash("rust", "x86_64-unknown-linux-gnu");
self.cargo_git_commit_hash = self.git_commit_hash("cargo", "x86_64-unknown-linux-gnu");
self.rls_git_commit_hash = self.git_commit_hash("rls", "x86_64-unknown-linux-gnu");
self.rustfmt_git_commit_hash = self.git_commit_hash("rustfmt", "x86_64-unknown-linux-gnu");
+ self.llvm_tools_git_commit_hash = self.git_commit_hash("llvm-tools",
+ "x86_64-unknown-linux-gnu");
self.digest_and_sign();
let manifest = self.build_manifest();
self.package("rls-preview", &mut manifest.pkg, HOSTS);
self.package("rustfmt-preview", &mut manifest.pkg, HOSTS);
self.package("rust-analysis", &mut manifest.pkg, TARGETS);
+ self.package("llvm-tools", &mut manifest.pkg, TARGETS);
let rls_present = manifest.pkg.contains_key("rls-preview");
let rustfmt_present = manifest.pkg.contains_key("rustfmt-preview");
+ let llvm_tools_present = manifest.pkg.contains_key("llvm-tools");
if rls_present {
manifest.renames.insert("rls".to_owned(), Rename { to: "rls-preview".to_owned() });
target: host.to_string(),
});
}
+ if llvm_tools_present {
+ extensions.push(Component {
+ pkg: "llvm-tools".to_string(),
+ target: host.to_string(),
+ });
+ }
extensions.push(Component {
pkg: "rust-analysis".to_string(),
target: host.to_string(),
format!("rls-{}-{}.tar.gz", self.rls_release, target)
} else if component == "rustfmt" || component == "rustfmt-preview" {
format!("rustfmt-{}-{}.tar.gz", self.rustfmt_release, target)
+ } else if component == "llvm-tools" {
+ format!("llvm-tools-{}-{}.tar.gz", self.llvm_tools_release, target)
} else {
format!("{}-{}-{}.tar.gz", component, self.rust_release, target)
}
&self.rls_version
} else if component == "rustfmt" || component == "rustfmt-preview" {
&self.rustfmt_version
+ } else if component == "llvm-tools" {
+ &self.llvm_tools_version
} else {
&self.rust_version
}
&self.rls_git_commit_hash
} else if component == "rustfmt" || component == "rustfmt-preview" {
&self.rustfmt_git_commit_hash
+ } else if component == "llvm-tools" {
+ &self.llvm_tools_git_commit_hash
} else {
&self.rust_git_commit_hash
}
.extend(flags.split_whitespace().map(|s| s.to_owned()));
}
+ if let Some(edition) = config.parse_edition(ln) {
+ self.compile_flags.push(format!("--edition={}", edition));
+ }
+
if let Some(r) = config.parse_revisions(ln) {
self.revisions.extend(r);
}
self.compile_pass = config.parse_compile_pass(ln) || self.run_pass;
}
- if !self.skip_codegen {
- self.skip_codegen = config.parse_skip_codegen(ln);
- }
+ if !self.skip_codegen {
+ self.skip_codegen = config.parse_skip_codegen(ln);
+ }
if !self.disable_ui_testing_normalization {
self.disable_ui_testing_normalization =
fn parse_run_rustfix(&self, line: &str) -> bool {
self.parse_name_directive(line, "run-rustfix")
}
+
+ fn parse_edition(&self, line: &str) -> Option<String> {
+ self.parse_name_value_directive(line, "edition")
+ }
}
pub fn lldb_version_to_int(version_string: &str) -> isize {
.arg(out_dir)
.arg(&self.testpaths.file)
.args(&self.props.compile_flags);
+
if let Some(ref linker) = self.config.linker {
rustdoc
.arg("--linker")