version = "0.0.0"
dependencies = [
"arena 0.0.0",
- "bitflags 1.0.4 (registry+https://github.com/rust-lang/crates.io-index)",
"byteorder 1.2.7 (registry+https://github.com/rust-lang/crates.io-index)",
"either 1.5.0 (registry+https://github.com/rust-lang/crates.io-index)",
"graphviz 0.0.0",
/// Run this rule for all hosts without cross compiling.
const ONLY_HOSTS: bool = false;
- /// Primary function to execute this rule. Can call `builder.ensure(...)`
+ /// Primary function to execute this rule. Can call `builder.ensure()`
/// with other steps to run those.
fn run(self, builder: &Builder) -> Self::Output;
/// When bootstrap is passed a set of paths, this controls whether this rule
/// will execute. However, it does not get called in a "default" context
- /// when we are not passed any paths; in that case, make_run is called
+ /// when we are not passed any paths; in that case, `make_run` is called
/// directly.
fn should_run(run: ShouldRun) -> ShouldRun;
- /// Build up a "root" rule, either as a default rule or from a path passed
+ /// Builds up a "root" rule, either as a default rule or from a path passed
/// to us.
///
/// When path is `None`, we are executing in a context where no paths were
add_lib_path(vec![self.rustc_libdir(compiler)], cmd);
}
- /// Get a path to the compiler specified.
+ /// Gets a path to the compiler specified.
pub fn rustc(&self, compiler: Compiler) -> PathBuf {
if compiler.is_snapshot(self) {
self.initial_rustc.clone()
}
}
- /// Get the paths to all of the compiler's codegen backends.
+ /// Gets the paths to all of the compiler's codegen backends.
fn codegen_backends(&self, compiler: Compiler) -> impl Iterator<Item = PathBuf> {
fs::read_dir(self.sysroot_codegen_backends(compiler))
.into_iter()
let compiler = self.compiler(self.top_stage, host);
cmd.env("RUSTC_STAGE", compiler.stage.to_string())
.env("RUSTC_SYSROOT", self.sysroot(compiler))
- .env(
- "RUSTDOC_LIBDIR",
- self.sysroot_libdir(compiler, self.config.build),
- )
+ // Note that this is *not* the `sysroot_libdir` because rustdoc must be linked
+ // equivalently to rustc.
+ .env("RUSTDOC_LIBDIR", self.rustc_libdir(compiler))
.env("CFG_RELEASE_CHANNEL", &self.config.channel)
.env("RUSTDOC_REAL", self.rustdoc(host))
.env("RUSTDOC_CRATE_VERSION", self.rust_version())
} else {
&maybe_sysroot
};
- let libdir = sysroot.join(libdir(&compiler.host));
+ let libdir = self.rustc_libdir(compiler);
// Customize the compiler we're running. Specify the compiler to cargo
// as our shim and then pass it some various options used to configure
cargo.env("RUSTC_ERROR_FORMAT", error_format);
}
if cmd != "build" && cmd != "check" && cmd != "rustc" && want_rustdoc {
- cargo.env("RUSTDOC_LIBDIR", self.sysroot_libdir(compiler, self.config.build));
+ cargo.env("RUSTDOC_LIBDIR", self.rustc_libdir(compiler));
}
if mode.is_tool() {
pub static ref INTERNER: Interner = Interner::default();
}
-/// This is essentially a HashMap which allows storing any type in its input and
+/// This is essentially a `HashMap` which allows storing any type in its input and
/// any type in its output. It is a write-once cache; values are never evicted,
/// which means that references to the value can safely be returned from the
-/// get() method.
+/// `get()` method.
#[derive(Debug)]
pub struct Cache(
RefCell<HashMap<
});
}
- /// Build the compiler.
+ /// Builds the compiler.
///
/// This will build the compiler for a particular stage of the build using
/// the `compiler` targeting the `target` architecture. The artifacts
//! Responsible for cleaning out a build directory of all old and stale
//! artifacts to prepare for a fresh build. Currently doesn't remove the
//! `build/cache` directory (download cache) or the `build/$target/llvm`
-//! directory unless the --all flag is present.
+//! directory unless the `--all` flag is present.
use std::fs;
use std::io::{self, ErrorKind};
});
}
- /// Build the standard library.
+ /// Builds the standard library.
///
/// This will build the standard library for a particular stage of the build
/// using the `compiler` targeting the `target` architecture. The artifacts
});
}
- /// Build and prepare startup objects like rsbegin.o and rsend.o
+ /// Builds and prepares startup objects like rsbegin.o and rsend.o
///
/// These are primarily used on Windows right now for linking executables/dlls.
/// They don't require any library support as they're just plain old object
});
}
- /// Build libtest.
+ /// Builds libtest.
///
/// This will build libtest and supporting libraries for a particular stage of
/// the build using the `compiler` targeting the `target` architecture. The
});
}
- /// Build the compiler.
+ /// Builds the compiler.
///
/// This will build the compiler for a particular stage of the build using
/// the `compiler` targeting the `target` architecture. The artifacts
run.builder.ensure(Mingw { host: run.target });
}
- /// Build the `rust-mingw` installer component.
+ /// Builds the `rust-mingw` installer component.
///
/// This contains all the bits and pieces to run the MinGW Windows targets
/// without any extra installed software (e.g., we bundle gcc, libraries, etc).
});
}
- /// Build the book and associated stuff.
+ /// Builds the book and associated stuff.
///
/// We need to build:
///
});
}
- /// Generate whitelisted compiler crate documentation.
+ /// Generates whitelisted compiler crate documentation.
///
/// This will generate all documentation for crates that are whitelisted
/// to be included in the standard documentation. This documentation is
});
}
- /// Generate compiler documentation.
+ /// Generates compiler documentation.
///
/// This will generate all documentation for compiler and dependencies.
/// Compiler documentation is distributed separately, so we make sure
});
}
- /// Generate compiler documentation.
+ /// Generates compiler documentation.
///
/// This will generate all documentation for compiler and dependencies.
/// Compiler documentation is distributed separately, so we make sure
//! ## Copying stage0 {std,test,rustc}
//!
//! This copies the build output from Cargo into
-//! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: This step's
+//! `build/$HOST/stage0-sysroot/lib/rustlib/$ARCH/lib`. FIXME: this step's
//! documentation should be expanded -- the information already here may be
//! incorrect.
//!
cleared
}
- /// Get the space-separated set of activated features for the standard
+ /// Gets the space-separated set of activated features for the standard
/// library.
fn std_features(&self) -> String {
let mut features = "panic-unwind".to_string();
features
}
- /// Get the space-separated set of activated features for the compiler.
+ /// Gets the space-separated set of activated features for the compiler.
fn rustc_features(&self) -> String {
let mut features = String::new();
if self.config.jemalloc {
self.out.join(&*target).join("crate-docs")
}
- /// Returns true if no custom `llvm-config` is set for the specified target.
+ /// Returns `true` if no custom `llvm-config` is set for the specified target.
///
/// If no custom `llvm-config` was specified then Rust's llvm will be used.
fn is_rust_llvm(&self, target: Interned<String>) -> bool {
.map(|p| &**p)
}
- /// Returns true if this is a no-std `target`, if defined
+ /// Returns `true` if this is a no-std `target`, if defined
fn no_std(&self, target: Interned<String>) -> Option<bool> {
self.config.target_config.get(&target)
.map(|t| t.no_std)
}
- /// Returns whether the target will be tested using the `remote-test-client`
+ /// Returns `true` if the target will be tested using the `remote-test-client`
/// and `remote-test-server` binaries.
fn remote_tested(&self, target: Interned<String>) -> bool {
self.qemu_rootfs(target).is_some() || target.contains("android") ||
self.rust_info.version(self, channel::CFG_RELEASE_NUM)
}
- /// Return the full commit hash
+ /// Returns the full commit hash.
fn rust_sha(&self) -> Option<&str> {
self.rust_info.sha()
}
panic!("failed to find version in {}'s Cargo.toml", package)
}
- /// Returns whether unstable features should be enabled for the compiler
+ /// Returns `true` if unstable features should be enabled for the compiler
/// we're building.
fn unstable_features(&self) -> bool {
match &self.config.channel[..] {
self
}
- /// Returns whether this is a snapshot compiler for `build`'s configuration
+ /// Returns `true` if this is a snapshot compiler for `build`'s configuration
pub fn is_snapshot(&self, build: &Build) -> bool {
self.stage == 0 && self.host == build.build
}
/// The two modes of the test runner; tests or benchmarks.
#[derive(Debug, PartialEq, Eq, Hash, Copy, Clone, PartialOrd, Ord)]
pub enum TestKind {
- /// Run `cargo test`
+ /// Run `cargo test`.
Test,
- /// Run `cargo bench`
+ /// Run `cargo bench`.
Bench,
}
run.never()
}
- /// Run `rustdoc --test` for all documentation in `src/doc`.
+ /// Runs `rustdoc --test` for all documentation in `src/doc`.
///
/// This will run all tests in our markdown documentation (e.g., the book)
/// located in `src/doc`. The `rustdoc` that's run is the one that sits next to
});
}
- /// Run the error index generator tool to execute the tests located in the error
+ /// Runs the error index generator tool to execute the tests located in the error
/// index.
///
/// The `error_index_generator` tool lives in `src/tools` and is used to
}
}
- /// Run all unit tests plus documentation tests for a given crate defined
+ /// Runs all unit tests plus documentation tests for a given crate defined
/// by a `Cargo.toml` (single manifest)
///
/// This is what runs tests for crates like the standard library, compiler, etc.
/// the standard library and such to the emulator ahead of time. This step
/// represents this and is a dependency of all test suites.
///
-/// Most of the time this is a noop. For some steps such as shipping data to
+/// Most of the time this is a no-op. For some steps such as shipping data to
/// QEMU we have to build our own tools so we've got conditional dependencies
/// on those programs as well. Note that the remote test client is built for
/// the build target (us) and the server is built for the target.
run.builder.ensure(Distcheck);
}
- /// Run "distcheck", a 'make check' from a tarball
+ /// Runs "distcheck", a 'make check' from a tarball
fn run(self, builder: &Builder) {
builder.info("Distcheck");
let dir = builder.out.join("tmp").join("distcheck");
const DEFAULT: bool = true;
const ONLY_HOSTS: bool = true;
- /// Test the build system itself
+ /// Tests the build system itself.
fn run(self, builder: &Builder) {
let mut cmd = Command::new(&builder.initial_cargo);
cmd.arg("test")
run.never()
}
- /// Build a tool in `src/tools`
+ /// Builds a tool in `src/tools`
///
/// This will build the specified tool with the specified `host` compiler in
/// `stage` into the normal cargo output directory.
fn run(self, builder: &Builder) -> PathBuf {
let target_compiler = builder.compiler(builder.top_stage, self.host);
+ if target_compiler.stage == 0 {
+ if !target_compiler.is_snapshot(builder) {
+ panic!("rustdoc in stage 0 must be snapshot rustdoc");
+ }
+ return builder.initial_rustc.with_file_name(exe("rustdoc", &target_compiler.host));
+ }
let target = target_compiler.host;
- let build_compiler = if target_compiler.stage == 0 {
- builder.compiler(0, builder.config.build)
- } else if target_compiler.stage >= 2 {
- // Past stage 2, we consider the compiler to be ABI-compatible and hence capable of
- // building rustdoc itself.
- builder.compiler(target_compiler.stage, builder.config.build)
- } else {
- // Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise
- // we'd have stageN/bin/rustc and stageN/bin/rustdoc be effectively different stage
- // compilers, which isn't what we want.
- builder.compiler(target_compiler.stage - 1, builder.config.build)
- };
-
- builder.ensure(compile::Rustc { compiler: build_compiler, target });
- builder.ensure(compile::Rustc {
- compiler: build_compiler,
- target: builder.config.build,
- });
+ // Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise
+ // we'd have stageN/bin/rustc and stageN/bin/rustdoc be effectively different stage
+ // compilers, which isn't what we want. Rustdoc should be linked in the same way as the
+ // rustc compiler it's paired with, so it must be built with the previous stage compiler.
+ let build_compiler = builder.compiler(target_compiler.stage - 1, builder.config.build);
+
+ // The presence of `target_compiler` ensures that the necessary libraries (codegen backends,
+ // compiler libraries, ...) are built. Rustdoc does not require the presence of any
+ // libraries within sysroot_libdir (i.e., rustlib), though doctests may want it (since
+ // they'll be linked to those libraries). As such, don't explicitly `ensure` any additional
+ // libraries here. The intuition here is that if we've built a compiler, we should be able
+ // to build rustdoc.
let mut cargo = prepare_tool_cargo(
builder,
);
impl<'a> Builder<'a> {
- /// Get a `Command` which is ready to run `tool` in `stage` built for
+ /// Gets a `Command` which is ready to run `tool` in `stage` built for
/// `host`.
pub fn tool_cmd(&self, tool: Tool) -> Command {
let mut cmd = Command::new(self.tool_exe(tool));
}
}
-/// Returns whether the file name given looks like a dynamic library.
+/// Returns `true` if the file name given looks like a dynamic library.
pub fn is_dylib(name: &str) -> bool {
name.ends_with(".dylib") || name.ends_with(".so") || name.ends_with(".dll")
}
.unwrap_or(UNIX_EPOCH)
}
-/// Returns whether `dst` is up to date given that the file or files in `src`
+/// Returns `true` if `dst` is up to date given that the file or files in `src`
/// are used to generate it.
///
/// Uses last-modified time checks to verify this.
}
impl NativeLibBoilerplate {
- /// On OSX we don't want to ship the exact filename that compiler-rt builds.
+ /// On macOS we don't want to ship the exact filename that compiler-rt builds.
/// This conflicts with the system and ours is likely a wildly different
/// version, so they can't be substituted.
///
/// As a result, we rename it here but we need to also use
- /// `install_name_tool` on OSX to rename the commands listed inside of it to
+ /// `install_name_tool` on macOS to rename the commands listed inside of it to
/// ensure it's linked against correctly.
pub fn fixup_sanitizer_lib_name(&self, sanitizer_name: &str) {
if env::var("TARGET").unwrap() != "x86_64-apple-darwin" {
ln -s ../../${target} /usr/lib/llvm-5.0/${target}
# Install the C++ runtime libraries from CloudABI Ports.
-echo deb https://nuxi.nl/distfiles/cloudabi-ports/debian/ cloudabi cloudabi > \
- /etc/apt/sources.list.d/cloudabi.list
-curl 'https://pgp.mit.edu/pks/lookup?op=get&search=0x0DA51B8531344B15' | \
- apt-key add -
+apt-key adv --batch --yes --keyserver hkp://keyserver.ubuntu.com:80 --recv-keys 0DA51B8531344B15
+add-apt-repository -y 'deb https://nuxi.nl/distfiles/cloudabi-ports/debian/ cloudabi cloudabi'
+
apt-get update
-apt-get install -y $(echo ${target} | sed -e s/_/-/g)-cxx-runtime
+apt-get install -y "${target//_/-}-cxx-runtime"
% Error Handling in Rust
This content has moved into
-[the Rust Programming Language book](book/error-handling.html).
+[the Rust Programming Language book](book/ch09-00-error-handling.html).
% The (old) Rust Ownership Guide
This content has moved into
-[the Rust Programming Language book](book/ownership.html).
+[the Rust Programming Language book](book/ch04-00-understanding-ownership.html).
This content has been removed, with no direct replacement. Rust only
has two built-in pointer types now,
-[references](book/references-and-borrowing.html) and [raw
+[references](book/ch04-02-references-and-borrowing.html) and [raw
pointers](book/raw-pointers.html). Older Rusts had many more pointer
types, they’re gone now.
% The (old) Rust Testing Guide
This content has moved into
-[the Rust Programming Language book](book/testing.html).
+[the Rust Programming Language book](book/ch11-00-testing.html).
- [Targets](targets/index.md)
- [Built-in Targets](targets/built-in.md)
- [Custom Targets](targets/custom.md)
-- [Contributing to `rustc`](contributing.md)
\ No newline at end of file
+- [Linker-plugin based LTO](linker-plugin-lto.md)
+- [Contributing to `rustc`](contributing.md)
--- /dev/null
+# Linker-plugin-LTO
+
+The `-C linker-plugin-lto` flag allows for deferring the LTO optimization
+to the actual linking step, which in turn allows for performing
+interprocedural optimizations across programming language boundaries if
+all the object files being linked were created by LLVM based toolchains.
+The prime example here would be linking Rust code together with
+Clang-compiled C/C++ code.
+
+## Usage
+
+There are two main cases in which linker-plugin-based LTO can be used:
+
+ - compiling a Rust `staticlib` that is used as a C ABI dependency
+ - compiling a Rust binary where `rustc` invokes the linker
+
+In both cases the Rust code has to be compiled with `-C linker-plugin-lto` and
+the C/C++ code with `-flto` or `-flto=thin` so that object files are emitted
+as LLVM bitcode.
+
+### Rust `staticlib` as dependency in C/C++ program
+
+In this case the Rust compiler just has to make sure that the object files in
+the `staticlib` are in the right format. For linking, a linker with the
+LLVM plugin must be used (e.g. LLD).
+
+Using `rustc` directly:
+
+```bash
+# Compile the Rust staticlib
+rustc --crate-type=staticlib -Clinker-plugin-lto -Copt-level=2 ./lib.rs
+# Compile the C code with `-flto=thin`
+clang -c -O2 -flto=thin -o main.o ./main.c
+# Link everything, making sure that we use an appropriate linker
+clang -flto=thin -fuse-ld=lld -L . -l"name-of-your-rust-lib" -o main -O2 ./main.o
+```
+
+Using `cargo`:
+
+```bash
+# Compile the Rust staticlib
+RUSTFLAGS="-Clinker-plugin-lto" cargo build --release
+# Compile the C code with `-flto=thin`
+clang -c -O2 -flto=thin -o main.o ./main.c
+# Link everything, making sure that we use an appropriate linker
+clang -flto=thin -fuse-ld=lld -L . -l"name-of-your-rust-lib" -o main -O2 ./main.o
+```
+
+### C/C++ code as a dependency in Rust
+
+In this case the linker will be invoked by `rustc`. We again have to make sure
+that an appropriate linker is used.
+
+Using `rustc` directly:
+
+```bash
+# Compile C code with `-flto`
+clang ./clib.c -flto=thin -c -o ./clib.o -O2
+# Create a static library from the C code
+ar crus ./libxyz.a ./clib.o
+
+# Invoke `rustc` with the additional arguments
+rustc -Clinker-plugin-lto -L. -Copt-level=2 -Clinker=clang -Clink-arg=-fuse-ld=lld ./main.rs
+```
+
+Using `cargo` directly:
+
+```bash
+# Compile C code with `-flto`
+clang ./clib.c -flto=thin -c -o ./clib.o -O2
+# Create a static library from the C code
+ar crus ./libxyz.a ./clib.o
+
+# Set the linking arguments via RUSTFLAGS
+RUSTFLAGS="-Clinker-plugin-lto -Clinker=clang -Clink-arg=-fuse-ld=lld" cargo build --release
+```
+
+### Explicitly specifying the linker plugin to be used by `rustc`
+
+If one wants to use a linker other than LLD, the LLVM linker plugin has to be
+specified explicitly. Otherwise the linker cannot read the object files. The
+path to the plugin is passed as an argument to the `-Clinker-plugin-lto`
+option:
+
+```bash
+rustc -Clinker-plugin-lto="/path/to/LLVMgold.so" -L. -Copt-level=2 ./main.rs
+```
+
+
+## Toolchain Compatibility
+
+In order for this kind of LTO to work, the LLVM linker plugin must be able to
+handle the LLVM bitcode produced by both `rustc` and `clang`.
+
+Best results are achieved by using a `rustc` and `clang` that are based on the
+exact same version of LLVM. One can use `rustc -vV` in order to view the LLVM
+version used by a given `rustc`. Note that the version number given
+here is only an approximation as Rust sometimes uses unstable revisions of
+LLVM. However, the approximation is usually reliable.
+
+The following table shows known good combinations of toolchain versions.
+
+| | Clang 7 | Clang 8 |
+|-----------|-----------|-----------|
+| Rust 1.34 | ✗ | ✓ |
+| Rust 1.35 | ✗ | ✓(?) |
+
+Note that the compatibility policy for this feature might change in the future.
/// ```
/// use std::borrow::{Cow, ToOwned};
///
-/// struct Items<'a, X: 'a> where [X]: ToOwned<Owned=Vec<X>> {
+/// struct Items<'a, X: 'a> where [X]: ToOwned<Owned = Vec<X>> {
/// values: Cow<'a, [X]>,
/// }
///
-/// impl<'a, X: Clone + 'a> Items<'a, X> where [X]: ToOwned<Owned=Vec<X>> {
+/// impl<'a, X: Clone + 'a> Items<'a, X> where [X]: ToOwned<Owned = Vec<X>> {
/// fn new(v: Cow<'a, [X]>) -> Self {
/// Items { values: v }
/// }
CoerceUnsized, DispatchFromDyn, Deref, DerefMut, Receiver, Generator, GeneratorState
};
use core::ptr::{self, NonNull, Unique};
-use core::task::{LocalWaker, Poll};
+use core::task::{Waker, Poll};
use crate::vec::Vec;
use crate::raw_vec::RawVec;
impl<F: ?Sized + Future + Unpin> Future for Box<F> {
type Output = F::Output;
- fn poll(mut self: Pin<&mut Self>, lw: &LocalWaker) -> Poll<Self::Output> {
- F::poll(Pin::new(&mut *self), lw)
+ fn poll(mut self: Pin<&mut Self>, waker: &Waker) -> Poll<Self::Output> {
+ F::poll(Pin::new(&mut *self), waker)
}
}
}
impl<'a, T> Hole<'a, T> {
- /// Create a new Hole at index `pos`.
+ /// Creates a new `Hole` at index `pos`.
///
/// Unsafe because pos must be within the data slice.
#[inline]
/// Gets a mutable reference to the value in the entry.
///
- /// If you need a reference to the `OccupiedEntry` which may outlive the
+ /// If you need a reference to the `OccupiedEntry` that may outlive the
/// destruction of the `Entry` value, see [`into_mut`].
///
/// [`into_mut`]: #method.into_mut
///
/// We have a separate type for the header and rely on it matching the prefix of `LeafNode`, in
/// order to statically allocate a single dummy node to avoid allocations. This struct is
-/// `repr(C)` to prevent them from being reordered. `LeafNode` does not just contain a
+/// `repr(C)` to prevent them from being reordered. `LeafNode` does not just contain a
/// `NodeHeader` because we do not want unnecessary padding between `len` and the keys.
-/// Crucially, `NodeHeader` can be safely transmuted to different K and V. (This is exploited
+/// Crucially, `NodeHeader` can be safely transmuted to different K and V. (This is exploited
/// by `as_header`.)
-/// See `into_key_slice` for an explanation of K2. K2 cannot be safely transmuted around
+/// See `into_key_slice` for an explanation of K2. K2 cannot be safely transmuted around
/// because the size of `NodeHeader` depends on its alignment!
#[repr(C)]
struct NodeHeader<K, V, K2 = ()> {
}
}
- /// Returns whether it is valid to call `.merge()`, i.e., whether there is enough room in
+ /// Returns `true` if it is valid to call `.merge()`, i.e., whether there is enough room in
/// a node to hold the combination of the nodes to the left and right of this handle along
/// with the key/value pair at this handle.
pub fn can_merge(&self) -> bool {
impl<BorrowType, K, V, HandleType>
Handle<NodeRef<BorrowType, K, V, marker::LeafOrInternal>, HandleType> {
- /// Check whether the underlying node is an `Internal` node or a `Leaf` node.
+ /// Checks whether the underlying node is an `Internal` node or a `Leaf` node.
pub fn force(self) -> ForceResult<
Handle<NodeRef<BorrowType, K, V, marker::Leaf>, HandleType>,
Handle<NodeRef<BorrowType, K, V, marker::Internal>, HandleType>
Recover::replace(&mut self.map, value)
}
- /// Removes a value from the set. Returns `true` if the value was
+ /// Removes a value from the set. Returns whether the value was
/// present in the set.
///
/// The value may be any borrowed form of the set's value type,
#[stable(feature = "fused", since = "1.26.0")]
impl<T> FusedIterator for Range<'_, T> {}
-/// Compare `x` and `y`, but return `short` if x is None and `long` if y is None
+/// Compares `x` and `y`, but returns `short` if `x` is `None` and `long` if `y` is `None`.
fn cmp_opt<T: Ord>(x: Option<&T>, y: Option<&T>, short: Ordering, long: Ordering) -> Ordering {
match (x, y) {
(None, _) => short,
ptr::write(self.ptr().add(off), value);
}
- /// Returns `true` if and only if the buffer is at full capacity.
+ /// Returns `true` if the buffer is at full capacity.
#[inline]
fn is_full(&self) -> bool {
self.cap() - self.len() == 1
/// Does nothing if the capacity is already sufficient.
///
/// Note that the allocator may give the collection more space than it
- /// requests. Therefore capacity can not be relied upon to be precisely
+ /// requests. Therefore, capacity can not be relied upon to be precisely
/// minimal. Prefer `reserve` if future insertions are expected.
///
/// # Errors
self.tail == self.head
}
- /// Create a draining iterator that removes the specified range in the
+ /// Creates a draining iterator that removes the specified range in the
/// `VecDeque` and yields the removed items.
///
/// Note 1: The element range is removed even if the iterator is not
///
/// Note 2: It is unspecified how many elements are removed from the deque,
/// if the `Drain` value is not dropped, but the borrow it holds expires
- /// (eg. due to mem::forget).
+ /// (e.g., due to `mem::forget`).
///
/// # Panics
///
///
/// # Panics
///
- /// If `mid` is greater than `len()`. Note that `mid == len()`
+ /// If `mid` is greater than `len()`. Note that `mid == len()`
/// does _not_ panic and is a no-op rotation.
///
/// # Complexity
///
/// # Panics
///
- /// If `k` is greater than `len()`. Note that `k == len()`
+ /// If `k` is greater than `len()`. Note that `k == len()`
/// does _not_ panic and is a no-op rotation.
///
/// # Complexity
//! will then parse the format string and determine if the list of arguments
//! provided is suitable to pass to this format string.
//!
-//! To convert a single value to a string, use the [`to_string`] method. This
+//! To convert a single value to a string, use the [`to_string`] method. This
//! will use the [`Display`] formatting trait.
//!
//! ## Positional parameters
//! When requesting that an argument be formatted with a particular type, you
//! are actually requesting that an argument ascribes to a particular trait.
//! This allows multiple actual types to be formatted via `{:x}` (like [`i8`] as
-//! well as [`isize`]). The current mapping of types to traits is:
+//! well as [`isize`]). The current mapping of types to traits is:
//!
//! * *nothing* ⇒ [`Display`]
//! * `?` ⇒ [`Debug`]
//! 3. An asterisk `.*`:
//!
//! `.*` means that this `{...}` is associated with *two* format inputs rather than one: the
-//! first input holds the `usize` precision, and the second holds the value to print. Note that
+//! first input holds the `usize` precision, and the second holds the value to print. Note that
//! in this case, if one uses the format string `{<arg>:<spec>.*}`, then the `<arg>` part refers
//! to the *value* to print, and the `precision` must come in the input preceding `<arg>`.
//!
#![warn(intra_doc_link_resolution_failure)]
#![warn(missing_debug_implementations)]
-#![cfg_attr(not(test), feature(fn_traits))]
#![cfg_attr(not(test), feature(generator_trait))]
#![cfg_attr(test, feature(test))]
#![feature(dropck_eyepatch)]
#![feature(exact_size_is_empty)]
#![feature(fmt_internals)]
+#![feature(fn_traits)]
#![feature(fundamental)]
#![feature(futures_api)]
#![feature(lang_items)]
#![feature(receiver_trait)]
#![feature(specialization)]
#![feature(staged_api)]
+#![feature(std_internals)]
#![feature(str_internals)]
#![feature(trusted_len)]
#![feature(try_reserve)]
pub mod alloc;
-#[unstable(feature = "futures_api",
- reason = "futures in libcore are unstable",
- issue = "50547")]
-pub mod task;
// Primitive types using the heaps above
// Need to conditionally define the mod from `boxed.rs` to avoid
/// Creates a `String` using interpolation of runtime expressions.
///
-/// The first argument `format!` receives is a format string. This must be a string
-/// literal. The power of the formatting string is in the `{}`s contained.
+/// The first argument `format!` receives is a format string. This must be a string
+/// literal. The power of the formatting string is in the `{}`s contained.
///
/// Additional parameters passed to `format!` replace the `{}`s within the
/// formatting string in the order given unless named or positional parameters
-/// are used, see [`std::fmt`][fmt] for more information.
+/// are used; see [`std::fmt`][fmt] for more information.
///
/// A common use for `format!` is concatenation and interpolation of strings.
/// The same convention is used with [`print!`] and [`write!`] macros,
/// depending on the intended destination of the string.
///
-/// To convert a single value to a string, use the [`to_string`] method. This
+/// To convert a single value to a string, use the [`to_string`] method. This
/// will use the [`Display`] formatting trait.
///
/// [fmt]: ../std/fmt/index.html
/// enough to want to do that it's easiest to just have a dedicated method. Slightly
/// more efficient logic can be provided for this than the general case.
///
- /// Returns true if the reallocation attempt has succeeded, or false otherwise.
+ /// Returns `true` if the reallocation attempt has succeeded.
///
/// # Panics
///
/// the requested space. This is not really unsafe, but the unsafe
/// code *you* write that relies on the behavior of this function may break.
///
- /// Returns true if the reallocation attempt has succeeded, or false otherwise.
+ /// Returns `true` if the reallocation attempt has succeeded.
///
/// # Panics
///
this.strong()
}
- /// Returns true if there are no other `Rc` or [`Weak`][weak] pointers to
+ /// Returns `true` if there are no other `Rc` or [`Weak`][weak] pointers to
/// this inner value.
///
/// [weak]: struct.Weak.html
#[inline]
#[stable(feature = "ptr_eq", since = "1.17.0")]
- /// Returns true if the two `Rc`s point to the same value (not
+ /// Returns `true` if the two `Rc`s point to the same value (not
/// just values that compare as equal).
///
/// # Examples
})
}
- /// Return `None` when the pointer is dangling and there is no allocated `RcBox`,
- /// i.e., this `Weak` was created by `Weak::new`
+ /// Returns `None` when the pointer is dangling and there is no allocated `RcBox`
+ /// (i.e., when this `Weak` was created by `Weak::new`).
#[inline]
fn inner(&self) -> Option<&RcBox<T>> {
if is_dangling(self.ptr) {
}
}
- /// Returns true if the two `Weak`s point to the same value (not just values
+ /// Returns `true` if the two `Weak`s point to the same value (not just values
/// that compare as equal).
///
/// # Notes
///
/// The comparator function must define a total ordering for the elements in the slice. If
/// the ordering is not total, the order of the elements is unspecified. An order is a
- /// total order if it is (for all a, b and c):
+ /// total order if it is (for all `a`, `b` and `c`):
///
- /// * total and antisymmetric: exactly one of a < b, a == b or a > b is true; and
- /// * transitive, a < b and b < c implies a < c. The same must hold for both == and >.
+ /// * total and antisymmetric: exactly one of `a < b`, `a == b` or `a > b` is true, and
+ /// * transitive, `a < b` and `b < c` implies `a < c`. The same must hold for both `==` and `>`.
///
/// For example, while [`f64`] doesn't implement [`Ord`] because `NaN != NaN`, we can use
/// `partial_cmp` as our sort function when we know the slice doesn't contain a `NaN`.
#![allow(unused_imports)]
use core::borrow::Borrow;
-use core::fmt;
-use core::str as core_str;
use core::str::pattern::{Pattern, Searcher, ReverseSearcher, DoubleEndedSearcher};
use core::mem;
use core::ptr;
return s;
}
- /// Escapes each char in `s` with [`char::escape_debug`].
- ///
- /// Note: only extended grapheme codepoints that begin the string will be
- /// escaped.
- ///
- /// [`char::escape_debug`]: primitive.char.html#method.escape_debug
- #[unstable(feature = "str_escape",
- reason = "return type may change to be an iterator",
- issue = "27791")]
- pub fn escape_debug(&self) -> String {
- let mut string = String::with_capacity(self.len());
- let mut chars = self.chars();
- if let Some(first) = chars.next() {
- string.extend(first.escape_debug_ext(true))
- }
- string.extend(chars.flat_map(|c| c.escape_debug_ext(false)));
- string
- }
-
- /// Escapes each char in `s` with [`char::escape_default`].
- ///
- /// [`char::escape_default`]: primitive.char.html#method.escape_default
- #[unstable(feature = "str_escape",
- reason = "return type may change to be an iterator",
- issue = "27791")]
- pub fn escape_default(&self) -> String {
- self.chars().flat_map(|c| c.escape_default()).collect()
- }
-
- /// Escapes each char in `s` with [`char::escape_unicode`].
- ///
- /// [`char::escape_unicode`]: primitive.char.html#method.escape_unicode
- #[unstable(feature = "str_escape",
- reason = "return type may change to be an iterator",
- issue = "27791")]
- pub fn escape_unicode(&self) -> String {
- self.chars().flat_map(|c| c.escape_unicode()).collect()
- }
-
/// Converts a [`Box<str>`] into a [`String`] without copying or allocating.
///
/// [`String`]: string/struct.String.html
pub unsafe fn from_boxed_utf8_unchecked(v: Box<[u8]>) -> Box<str> {
Box::from_raw(Box::into_raw(v) as *mut str)
}
+
/// Does nothing if the capacity is already sufficient.
///
/// Note that the allocator may give the collection more space than it
- /// requests. Therefore capacity can not be relied upon to be precisely
+ /// requests. Therefore, capacity can not be relied upon to be precisely
/// minimal. Prefer `reserve` if future insertions are expected.
///
/// # Errors
self.vec.len()
}
- /// Returns `true` if this `String` has a length of zero.
- ///
- /// Returns `false` otherwise.
+ /// Returns `true` if this `String` has a length of zero, and `false` otherwise.
///
/// # Examples
///
#[inline]
#[stable(feature = "ptr_eq", since = "1.17.0")]
- /// Returns true if the two `Arc`s point to the same value (not
+ /// Returns `true` if the two `Arc`s point to the same value (not
/// just values that compare as equal).
///
/// # Examples
})
}
- /// Return `None` when the pointer is dangling and there is no allocated `ArcInner`,
- /// i.e., this `Weak` was created by `Weak::new`
+ /// Returns `None` when the pointer is dangling and there is no allocated `ArcInner`
+ /// (i.e., when this `Weak` was created by `Weak::new`).
#[inline]
fn inner(&self) -> Option<&ArcInner<T>> {
if is_dangling(self.ptr) {
}
}
- /// Returns true if the two `Weak`s point to the same value (not just values
+ /// Returns `true` if the two `Weak`s point to the same value (not just values
/// that compare as equal).
///
/// # Notes
+++ /dev/null
-//! Types and Traits for working with asynchronous tasks.
-
-pub use core::task::*;
-
-#[cfg(all(target_has_atomic = "ptr", target_has_atomic = "cas"))]
-pub use if_arc::*;
-
-#[cfg(all(target_has_atomic = "ptr", target_has_atomic = "cas"))]
-mod if_arc {
- use super::*;
- use core::marker::PhantomData;
- use core::mem;
- use core::ptr::{self, NonNull};
- use crate::sync::Arc;
-
- /// A way of waking up a specific task.
- ///
- /// Any task executor must provide a way of signaling that a task it owns
- /// is ready to be `poll`ed again. Executors do so by implementing this trait.
- pub trait Wake: Send + Sync {
- /// Indicates that the associated task is ready to make progress and should
- /// be `poll`ed.
- ///
- /// Executors generally maintain a queue of "ready" tasks; `wake` should place
- /// the associated task onto this queue.
- fn wake(arc_self: &Arc<Self>);
-
- /// Indicates that the associated task is ready to make progress and should
- /// be `poll`ed. This function is like `wake`, but can only be called from the
- /// thread on which this `Wake` was created.
- ///
- /// Executors generally maintain a queue of "ready" tasks; `wake_local` should place
- /// the associated task onto this queue.
- #[inline]
- unsafe fn wake_local(arc_self: &Arc<Self>) {
- Self::wake(arc_self);
- }
- }
-
- #[cfg(all(target_has_atomic = "ptr", target_has_atomic = "cas"))]
- struct ArcWrapped<T>(PhantomData<T>);
-
- unsafe impl<T: Wake + 'static> UnsafeWake for ArcWrapped<T> {
- #[inline]
- unsafe fn clone_raw(&self) -> Waker {
- let me: *const ArcWrapped<T> = self;
- let arc = (*(&me as *const *const ArcWrapped<T> as *const Arc<T>)).clone();
- Waker::from(arc)
- }
-
- #[inline]
- unsafe fn drop_raw(&self) {
- let mut me: *const ArcWrapped<T> = self;
- let me = &mut me as *mut *const ArcWrapped<T> as *mut Arc<T>;
- ptr::drop_in_place(me);
- }
-
- #[inline]
- unsafe fn wake(&self) {
- let me: *const ArcWrapped<T> = self;
- T::wake(&*(&me as *const *const ArcWrapped<T> as *const Arc<T>))
- }
-
- #[inline]
- unsafe fn wake_local(&self) {
- let me: *const ArcWrapped<T> = self;
- T::wake_local(&*(&me as *const *const ArcWrapped<T> as *const Arc<T>))
- }
- }
-
- impl<T> From<Arc<T>> for Waker
- where T: Wake + 'static,
- {
- fn from(rc: Arc<T>) -> Self {
- unsafe {
- let ptr = mem::transmute::<Arc<T>, NonNull<ArcWrapped<T>>>(rc);
- Waker::new(ptr)
- }
- }
- }
-
- /// Creates a `LocalWaker` from a local `wake`.
- ///
- /// This function requires that `wake` is "local" (created on the current thread).
- /// The resulting `LocalWaker` will call `wake.wake_local()` when awoken, and
- /// will call `wake.wake()` if awoken after being converted to a `Waker`.
- #[inline]
- pub unsafe fn local_waker<W: Wake + 'static>(wake: Arc<W>) -> LocalWaker {
- let ptr = mem::transmute::<Arc<W>, NonNull<ArcWrapped<W>>>(wake);
- LocalWaker::new(ptr)
- }
-
- struct NonLocalAsLocal<T>(ArcWrapped<T>);
-
- unsafe impl<T: Wake + 'static> UnsafeWake for NonLocalAsLocal<T> {
- #[inline]
- unsafe fn clone_raw(&self) -> Waker {
- self.0.clone_raw()
- }
-
- #[inline]
- unsafe fn drop_raw(&self) {
- self.0.drop_raw()
- }
-
- #[inline]
- unsafe fn wake(&self) {
- self.0.wake()
- }
-
- #[inline]
- unsafe fn wake_local(&self) {
- // Since we're nonlocal, we can't call wake_local
- self.0.wake()
- }
- }
-
- /// Creates a `LocalWaker` from a non-local `wake`.
- ///
- /// This function is similar to `local_waker`, but does not require that `wake`
- /// is local to the current thread. The resulting `LocalWaker` will call
- /// `wake.wake()` when awoken.
- #[inline]
- pub fn local_waker_from_nonlocal<W: Wake + 'static>(wake: Arc<W>) -> LocalWaker {
- unsafe {
- let ptr = mem::transmute::<Arc<W>, NonNull<NonLocalAsLocal<W>>>(wake);
- LocalWaker::new(ptr)
- }
- }
-}
use std::alloc::{Global, Alloc, Layout, System};
-/// https://github.com/rust-lang/rust/issues/45955
+/// Issue #45955.
#[test]
fn alloc_system_overaligned_request() {
check_overalign_requests(System)
#![feature(pattern)]
#![feature(repeat_generic_slice)]
#![feature(slice_sort_by_cached_key)]
-#![feature(str_escape)]
#![feature(try_reserve)]
#![feature(unboxed_closures)]
#![feature(vecdeque_rotate)]
#[test]
fn test_escape_unicode() {
- assert_eq!("abc".escape_unicode(), "\\u{61}\\u{62}\\u{63}");
- assert_eq!("a c".escape_unicode(), "\\u{61}\\u{20}\\u{63}");
- assert_eq!("\r\n\t".escape_unicode(), "\\u{d}\\u{a}\\u{9}");
- assert_eq!("'\"\\".escape_unicode(), "\\u{27}\\u{22}\\u{5c}");
- assert_eq!("\x00\x01\u{fe}\u{ff}".escape_unicode(), "\\u{0}\\u{1}\\u{fe}\\u{ff}");
- assert_eq!("\u{100}\u{ffff}".escape_unicode(), "\\u{100}\\u{ffff}");
- assert_eq!("\u{10000}\u{10ffff}".escape_unicode(), "\\u{10000}\\u{10ffff}");
- assert_eq!("ab\u{fb00}".escape_unicode(), "\\u{61}\\u{62}\\u{fb00}");
- assert_eq!("\u{1d4ea}\r".escape_unicode(), "\\u{1d4ea}\\u{d}");
+ assert_eq!("abc".escape_unicode().to_string(), "\\u{61}\\u{62}\\u{63}");
+ assert_eq!("a c".escape_unicode().to_string(), "\\u{61}\\u{20}\\u{63}");
+ assert_eq!("\r\n\t".escape_unicode().to_string(), "\\u{d}\\u{a}\\u{9}");
+ assert_eq!("'\"\\".escape_unicode().to_string(), "\\u{27}\\u{22}\\u{5c}");
+ assert_eq!("\x00\x01\u{fe}\u{ff}".escape_unicode().to_string(), "\\u{0}\\u{1}\\u{fe}\\u{ff}");
+ assert_eq!("\u{100}\u{ffff}".escape_unicode().to_string(), "\\u{100}\\u{ffff}");
+ assert_eq!("\u{10000}\u{10ffff}".escape_unicode().to_string(), "\\u{10000}\\u{10ffff}");
+ assert_eq!("ab\u{fb00}".escape_unicode().to_string(), "\\u{61}\\u{62}\\u{fb00}");
+ assert_eq!("\u{1d4ea}\r".escape_unicode().to_string(), "\\u{1d4ea}\\u{d}");
}
#[test]
// they are escaped. However, when the character is unescaped (e.g., for
// printable characters), only a single backslash appears (as the character
// itself appears in the debug string).
- assert_eq!("abc".escape_debug(), "abc");
- assert_eq!("a c".escape_debug(), "a c");
- assert_eq!("éèê".escape_debug(), "éèê");
- assert_eq!("\r\n\t".escape_debug(), "\\r\\n\\t");
- assert_eq!("'\"\\".escape_debug(), "\\'\\\"\\\\");
- assert_eq!("\u{7f}\u{ff}".escape_debug(), "\\u{7f}\u{ff}");
- assert_eq!("\u{100}\u{ffff}".escape_debug(), "\u{100}\\u{ffff}");
- assert_eq!("\u{10000}\u{10ffff}".escape_debug(), "\u{10000}\\u{10ffff}");
- assert_eq!("ab\u{200b}".escape_debug(), "ab\\u{200b}");
- assert_eq!("\u{10d4ea}\r".escape_debug(), "\\u{10d4ea}\\r");
- assert_eq!("\u{301}a\u{301}bé\u{e000}".escape_debug(), "\\u{301}a\u{301}bé\\u{e000}");
+ assert_eq!("abc".escape_debug().to_string(), "abc");
+ assert_eq!("a c".escape_debug().to_string(), "a c");
+ assert_eq!("éèê".escape_debug().to_string(), "éèê");
+ assert_eq!("\r\n\t".escape_debug().to_string(), "\\r\\n\\t");
+ assert_eq!("'\"\\".escape_debug().to_string(), "\\'\\\"\\\\");
+ assert_eq!("\u{7f}\u{ff}".escape_debug().to_string(), "\\u{7f}\u{ff}");
+ assert_eq!("\u{100}\u{ffff}".escape_debug().to_string(), "\u{100}\\u{ffff}");
+ assert_eq!("\u{10000}\u{10ffff}".escape_debug().to_string(), "\u{10000}\\u{10ffff}");
+ assert_eq!("ab\u{200b}".escape_debug().to_string(), "ab\\u{200b}");
+ assert_eq!("\u{10d4ea}\r".escape_debug().to_string(), "\\u{10d4ea}\\r");
+ assert_eq!("\u{301}a\u{301}bé\u{e000}".escape_debug().to_string(),
+ "\\u{301}a\u{301}bé\\u{e000}");
}
#[test]
fn test_escape_default() {
- assert_eq!("abc".escape_default(), "abc");
- assert_eq!("a c".escape_default(), "a c");
- assert_eq!("éèê".escape_default(), "\\u{e9}\\u{e8}\\u{ea}");
- assert_eq!("\r\n\t".escape_default(), "\\r\\n\\t");
- assert_eq!("'\"\\".escape_default(), "\\'\\\"\\\\");
- assert_eq!("\u{7f}\u{ff}".escape_default(), "\\u{7f}\\u{ff}");
- assert_eq!("\u{100}\u{ffff}".escape_default(), "\\u{100}\\u{ffff}");
- assert_eq!("\u{10000}\u{10ffff}".escape_default(), "\\u{10000}\\u{10ffff}");
- assert_eq!("ab\u{200b}".escape_default(), "ab\\u{200b}");
- assert_eq!("\u{10d4ea}\r".escape_default(), "\\u{10d4ea}\\r");
+ assert_eq!("abc".escape_default().to_string(), "abc");
+ assert_eq!("a c".escape_default().to_string(), "a c");
+ assert_eq!("éèê".escape_default().to_string(), "\\u{e9}\\u{e8}\\u{ea}");
+ assert_eq!("\r\n\t".escape_default().to_string(), "\\r\\n\\t");
+ assert_eq!("'\"\\".escape_default().to_string(), "\\'\\\"\\\\");
+ assert_eq!("\u{7f}\u{ff}".escape_default().to_string(), "\\u{7f}\\u{ff}");
+ assert_eq!("\u{100}\u{ffff}".escape_default().to_string(), "\\u{100}\\u{ffff}");
+ assert_eq!("\u{10000}\u{10ffff}".escape_default().to_string(), "\\u{10000}\\u{10ffff}");
+ assert_eq!("ab\u{200b}".escape_default().to_string(), "ab\\u{200b}");
+ assert_eq!("\u{10d4ea}\r".escape_default().to_string(), "\\u{10d4ea}\\r");
}
#[test]
/// Does nothing if the capacity is already sufficient.
///
/// Note that the allocator may give the collection more space than it
- /// requests. Therefore capacity can not be relied upon to be precisely
+ /// requests. Therefore, capacity can not be relied upon to be precisely
/// minimal. Prefer `reserve` if future insertions are expected.
///
/// # Panics
/// Does nothing if the capacity is already sufficient.
///
/// Note that the allocator may give the collection more space than it
- /// requests. Therefore capacity can not be relied upon to be precisely
+ /// requests. Therefore, capacity can not be relied upon to be precisely
/// minimal. Prefer `reserve` if future insertions are expected.
///
/// # Errors
/// Forces the length of the vector to `new_len`.
///
/// This is a low-level operation that maintains none of the normal
- /// invariants of the type. Normally changing the length of a vector
+ /// invariants of the type. Normally changing the length of a vector
/// is done using one of the safe operations instead, such as
/// [`truncate`], [`resize`], [`extend`], or [`clear`].
///
/// The range from `self.vec.len` to `self.tail_start` contains elements
/// that have been moved out.
/// Fill that range as much as possible with new elements from the `replace_with` iterator.
- /// Return whether we filled the entire range. (`replace_with.next()` didn’t return `None`.)
+ /// Returns `true` if we filled the entire range. (`replace_with.next()` didn’t return `None`.)
unsafe fn fill<I: Iterator<Item=T>>(&mut self, replace_with: &mut I) -> bool {
let vec = self.vec.as_mut();
let range_start = vec.len;
true
}
- /// Make room for inserting more elements before the tail.
+ /// Makes room for inserting more elements before the tail.
unsafe fn move_tail(&mut self, extra_capacity: usize) {
let vec = self.vec.as_mut();
let used_capacity = self.tail_start + self.tail_len;
/// The `GlobalAlloc` trait is an `unsafe` trait for a number of reasons, and
/// implementors must ensure that they adhere to these contracts:
///
-/// * It's undefined behavior if global allocators unwind. This restriction may
+/// * It's undefined behavior if global allocators unwind. This restriction may
/// be lifted in the future, but currently a panic from any of these
/// functions may lead to memory unsafety.
///
//!
//! Consider a situation where we want to log out a value passed to a function.
//! We know the value we're working on implements Debug, but we don't know its
-//! concrete type. We want to give special treatment to certain types: in this
+//! concrete type. We want to give special treatment to certain types: in this
//! case printing out the length of String values prior to their value.
//! We don't know the concrete type of our value at compile time, so we need to
//! use runtime reflection instead.
//! fn log<T: Any + Debug>(value: &T) {
//! let value_any = value as &dyn Any;
//!
-//! // try to convert our value to a String. If successful, we want to
-//! // output the String's length as well as its value. If not, it's a
+//! // Try to convert our value to a `String`. If successful, we want to
+//! // output the `String`'s length as well as its value. If not, it's a
//! // different type: just print it out unadorned.
//! match value_any.downcast_ref::<String>() {
//! Some(as_string) => {
//!
//! This is simply a special - but common - case of the previous: hiding mutability for operations
//! that appear to be immutable. The `clone` method is expected to not change the source value, and
-//! is declared to take `&self`, not `&mut self`. Therefore any mutation that happens in the
+//! is declared to take `&self`, not `&mut self`. Therefore, any mutation that happens in the
//! `clone` method must use cell types. For example, `Rc<T>` maintains its reference counts within a
//! `Cell<T>`.
//!
/// The `RefCell` is already immutably borrowed, so this cannot fail.
///
/// This is an associated function that needs to be used as
- /// `Ref::clone(...)`. A `Clone` implementation or a method would interfere
+ /// `Ref::clone(...)`. A `Clone` implementation or a method would interfere
/// with the widespread use of `r.borrow().clone()` to clone the contents of
/// a `RefCell`.
#[stable(feature = "cell_extras", since = "1.15.0")]
}
}
- /// Make a new `Ref` for a component of the borrowed data.
+ /// Makes a new `Ref` for a component of the borrowed data.
///
/// The `RefCell` is already immutably borrowed, so this cannot fail.
///
}
}
- /// Split a `Ref` into multiple `Ref`s for different components of the
+ /// Splits a `Ref` into multiple `Ref`s for different components of the
/// borrowed data.
///
/// The `RefCell` is already immutably borrowed, so this cannot fail.
}
impl<'b, T: ?Sized> RefMut<'b, T> {
- /// Make a new `RefMut` for a component of the borrowed data, e.g., an enum
+ /// Makes a new `RefMut` for a component of the borrowed data, e.g., an enum
/// variant.
///
/// The `RefCell` is already mutably borrowed, so this cannot fail.
///
/// This is an associated function that needs to be used as
- /// `RefMut::map(...)`. A method would interfere with methods of the same
+ /// `RefMut::map(...)`. A method would interfere with methods of the same
/// name on the contents of a `RefCell` used through `Deref`.
///
/// # Examples
}
}
- /// Split a `RefMut` into multiple `RefMut`s for different components of the
+ /// Splits a `RefMut` into multiple `RefMut`s for different components of the
/// borrowed data.
///
/// The underlying `RefCell` will remain mutably borrowed until both
/// co-exist with it. A `&mut T` must always be unique.
///
/// Note that while mutating or mutably aliasing the contents of an `&UnsafeCell<T>` is
-/// okay (provided you enforce the invariants some other way), it is still undefined behavior
+/// ok (provided you enforce the invariants some other way), it is still undefined behavior
/// to have multiple `&mut UnsafeCell<T>` aliases.
///
/// # Examples
code: u16,
}
-/// Create an iterator over the UTF-16 encoded code points in `iter`,
+/// Creates an iterator over the UTF-16 encoded code points in `iter`,
/// returning unpaired surrogates as `Err`s.
///
/// # Examples
/// An extended version of `escape_debug` that optionally permits escaping
/// Extended Grapheme codepoints. This allows us to format characters like
/// nonspacing marks better when they're at the start of a string.
- #[doc(hidden)]
- #[unstable(feature = "str_internals", issue = "0")]
#[inline]
- pub fn escape_debug_ext(self, escape_grapheme_extended: bool) -> EscapeDebug {
+ pub(crate) fn escape_debug_ext(self, escape_grapheme_extended: bool) -> EscapeDebug {
let init_state = match self {
'\t' => EscapeDefaultState::Backslash('t'),
'\r' => EscapeDefaultState::Backslash('r'),
}
}
- /// Returns true if this `char` is an alphabetic code point, and false if not.
+ /// Returns `true` if this `char` is an alphabetic code point, and `false` if not.
///
/// # Examples
///
}
}
- /// Returns true if this `char` satisfies the 'XID_Start' Unicode property, and false
+ /// Returns `true` if this `char` satisfies the 'XID_Start' Unicode property, and `false`
/// otherwise.
///
/// 'XID_Start' is a Unicode Derived Property specified in
derived_property::XID_Start(self)
}
- /// Returns true if this `char` satisfies the 'XID_Continue' Unicode property, and false
+ /// Returns `true` if this `char` satisfies the 'XID_Continue' Unicode property, and `false`
/// otherwise.
///
/// 'XID_Continue' is a Unicode Derived Property specified in
derived_property::XID_Continue(self)
}
- /// Returns true if this `char` is lowercase, and false otherwise.
+ /// Returns `true` if this `char` is lowercase.
///
/// 'Lowercase' is defined according to the terms of the Unicode Derived Core
/// Property `Lowercase`.
}
}
- /// Returns true if this `char` is uppercase, and false otherwise.
+ /// Returns `true` if this `char` is uppercase.
///
/// 'Uppercase' is defined according to the terms of the Unicode Derived Core
/// Property `Uppercase`.
}
}
- /// Returns true if this `char` is whitespace, and false otherwise.
+ /// Returns `true` if this `char` is whitespace.
///
/// 'Whitespace' is defined according to the terms of the Unicode Derived Core
/// Property `White_Space`.
}
}
- /// Returns true if this `char` is alphanumeric, and false otherwise.
+ /// Returns `true` if this `char` is alphanumeric.
///
/// 'Alphanumeric'-ness is defined in terms of the Unicode General Categories
/// 'Nd', 'Nl', 'No' and the Derived Core Property 'Alphabetic'.
self.is_alphabetic() || self.is_numeric()
}
- /// Returns true if this `char` is a control code point, and false otherwise.
+ /// Returns `true` if this `char` is a control code point.
///
/// 'Control code point' is defined in terms of the Unicode General
/// Category `Cc`.
general_category::Cc(self)
}
- /// Returns true if this `char` is an extended grapheme character, and false otherwise.
+ /// Returns `true` if this `char` is an extended grapheme character.
///
/// 'Extended grapheme character' is defined in terms of the Unicode Shaping and Rendering
/// Category `Grapheme_Extend`.
derived_property::Grapheme_Extend(self)
}
- /// Returns true if this `char` is numeric, and false otherwise.
+ /// Returns `true` if this `char` is numeric.
///
/// 'Numeric'-ness is defined in terms of the Unicode General Categories
/// 'Nd', 'Nl', 'No'.
/// relations](http://en.wikipedia.org/wiki/Partial_equivalence_relation).
///
/// This trait allows for partial equality, for types that do not have a full
-/// equivalence relation. For example, in floating point numbers `NaN != NaN`,
+/// equivalence relation. For example, in floating point numbers `NaN != NaN`,
/// so floating point types implement `PartialEq` but not `Eq`.
///
/// Formally, the equality must be (for all `a`, `b` and `c`):
/// - Use `Borrow` when the goal is related to writing code that is agnostic to
/// the type of borrow and whether it is a reference or value
///
-/// See [the book][book] for a more detailed comparison.
-///
-/// [book]: ../../book/first-edition/borrow-and-asref.html
/// [`Borrow`]: ../../std/borrow/trait.Borrow.html
///
/// **Note: this trait must not fail**. If the conversion can fail, use a
///
/// There is one exception to implementing `Into`, and it's kind of esoteric.
/// If the destination type is not part of the current crate, and it uses a
-/// generic variable, then you can't implement `From` directly. For example,
+/// generic variable, then you can't implement `From` directly. For example,
/// take this crate:
///
/// ```compile_fail
/// [`String`]: ../../std/string/struct.String.html
/// [`Into<U>`]: trait.Into.html
/// [`from`]: trait.From.html#tymethod.from
-/// [book]: ../../book/first-edition/error-handling.html
+/// [book]: ../../book/ch09-00-error-handling.html
#[stable(feature = "rust1", since = "1.0.0")]
pub trait From<T>: Sized {
/// Performs the conversion.
///
/// ## How can I implement `Default`?
///
-/// Provide an implementation for the `default()` method that returns the value of
+/// Provide an implementation for the `default()` method that returns the value of
/// your type that should be the default:
///
/// ```
va_arg(self)
}
- /// Copy the `va_list` at the current location.
+ /// Copies the `va_list` at the current location.
#[unstable(feature = "c_variadic",
reason = "the `c_variadic` feature has not been properly tested on \
all supported platforms",
/// `va_copy`.
fn va_end(ap: &mut VaList);
- /// Copy the current location of arglist `src` to the arglist `dst`.
+ /// Copies the current location of arglist `src` to the arglist `dst`.
#[cfg(any(all(not(target_arch = "aarch64"), not(target_arch = "powerpc"),
not(target_arch = "x86_64")),
windows))]
/// implementations, such as [`debug_struct`][debug_struct].
///
/// `Debug` implementations using either `derive` or the debug builder API
-/// on [`Formatter`] support pretty printing using the alternate flag: `{:#?}`.
+/// on [`Formatter`] support pretty-printing using the alternate flag: `{:#?}`.
///
/// [debug_struct]: ../../std/fmt/struct.Formatter.html#method.debug_struct
/// [`Formatter`]: ../../std/fmt/struct.Formatter.html
///
-/// Pretty printing with `#?`:
+/// Pretty-printing with `#?`:
///
/// ```
/// #[derive(Debug)]
Ok(())
}
+/// Padding after the end of something. Returned by `Formatter::padding`.
+#[must_use = "don't forget to write the post padding"]
+struct PostPadding {
+ fill: char,
+ padding: usize,
+}
+
+impl PostPadding {
+ fn new(fill: char, padding: usize) -> PostPadding {
+ PostPadding { fill, padding }
+ }
+
+ /// Writes this post padding.
+ fn write(self, buf: &mut dyn Write) -> Result {
+ for _ in 0..self.padding {
+ buf.write_char(self.fill)?;
+ }
+ Ok(())
+ }
+}
+
impl<'a> Formatter<'a> {
fn wrap_buf<'b, 'c, F>(&'b mut self, wrap: F) -> Formatter<'c>
where 'b: 'c, F: FnOnce(&'b mut (dyn Write+'b)) -> &'c mut (dyn Write+'c)
sign = Some('+'); width += 1;
}
- let prefixed = self.alternate();
- if prefixed {
+ let prefix = if self.alternate() {
width += prefix.chars().count();
- }
+ Some(prefix)
+ } else {
+ None
+ };
// Writes the sign if it exists, and then the prefix if it was requested
- let write_prefix = |f: &mut Formatter| {
+ #[inline(never)]
+ fn write_prefix(f: &mut Formatter, sign: Option<char>, prefix: Option<&str>) -> Result {
if let Some(c) = sign {
f.buf.write_char(c)?;
}
- if prefixed { f.buf.write_str(prefix) }
- else { Ok(()) }
- };
+ if let Some(prefix) = prefix {
+ f.buf.write_str(prefix)
+ } else {
+ Ok(())
+ }
+ }
// The `width` field is more of a `min-width` parameter at this point.
match self.width {
// If there's no minimum length requirements then we can just
// write the bytes.
None => {
- write_prefix(self)?; self.buf.write_str(buf)
+ write_prefix(self, sign, prefix)?;
+ self.buf.write_str(buf)
}
// Check if we're over the minimum width, if so then we can also
// just write the bytes.
Some(min) if width >= min => {
- write_prefix(self)?; self.buf.write_str(buf)
+ write_prefix(self, sign, prefix)?;
+ self.buf.write_str(buf)
}
// The sign and prefix goes before the padding if the fill character
// is zero
Some(min) if self.sign_aware_zero_pad() => {
self.fill = '0';
self.align = rt::v1::Alignment::Right;
- write_prefix(self)?;
- self.with_padding(min - width, rt::v1::Alignment::Right, |f| {
- f.buf.write_str(buf)
- })
+ write_prefix(self, sign, prefix)?;
+ let post_padding = self.padding(min - width, rt::v1::Alignment::Right)?;
+ self.buf.write_str(buf)?;
+ post_padding.write(self.buf)
}
// Otherwise, the sign and prefix goes after the padding
Some(min) => {
- self.with_padding(min - width, rt::v1::Alignment::Right, |f| {
- write_prefix(f)?; f.buf.write_str(buf)
- })
+ let post_padding = self.padding(min - width, rt::v1::Alignment::Right)?;
+ write_prefix(self, sign, prefix)?;
+ self.buf.write_str(buf)?;
+ post_padding.write(self.buf)
}
}
}
// up the minimum width with the specified string + some alignment.
Some(width) => {
let align = rt::v1::Alignment::Left;
- self.with_padding(width - s.chars().count(), align, |me| {
- me.buf.write_str(s)
- })
+ let post_padding = self.padding(width - s.chars().count(), align)?;
+ self.buf.write_str(s)?;
+ post_padding.write(self.buf)
}
}
}
- /// Runs a callback, emitting the correct padding either before or
- /// afterwards depending on whether right or left alignment is requested.
- fn with_padding<F>(&mut self, padding: usize, default: rt::v1::Alignment,
- f: F) -> Result
- where F: FnOnce(&mut Formatter) -> Result,
- {
+ /// Writes the pre-padding and returns the unwritten post-padding. Callers are
+ /// responsible for ensuring post-padding is written after the thing that is
+ /// being padded.
+ fn padding(
+ &mut self,
+ padding: usize,
+ default: rt::v1::Alignment
+ ) -> result::Result<PostPadding, Error> {
let align = match self.align {
rt::v1::Alignment::Unknown => default,
_ => self.align
rt::v1::Alignment::Center => (padding / 2, (padding + 1) / 2),
};
- let mut fill = [0; 4];
- let fill = self.fill.encode_utf8(&mut fill);
-
for _ in 0..pre_pad {
- self.buf.write_str(fill)?;
- }
-
- f(self)?;
-
- for _ in 0..post_pad {
- self.buf.write_str(fill)?;
+ self.buf.write_char(self.fill)?;
}
- Ok(())
+ Ok(PostPadding::new(self.fill, post_pad))
}
/// Takes the formatted parts and applies the padding.
let ret = if width <= len { // no padding
self.write_formatted_parts(&formatted)
} else {
- self.with_padding(width - len, align, |f| {
- f.write_formatted_parts(&formatted)
- })
+ let post_padding = self.padding(width - len, align)?;
+ self.write_formatted_parts(&formatted)?;
+ post_padding.write(self.buf)
};
self.fill = old_fill;
self.align = old_align;
integer! { i64, u64 }
integer! { i128, u128 }
-const DEC_DIGITS_LUT: &'static[u8] =
+
+static DEC_DIGITS_LUT: &[u8; 200] =
b"0001020304050607080910111213141516171819\
2021222324252627282930313233343536373839\
4041424344454647484950515253545556575859\
8081828384858687888990919293949596979899";
macro_rules! impl_Display {
- ($($t:ident),*: $conv_fn:ident) => ($(
- #[stable(feature = "rust1", since = "1.0.0")]
- impl fmt::Display for $t {
- #[allow(unused_comparisons)]
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- let is_nonnegative = *self >= 0;
- let mut n = if is_nonnegative {
- self.$conv_fn()
- } else {
- // convert the negative num to positive by summing 1 to it's 2 complement
- (!self.$conv_fn()).wrapping_add(1)
- };
+ ($($t:ident),* as $u:ident via $conv_fn:ident named $name:ident) => {
+ fn $name(mut n: $u, is_nonnegative: bool, f: &mut fmt::Formatter) -> fmt::Result {
let mut buf = uninitialized_array![u8; 39];
let mut curr = buf.len() as isize;
let buf_ptr = MaybeUninit::first_ptr_mut(&mut buf);
unsafe {
// need at least 16 bits for the 4-characters-at-a-time to work.
- if ::mem::size_of::<$t>() >= 2 {
- // eagerly decode 4 characters at a time
- while n >= 10000 {
- let rem = (n % 10000) as isize;
- n /= 10000;
-
- let d1 = (rem / 100) << 1;
- let d2 = (rem % 100) << 1;
- curr -= 4;
- ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);
- ptr::copy_nonoverlapping(lut_ptr.offset(d2), buf_ptr.offset(curr + 2), 2);
- }
+ assert!(::mem::size_of::<$u>() >= 2);
+
+ // eagerly decode 4 characters at a time
+ while n >= 10000 {
+ let rem = (n % 10000) as isize;
+ n /= 10000;
+
+ let d1 = (rem / 100) << 1;
+ let d2 = (rem % 100) << 1;
+ curr -= 4;
+ ptr::copy_nonoverlapping(lut_ptr.offset(d1), buf_ptr.offset(curr), 2);
+ ptr::copy_nonoverlapping(lut_ptr.offset(d2), buf_ptr.offset(curr + 2), 2);
}
// if we reach here numbers are <= 9999, so at most 4 chars long
};
f.pad_integral(is_nonnegative, "", buf_slice)
}
- })*);
+
+ $(
+ #[stable(feature = "rust1", since = "1.0.0")]
+ impl fmt::Display for $t {
+ #[allow(unused_comparisons)]
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ let is_nonnegative = *self >= 0;
+ let n = if is_nonnegative {
+ self.$conv_fn()
+ } else {
+ // convert the negative num to positive by summing 1 to it's 2 complement
+ (!self.$conv_fn()).wrapping_add(1)
+ };
+ $name(n, is_nonnegative, f)
+ }
+ })*
+ };
+}
+
+// Include wasm32 in here since it doesn't reflect the native pointer size, and
+// often cares strongly about getting a smaller code size.
+#[cfg(any(target_pointer_width = "64", target_arch = "wasm32"))]
+mod imp {
+ use super::*;
+ impl_Display!(
+ i8, u8, i16, u16, i32, u32, i64, u64, usize, isize
+ as u64 via to_u64 named fmt_u64
+ );
+}
+
+#[cfg(not(any(target_pointer_width = "64", target_arch = "wasm32")))]
+mod imp {
+ use super::*;
+ impl_Display!(i8, u8, i16, u16, i32, u32, isize, usize as u32 via to_u32 named fmt_u32);
+ impl_Display!(i64, u64 as u64 via to_u64 named fmt_u64);
}
-impl_Display!(i8, u8, i16, u16, i32, u32: to_u32);
-impl_Display!(i64, u64: to_u64);
-impl_Display!(i128, u128: to_u128);
-#[cfg(target_pointer_width = "16")]
-impl_Display!(isize, usize: to_u16);
-#[cfg(target_pointer_width = "32")]
-impl_Display!(isize, usize: to_u32);
-#[cfg(target_pointer_width = "64")]
-impl_Display!(isize, usize: to_u64);
+impl_Display!(i128, u128 as u128 via to_u128 named fmt_u128);
use marker::Unpin;
use ops;
use pin::Pin;
-use task::{Poll, LocalWaker};
+use task::{Poll, Waker};
/// A future represents an asynchronous computation.
///
/// final value. This method does not block if the value is not ready. Instead,
/// the current task is scheduled to be woken up when it's possible to make
/// further progress by `poll`ing again. The wake up is performed using
-/// `cx.waker()`, a handle for waking up the current task.
+/// the `waker` argument of the `poll()` method, which is a handle for waking
+/// up the current task.
///
/// When using a future, you generally won't call `poll` directly, but instead
/// `await!` the value.
#[must_use = "futures do nothing unless polled"]
pub trait Future {
- /// The result of the `Future`.
+ /// The type of value produced on completion.
type Output;
/// Attempt to resolve the future to a final value, registering
/// Once a future has finished, clients should not `poll` it again.
///
/// When a future is not ready yet, `poll` returns `Poll::Pending` and
- /// stores a clone of the [`LocalWaker`] to be woken once the future can
+ /// stores a clone of the [`Waker`] to be woken once the future can
/// make progress. For example, a future waiting for a socket to become
- /// readable would call `.clone()` on the [`LocalWaker`] and store it.
+ /// readable would call `.clone()` on the [`Waker`] and store it.
/// When a signal arrives elsewhere indicating that the socket is readable,
- /// `[LocalWaker::wake]` is called and the socket future's task is awoken.
+    /// [`Waker::wake`] is called and the socket future's task is awoken.
/// Once a task has been woken up, it should attempt to `poll` the future
/// again, which may or may not produce a final value.
///
/// Note that on multiple calls to `poll`, only the most recent
- /// [`LocalWaker`] passed to `poll` should be scheduled to receive a
+ /// [`Waker`] passed to `poll` should be scheduled to receive a
/// wakeup.
///
/// # Runtime characteristics
/// progress, meaning that each time the current task is woken up, it should
/// actively re-`poll` pending futures that it still has an interest in.
///
- /// The `poll` function is not called repeatedly in a tight loop-- instead,
+ /// The `poll` function is not called repeatedly in a tight loop -- instead,
/// it should only be called when the future indicates that it is ready to
/// make progress (by calling `wake()`). If you're familiar with the
/// `poll(2)` or `select(2)` syscalls on Unix it's worth noting that futures
/// typically do *not* suffer the same problems of "all wakeups must poll
/// all events"; they are more like `epoll(4)`.
///
- /// An implementation of `poll` should strive to return quickly, and must
- /// *never* block. Returning quickly prevents unnecessarily clogging up
+ /// An implementation of `poll` should strive to return quickly, and should
+ /// not block. Returning quickly prevents unnecessarily clogging up
/// threads or event loops. If it is known ahead of time that a call to
/// `poll` may end up taking awhile, the work should be offloaded to a
/// thread pool (or something similar) to ensure that `poll` can return
/// quickly.
///
- /// # [`LocalWaker`], [`Waker`] and thread-safety
- ///
- /// The `poll` function takes a [`LocalWaker`], an object which knows how to
- /// awaken the current task. [`LocalWaker`] is not `Send` nor `Sync`, so in
- /// order to make thread-safe futures the [`LocalWaker::into_waker`] method
- /// should be used to convert the [`LocalWaker`] into a thread-safe version.
- /// [`LocalWaker::wake`] implementations have the ability to be more
- /// efficient, however, so when thread safety is not necessary,
- /// [`LocalWaker`] should be preferred.
+    /// An implementation of `poll` must also never cause memory unsafety.
///
/// # Panics
///
/// Once a future has completed (returned `Ready` from `poll`),
/// then any future calls to `poll` may panic, block forever, or otherwise
- /// cause bad behavior. The `Future` trait itself provides no guarantees
- /// about the behavior of `poll` after a future has completed.
+    /// cause any kind of bad behavior except causing memory unsafety.
+ /// The `Future` trait itself provides no guarantees about the behavior
+ /// of `poll` after a future has completed.
///
/// [`Poll::Pending`]: ../task/enum.Poll.html#variant.Pending
/// [`Poll::Ready(val)`]: ../task/enum.Poll.html#variant.Ready
- /// [`LocalWaker`]: ../task/struct.LocalWaker.html
- /// [`LocalWaker::into_waker`]: ../task/struct.LocalWaker.html#method.into_waker
- /// [`LocalWaker::wake`]: ../task/struct.LocalWaker.html#method.wake
/// [`Waker`]: ../task/struct.Waker.html
- fn poll(self: Pin<&mut Self>, lw: &LocalWaker) -> Poll<Self::Output>;
+ /// [`Waker::wake`]: ../task/struct.Waker.html#method.wake
+ fn poll(self: Pin<&mut Self>, waker: &Waker) -> Poll<Self::Output>;
}
impl<'a, F: ?Sized + Future + Unpin> Future for &'a mut F {
type Output = F::Output;
- fn poll(mut self: Pin<&mut Self>, lw: &LocalWaker) -> Poll<Self::Output> {
- F::poll(Pin::new(&mut **self), lw)
+ fn poll(mut self: Pin<&mut Self>, waker: &Waker) -> Poll<Self::Output> {
+ F::poll(Pin::new(&mut **self), waker)
}
}
{
type Output = <<P as ops::Deref>::Target as Future>::Output;
- fn poll(self: Pin<&mut Self>, lw: &LocalWaker) -> Poll<Self::Output> {
- Pin::get_mut(self).as_mut().poll(lw)
+ fn poll(self: Pin<&mut Self>, waker: &Waker) -> Poll<Self::Output> {
+ Pin::get_mut(self).as_mut().poll(waker)
}
}
/// An implementation of SipHash 1-3.
///
/// This is currently the default hashing function used by standard library
-/// (eg. `collections::HashMap` uses it by default).
+/// (e.g., `collections::HashMap` uses it by default).
///
/// See: <https://131002.net/siphash>
#[unstable(feature = "hashmap_internals", issue = "0")]
});
}
-/// Load an integer of the desired type from a byte stream, in LE order. Uses
+/// Loads an integer of the desired type from a byte stream, in LE order. Uses
/// `copy_nonoverlapping` to let the compiler generate the most efficient way
/// to load it from a possibly unaligned address.
///
});
}
-/// Load an u64 using up to 7 bytes of a byte slice.
+/// Loads an u64 using up to 7 bytes of a byte slice.
///
/// Unsafe because: unchecked indexing at start..start+len
#[inline]
/// use std::hint::unreachable_unchecked;
///
/// // `b.saturating_add(1)` is always positive (not zero),
-/// // hence `checked_div` will never return None.
+/// // hence `checked_div` will never return `None`.
/// // Therefore, the else branch is unreachable.
/// a.checked_div(b.saturating_add(1))
/// .unwrap_or_else(|| unsafe { unreachable_unchecked() })
}
}
}
+
+/// Create a zero-size type similar to a closure type, but named.
+#[unstable(feature = "std_internals", issue = "0")]
+macro_rules! impl_fn_for_zst {
+ ($(
+ $( #[$attr: meta] )*
+ // FIXME: when libcore is in the 2018 edition, use `?` repetition in
+ // $( <$( $li : lifetime ),+> )?
+ struct $Name: ident impl$( <$( $lifetime : lifetime ),+> )* Fn =
+ |$( $arg: ident: $ArgTy: ty ),*| -> $ReturnTy: ty
+ $body: block;
+ )+) => {
+ $(
+ $( #[$attr] )*
+ struct $Name;
+
+ impl $( <$( $lifetime ),+> )* Fn<($( $ArgTy, )*)> for $Name {
+ #[inline]
+ extern "rust-call" fn call(&self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+ $body
+ }
+ }
+
+ impl $( <$( $lifetime ),+> )* FnMut<($( $ArgTy, )*)> for $Name {
+ #[inline]
+ extern "rust-call" fn call_mut(
+ &mut self,
+ ($( $arg, )*): ($( $ArgTy, )*)
+ ) -> $ReturnTy {
+ Fn::call(&*self, ($( $arg, )*))
+ }
+ }
+
+ impl $( <$( $lifetime ),+> )* FnOnce<($( $ArgTy, )*)> for $Name {
+ type Output = $ReturnTy;
+
+ #[inline]
+ extern "rust-call" fn call_once(self, ($( $arg, )*): ($( $ArgTy, )*)) -> $ReturnTy {
+ Fn::call(&self, ($( $arg, )*))
+ }
+ }
+ )+
+ }
+}
-//! rustc compiler intrinsics.
+//! Compiler intrinsics.
//!
-//! The corresponding definitions are in librustc_codegen_llvm/intrinsic.rs.
+//! The corresponding definitions are in `librustc_codegen_llvm/intrinsic.rs`.
//!
//! # Volatiles
//!
/// [`AtomicBool::swap`](../../std/sync/atomic/struct.AtomicBool.html#method.swap).
pub fn atomic_xchg_relaxed<T>(dst: *mut T, src: T) -> T;
- /// Add to the current value, returning the previous value.
+ /// Adds to the current value, returning the previous value.
/// The stabilized version of this intrinsic is available on the
/// `std::sync::atomic` types via the `fetch_add` method by passing
/// [`Ordering::SeqCst`](../../std/sync/atomic/enum.Ordering.html)
/// as the `order`. For example,
/// [`AtomicIsize::fetch_add`](../../std/sync/atomic/struct.AtomicIsize.html#method.fetch_add).
pub fn atomic_xadd<T>(dst: *mut T, src: T) -> T;
- /// Add to the current value, returning the previous value.
+ /// Adds to the current value, returning the previous value.
/// The stabilized version of this intrinsic is available on the
/// `std::sync::atomic` types via the `fetch_add` method by passing
/// [`Ordering::Acquire`](../../std/sync/atomic/enum.Ordering.html)
/// as the `order`. For example,
/// [`AtomicIsize::fetch_add`](../../std/sync/atomic/struct.AtomicIsize.html#method.fetch_add).
pub fn atomic_xadd_acq<T>(dst: *mut T, src: T) -> T;
- /// Add to the current value, returning the previous value.
+ /// Adds to the current value, returning the previous value.
/// The stabilized version of this intrinsic is available on the
/// `std::sync::atomic` types via the `fetch_add` method by passing
/// [`Ordering::Release`](../../std/sync/atomic/enum.Ordering.html)
/// as the `order`. For example,
/// [`AtomicIsize::fetch_add`](../../std/sync/atomic/struct.AtomicIsize.html#method.fetch_add).
pub fn atomic_xadd_rel<T>(dst: *mut T, src: T) -> T;
- /// Add to the current value, returning the previous value.
+ /// Adds to the current value, returning the previous value.
/// The stabilized version of this intrinsic is available on the
/// `std::sync::atomic` types via the `fetch_add` method by passing
/// [`Ordering::AcqRel`](../../std/sync/atomic/enum.Ordering.html)
/// as the `order`. For example,
/// [`AtomicIsize::fetch_add`](../../std/sync/atomic/struct.AtomicIsize.html#method.fetch_add).
pub fn atomic_xadd_acqrel<T>(dst: *mut T, src: T) -> T;
- /// Add to the current value, returning the previous value.
+ /// Adds to the current value, returning the previous value.
/// The stabilized version of this intrinsic is available on the
/// `std::sync::atomic` types via the `fetch_add` method by passing
/// [`Ordering::Relaxed`](../../std/sync/atomic/enum.Ordering.html)
pub fn atomic_umax_relaxed<T>(dst: *mut T, src: T) -> T;
/// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
- /// if supported; otherwise, it is a noop.
+ /// if supported; otherwise, it is a no-op.
/// Prefetches have no effect on the behavior of the program but can change its performance
/// characteristics.
///
/// ranging from (0) - no locality, to (3) - extremely local keep in cache
pub fn prefetch_read_data<T>(data: *const T, locality: i32);
/// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
- /// if supported; otherwise, it is a noop.
+ /// if supported; otherwise, it is a no-op.
/// Prefetches have no effect on the behavior of the program but can change its performance
/// characteristics.
///
/// ranging from (0) - no locality, to (3) - extremely local keep in cache
pub fn prefetch_write_data<T>(data: *const T, locality: i32);
/// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
- /// if supported; otherwise, it is a noop.
+ /// if supported; otherwise, it is a no-op.
/// Prefetches have no effect on the behavior of the program but can change its performance
/// characteristics.
///
/// ranging from (0) - no locality, to (3) - extremely local keep in cache
pub fn prefetch_read_instruction<T>(data: *const T, locality: i32);
/// The `prefetch` intrinsic is a hint to the code generator to insert a prefetch instruction
- /// if supported; otherwise, it is a noop.
+ /// if supported; otherwise, it is a no-op.
/// Prefetches have no effect on the behavior of the program but can change its performance
/// characteristics.
///
/// Creates a value initialized to zero.
///
/// `init` is unsafe because it returns a zeroed-out datum,
- /// which is unsafe unless T is `Copy`. Also, even if T is
+    /// which is unsafe unless `T` is `Copy`. Also, even if `T` is
/// `Copy`, an all-zero value may not correspond to any legitimate
/// state for the type in question.
pub fn init<T>() -> T;
///
/// // The no-copy, unsafe way, still using transmute, but not UB.
/// // This is equivalent to the original, but safer, and reuses the
- /// // same Vec internals. Therefore the new inner type must have the
+ /// // same `Vec` internals. Therefore, the new inner type must have the
/// // exact same size, and the same alignment, as the old type.
/// // The same caveats exist for this method as transmute, for
/// // the original inner type (`&i32`) to the converted inner type
/// ```
/// use std::{slice, mem};
///
- /// // There are multiple ways to do this; and there are multiple problems
- /// // with the following, transmute, way.
+ /// // There are multiple ways to do this, and there are multiple problems
+ /// // with the following (transmute) way.
/// fn split_at_mut_transmute<T>(slice: &mut [T], mid: usize)
/// -> (&mut [T], &mut [T]) {
/// let len = slice.len();
/// beginning at `dst` with the same size.
///
/// Like [`read`], `copy_nonoverlapping` creates a bitwise copy of `T`, regardless of
- /// whether `T` is [`Copy`]. If `T` is not [`Copy`], using *both* the values
+ /// whether `T` is [`Copy`]. If `T` is not [`Copy`], using *both* the values
/// in the region beginning at `*src` and the region beginning at `*dst` can
/// [violate memory safety][read-ownership].
///
/// [`copy_nonoverlapping`] can be used instead.
///
/// `copy` is semantically equivalent to C's [`memmove`], but with the argument
- /// order swapped. Copying takes place as if the bytes were copied from `src`
+ /// order swapped. Copying takes place as if the bytes were copied from `src`
/// to a temporary array and then copied from the array to `dst`.
///
/// [`copy_nonoverlapping`]: ./fn.copy_nonoverlapping.html
/// * Both `src` and `dst` must be properly aligned.
///
/// Like [`read`], `copy` creates a bitwise copy of `T`, regardless of
- /// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the values
+ /// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the values
/// in the region beginning at `*src` and the region beginning at `*dst` can
/// [violate memory safety][read-ownership].
///
/// unless size is equal to zero.
pub fn volatile_set_memory<T>(dst: *mut T, val: u8, count: usize);
- /// Perform a volatile load from the `src` pointer.
+ /// Performs a volatile load from the `src` pointer.
/// The stabilized version of this intrinsic is
/// [`std::ptr::read_volatile`](../../std/ptr/fn.read_volatile.html).
pub fn volatile_load<T>(src: *const T) -> T;
- /// Perform a volatile store to the `dst` pointer.
+ /// Performs a volatile store to the `dst` pointer.
/// The stabilized version of this intrinsic is
/// [`std::ptr::write_volatile`](../../std/ptr/fn.write_volatile.html).
pub fn volatile_store<T>(dst: *mut T, val: T);
- /// Perform a volatile load from the `src` pointer
+    /// Performs a volatile load from the `src` pointer.
/// The pointer is not required to be aligned.
pub fn unaligned_volatile_load<T>(src: *const T) -> T;
- /// Perform a volatile store to the `dst` pointer.
+ /// Performs a volatile store to the `dst` pointer.
/// The pointer is not required to be aligned.
pub fn unaligned_volatile_store<T>(dst: *mut T, val: T);
/// without overflow.
fn steps_between(start: &Self, end: &Self) -> Option<usize>;
- /// Replaces this step with `1`, returning itself
+ /// Replaces this step with `1`, returning itself.
fn replace_one(&mut self) -> Self;
- /// Replaces this step with `0`, returning itself
+ /// Replaces this step with `0`, returning itself.
fn replace_zero(&mut self) -> Self;
- /// Adds one to this step, returning the result
+ /// Adds one to this step, returning the result.
fn add_one(&self) -> Self;
- /// Subtracts one to this step, returning the result
+ /// Subtracts one to this step, returning the result.
fn sub_one(&self) -> Self;
- /// Add an usize, returning None on overflow
+ /// Adds a `usize`, returning `None` on overflow.
fn add_usize(&self, n: usize) -> Option<Self>;
}
lower
}
- /// Returns whether the iterator is empty.
+ /// Returns `true` if the iterator is empty.
///
/// This method has a default implementation using `self.len()`, so you
/// don't need to implement it yourself.
/// // ... and then None once it's over.
/// assert_eq!(None, iter.next());
///
- /// // More calls may or may not return None. Here, they always will.
+ /// // More calls may or may not return `None`. Here, they always will.
/// assert_eq!(None, iter.next());
/// assert_eq!(None, iter.next());
/// ```
/// Calls a closure on each element of an iterator.
///
/// This is equivalent to using a [`for`] loop on the iterator, although
- /// `break` and `continue` are not possible from a closure. It's generally
+ /// `break` and `continue` are not possible from a closure. It's generally
/// more idiomatic to use a `for` loop, but `for_each` may be more legible
- /// when processing items at the end of longer iterator chains. In some
+ /// when processing items at the end of longer iterator chains. In some
/// cases `for_each` may also be faster than a loop, because it will use
/// internal iteration on adaptors like `Chain`.
///
/// assert_eq!(iter.next(), Some(4));
/// assert_eq!(iter.next(), None);
///
- /// // it will always return None after the first time.
+ /// // it will always return `None` after the first time.
/// assert_eq!(iter.next(), None);
/// assert_eq!(iter.next(), None);
/// assert_eq!(iter.next(), None);
/// is propagated back to the caller immediately (short-circuiting).
///
/// The initial value is the value the accumulator will have on the first
- /// call. If applying the closure succeeded against every element of the
+ /// call. If applying the closure succeeded against every element of the
/// iterator, `try_fold()` returns the final accumulator as success.
///
/// Folding is useful whenever you have a collection of something, and want
/// do something better than the default `for` loop implementation.
///
/// In particular, try to have this call `try_fold()` on the internal parts
- /// from which this iterator is composed. If multiple calls are needed,
+ /// from which this iterator is composed. If multiple calls are needed,
/// the `?` operator may be convenient for chaining the accumulator value
/// along, but beware any invariants that need to be upheld before those
- /// early returns. This is a `&mut self` method, so iteration needs to be
+ /// early returns. This is a `&mut self` method, so iteration needs to be
/// resumable after hitting an error here.
///
/// # Examples
#![feature(simd_ffi)]
#![feature(specialization)]
#![feature(staged_api)]
+#![feature(std_internals)]
#![feature(stmt_expr_attributes)]
#![feature(unboxed_closures)]
#![feature(unsized_locals)]
// `core_arch` depends on libcore, but the contents of this module are
// set up in such a way that directly pulling it here works such that the
// crate uses the this crate as its libcore.
-#[allow(unused_macros)]
-macro_rules! test_v16 { ($item:item) => {}; }
-#[allow(unused_macros)]
-macro_rules! test_v32 { ($item:item) => {}; }
-#[allow(unused_macros)]
-macro_rules! test_v64 { ($item:item) => {}; }
-#[allow(unused_macros)]
-macro_rules! test_v128 { ($item:item) => {}; }
-#[allow(unused_macros)]
-macro_rules! test_v256 { ($item:item) => {}; }
-#[allow(unused_macros)]
-macro_rules! test_v512 { ($item:item) => {}; }
-#[allow(unused_macros)]
-macro_rules! vector_impl { ($([$f:ident, $($args:tt)*]),*) => { $($f!($($args)*);)* } }
#[path = "../stdsimd/crates/core_arch/src/mod.rs"]
#[allow(missing_docs, missing_debug_implementations, dead_code, unused_imports)]
#[unstable(feature = "stdsimd", issue = "48556")]
-/// Entry point of thread panic, for details, see std::macros
+/// Entry point of thread panic. For details, see `std::macros`.
#[macro_export]
#[cfg_attr(not(stage0), allow_internal_unstable(core_panic, __rust_unstable_column))]
#[cfg_attr(stage0, allow_internal_unstable)]
match (&$left, &$right) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
+ // The reborrows below are intentional. Without them, the stack slot for the
+ // borrow is initialized even before the values are compared, leading to a
+ // noticeable slow down.
panic!(r#"assertion failed: `(left == right)`
left: `{:?}`,
- right: `{:?}`"#, left_val, right_val)
+ right: `{:?}`"#, &*left_val, &*right_val)
}
}
}
match (&($left), &($right)) {
(left_val, right_val) => {
if !(*left_val == *right_val) {
+ // The reborrows below are intentional. Without them, the stack slot for the
+ // borrow is initialized even before the values are compared, leading to a
+ // noticeable slow down.
panic!(r#"assertion failed: `(left == right)`
left: `{:?}`,
- right: `{:?}`: {}"#, left_val, right_val,
+ right: `{:?}`: {}"#, &*left_val, &*right_val,
format_args!($($arg)+))
}
}
match (&$left, &$right) {
(left_val, right_val) => {
if *left_val == *right_val {
+ // The reborrows below are intentional. Without them, the stack slot for the
+ // borrow is initialized even before the values are compared, leading to a
+ // noticeable slow down.
panic!(r#"assertion failed: `(left != right)`
left: `{:?}`,
- right: `{:?}`"#, left_val, right_val)
+ right: `{:?}`"#, &*left_val, &*right_val)
}
}
}
match (&($left), &($right)) {
(left_val, right_val) => {
if *left_val == *right_val {
+ // The reborrows below are intentional. Without them, the stack slot for the
+ // borrow is initialized even before the values are compared, leading to a
+ // noticeable slow down.
panic!(r#"assertion failed: `(left != right)`
left: `{:?}`,
- right: `{:?}`: {}"#, left_val, right_val,
+ right: `{:?}`: {}"#, &*left_val, &*right_val,
format_args!($($arg)+))
}
}
/// * Iterators that dynamically terminate.
///
/// If the determination that the code is unreachable proves incorrect, the
-/// program immediately terminates with a [`panic!`]. The function [`unreachable_unchecked`],
+/// program immediately terminates with a [`panic!`]. The function [`unreachable_unchecked`],
/// which belongs to the [`std::hint`] module, informs the compiler to
/// optimize the code out of the release version entirely.
///
/// A standardized placeholder for marking unfinished code.
///
/// This can be useful if you are prototyping and are just looking to have your
-/// code typecheck, or if you're implementing a trait that requires multiple
+/// code type-check, or if you're implementing a trait that requires multiple
/// methods, and you're only planning on using one of them.
///
/// # Panics
/// // be made into an object
/// ```
///
-/// [trait object]: ../../book/first-edition/trait-objects.html
+/// [trait object]: ../../book/ch17-02-trait-objects.html
#[stable(feature = "rust1", since = "1.0.0")]
#[lang = "sized"]
#[rustc_on_unimplemented(
/// types. We track the Rust type using a phantom type parameter on
/// the struct `ExternalResource` which wraps a handle.
///
-/// [FFI]: ../../book/first-edition/ffi.html
+/// [FFI]: ../../book/ch19-01-unsafe-rust.html#using-extern-functions-to-call-external-code
///
/// ```
/// # #![allow(dead_code)]
/// Returns the size of the pointed-to value in bytes.
///
/// This is usually the same as `size_of::<T>()`. However, when `T` *has* no
-/// statically known size, e.g., a slice [`[T]`][slice] or a [trait object],
+/// statically-known size, e.g., a slice [`[T]`][slice] or a [trait object],
/// then `size_of_val` can be used to get the dynamically-known size.
///
/// [slice]: ../../std/primitive.slice.html
-/// [trait object]: ../../book/first-edition/trait-objects.html
+/// [trait object]: ../../book/ch17-02-trait-objects.html
///
/// # Examples
///
unsafe { intrinsics::min_align_of_val(val) }
}
-/// Returns whether dropping values of type `T` matters.
+/// Returns `true` if dropping values of type `T` matters.
///
/// This is purely an optimization hint, and may be implemented conservatively:
/// it may return `true` for types that don't actually need to be dropped.
ManuallyDrop { value }
}
- /// Extract the value from the `ManuallyDrop` container.
+ /// Extracts the value from the `ManuallyDrop` container.
///
/// This allows the value to be dropped again.
///
/// A newtype to construct uninitialized instances of `T`.
///
/// The compiler, in general, assumes that variables are properly initialized
-/// at their respective type. For example, a variable of reference type must
-/// be aligned and non-NULL. This is an invariant that must *always* be upheld,
-/// even in unsafe code. As a consequence, 0-initializing a variable of reference
+/// at their respective type. For example, a variable of reference type must
+/// be aligned and non-NULL. This is an invariant that must *always* be upheld,
+/// even in unsafe code. As a consequence, zero-initializing a variable of reference
/// type causes instantaneous undefined behavior, no matter whether that reference
/// ever gets used to access memory:
+///
/// ```rust,no_run
/// use std::mem;
///
/// let x: &i32 = unsafe { mem::zeroed() }; // undefined behavior!
/// ```
+///
/// This is exploited by the compiler for various optimizations, such as eliding
/// run-time checks and optimizing `enum` layout.
///
-/// Not initializing memory at all (instead of 0-initializing it) causes the same
+/// Not initializing memory at all (instead of zero-initializing it) causes the same
/// issue: after all, the initial value of the variable might just happen to be
/// one that violates the invariant.
///
/// `MaybeUninit` serves to enable unsafe code to deal with uninitialized data:
/// it is a signal to the compiler indicating that the data here might *not*
/// be initialized:
+///
/// ```rust
/// #![feature(maybe_uninit)]
/// use std::mem::MaybeUninit;
/// // initializing `x`!
/// let x = unsafe { x.into_initialized() };
/// ```
+///
/// The compiler then knows to not optimize this code.
#[allow(missing_debug_implementations)]
#[unstable(feature = "maybe_uninit", issue = "53491")]
MaybeUninit { value: ManuallyDrop::new(val) }
}
- /// Create a new `MaybeUninit` in an uninitialized state.
+ /// Creates a new `MaybeUninit` in an uninitialized state.
///
/// Note that dropping a `MaybeUninit` will never call `T`'s drop code.
/// It is your responsibility to make sure `T` gets dropped if it got initialized.
MaybeUninit { uninit: () }
}
- /// Create a new `MaybeUninit` in an uninitialized state, with the memory being
- /// filled with `0` bytes. It depends on `T` whether that already makes for
+ /// Creates a new `MaybeUninit` in an uninitialized state, with the memory being
+ /// filled with `0` bytes. It depends on `T` whether that already makes for
/// proper initialization. For example, `MaybeUninit<usize>::zeroed()` is initialized,
/// but `MaybeUninit<&'static i32>::zeroed()` is not because references must not
/// be null.
u
}
- /// Set the value of the `MaybeUninit`. This overwrites any previous value without dropping it.
- /// For your convenience, this also returns a mutable reference to the (now
- /// safely initialized) content of `self`.
+ /// Sets the value of the `MaybeUninit`. This overwrites any previous value without dropping it.
+ /// For your convenience, this also returns a mutable reference to the (now safely initialized)
+ /// contents of `self`.
#[unstable(feature = "maybe_uninit", issue = "53491")]
#[inline(always)]
pub fn set(&mut self, val: T) -> &mut T {
}
}
- /// Extract the value from the `MaybeUninit` container. This is a great way
+ /// Extracts the value from the `MaybeUninit` container. This is a great way
/// to ensure that the data will get dropped, because the resulting `T` is
/// subject to the usual drop handling.
///
ManuallyDrop::into_inner(self.value)
}
- /// Deprecated alternative to `into_initialized`. Will never get stabilized.
+ /// Deprecated alternative to `into_initialized`. Will never get stabilized.
/// Exists only to transition stdsimd to `into_initialized`.
#[inline(always)]
#[allow(unused)]
self.into_initialized()
}
- /// Get a reference to the contained value.
+ /// Gets a reference to the contained value.
///
/// # Unsafety
///
&*self.value
}
- /// Get a mutable reference to the contained value.
+ /// Gets a mutable reference to the contained value.
///
/// # Unsafety
///
&mut *self.value
}
- /// Get a pointer to the contained value. Reading from this pointer or turning it
+ /// Gets a pointer to the contained value. Reading from this pointer or turning it
/// into a reference will be undefined behavior unless the `MaybeUninit` is initialized.
#[unstable(feature = "maybe_uninit", issue = "53491")]
#[inline(always)]
unsafe { &*self.value as *const T }
}
- /// Get a mutable pointer to the contained value. Reading from this pointer or turning it
+    /// Gets a mutable pointer to the contained value. Reading from this pointer or turning it
/// into a reference will be undefined behavior unless the `MaybeUninit` is initialized.
#[unstable(feature = "maybe_uninit", issue = "53491")]
#[inline(always)]
unsafe { &mut *self.value as *mut T }
}
- /// Get a pointer to the first element of the array.
+ /// Gets a pointer to the first element of the array.
#[unstable(feature = "maybe_uninit", issue = "53491")]
#[inline(always)]
pub fn first_ptr(this: &[MaybeUninit<T>]) -> *const T {
this as *const [MaybeUninit<T>] as *const T
}
- /// Get a mutable pointer to the first element of the array.
+ /// Gets a mutable pointer to the first element of the array.
#[unstable(feature = "maybe_uninit", issue = "53491")]
#[inline(always)]
pub fn first_ptr_mut(this: &mut [MaybeUninit<T>]) -> *mut T {
unsafe { asm!("fldcw $0" :: "m" (cw) :: "volatile") }
}
- /// Set the precision field of the FPU to `T` and return a `FPUControlWord`
+ /// Sets the precision field of the FPU to `T` and returns a `FPUControlWord`.
pub fn set_precision<T>() -> FPUControlWord {
let cw = 0u16;
//!
//! In addition, there are numerous helper functions that are used in the paper but not available
//! in Rust (or at least in core). Our version is additionally complicated by the need to handle
-//! overflow and underflow and the desire to handle subnormal numbers. Bellerophon and
+//! overflow and underflow and the desire to handle subnormal numbers. Bellerophon and
//! Algorithm R have trouble with overflow, subnormals, and underflow. We conservatively switch to
//! Algorithm M (with the modifications described in section 8 of the paper) well before the
//! inputs get into the critical region.
//! operations as well, if you want 0.5 ULP accuracy you need to do *everything* in full precision
//! and round *exactly once, at the end*, by considering all truncated bits at once.
//!
-//! FIXME Although some code duplication is necessary, perhaps parts of the code could be shuffled
+//! FIXME: Although some code duplication is necessary, perhaps parts of the code could be shuffled
//! around such that less code is duplicated. Large parts of the algorithms are independent of the
//! float type to output, or only needs access to a few constants, which could be passed in as
//! parameters.
/// # Return value
///
/// `Err(ParseFloatError)` if the string did not represent a valid
- /// number. Otherwise, `Ok(n)` where `n` is the floating-point
+ /// number. Otherwise, `Ok(n)` where `n` is the floating-point
/// number represented by `src`.
#[inline]
fn from_str(src: &str) -> Result<Self, ParseFloatError> {
ParseFloatError { kind: FloatErrorKind::Invalid }
}
-/// Split decimal string into sign and the rest, without inspecting or validating the rest.
+/// Splits a decimal string into sign and the rest, without inspecting or validating the rest.
fn extract_sign(s: &str) -> (Sign, &str) {
match s.as_bytes()[0] {
b'+' => (Sign::Positive, &s[1..]),
}
}
-/// Convert a decimal string into a floating point number.
+/// Converts a decimal string into a floating point number.
fn dec2flt<T: RawFloat>(s: &str) -> Result<T, ParseFloatError> {
if s.is_empty() {
return Err(pfe_empty())
Equal
}
-/// Convert an ASCII string containing only decimal digits to a `u64`.
+/// Converts an ASCII string containing only decimal digits to a `u64`.
///
/// Does not perform checks for overflow or invalid characters, so if the caller is not careful,
/// the result is bogus and can panic (though it won't be `unsafe`). Additionally, empty strings
result
}
-/// Convert a string of ASCII digits into a bignum.
+/// Converts a string of ASCII digits into a bignum.
///
/// Like `from_str_unchecked`, this function relies on the parser to weed out non-digits.
pub fn digits_to_big(integral: &[u8], fractional: &[u8]) -> Big {
}
-/// Extract a range of bits.
+/// Extracts a range of bits.
/// Index 0 is the least significant bit and the range is half-open as usual.
/// Panics if asked to extract more bits than fit into the return type.
Invalid,
}
-/// Check if the input string is a valid floating point number and if so, locate the integral
+/// Checks if the input string is a valid floating point number and if so, locates the integral
/// part, the fractional part, and the exponent in it. Does not handle signs.
pub fn parse_decimal(s: &str) -> ParseResult {
if s.is_empty() {
/// Type used by `to_bits` and `from_bits`.
type Bits: Add<Output = Self::Bits> + From<u8> + TryFrom<u64>;
- /// Raw transmutation to integer.
+ /// Performs a raw transmutation to an integer.
fn to_bits(self) -> Self::Bits;
- /// Raw transmutation from integer.
+ /// Performs a raw transmutation from an integer.
fn from_bits(v: Self::Bits) -> Self;
/// Returns the category that this number falls into.
/// Returns the mantissa, exponent and sign as integers.
fn integer_decode(self) -> (u64, i16, i8);
- /// Decode the float.
+ /// Decodes the float.
fn unpack(self) -> Unpacked;
- /// Cast from a small integer that can be represented exactly. Panic if the integer can't be
+ /// Casts from a small integer that can be represented exactly. Panics if the integer can't be
/// represented, the other code in this module makes sure to never let that happen.
fn from_int(x: u64) -> Self;
- /// Get the value 10<sup>e</sup> from a pre-computed table.
+ /// Gets the value 10<sup>e</sup> from a pre-computed table.
/// Panics for `e >= CEIL_LOG5_OF_MAX_SIG`.
fn short_fast_pow10(e: usize) -> Self;
fn from_bits(v: Self::Bits) -> Self { Self::from_bits(v) }
}
-/// Convert an Fp to the closest machine float type.
+/// Converts an `Fp` to the closest machine float type.
/// Does not handle subnormal results.
pub fn fp_to_float<T: RawFloat>(x: Fp) -> T {
let x = x.normalize();
}
}
-/// Find the largest floating point number strictly smaller than the argument.
+/// Finds the largest floating point number strictly smaller than the argument.
/// Does not handle subnormals, zero, or exponent underflow.
pub fn prev_float<T: RawFloat>(x: T) -> T {
match x.classify() {
#[lang = "f32"]
#[cfg(not(test))]
impl f32 {
- /// Returns `true` if this value is `NaN` and false otherwise.
+ /// Returns `true` if this value is `NaN`.
///
/// ```
/// use std::f32;
f32::from_bits(self.to_bits() & 0x7fff_ffff)
}
- /// Returns `true` if this value is positive infinity or negative infinity and
- /// false otherwise.
+ /// Returns `true` if this value is positive infinity or negative infinity, and
+ /// `false` otherwise.
///
/// ```
/// use std::f32;
}
}
- /// Returns `true` if and only if `self` has a positive sign, including `+0.0`, `NaN`s with
+ /// Returns `true` if `self` has a positive sign, including `+0.0`, `NaN`s with
/// positive sign bit and positive infinity.
///
/// ```
!self.is_sign_negative()
}
- /// Returns `true` if and only if `self` has a negative sign, including `-0.0`, `NaN`s with
+ /// Returns `true` if `self` has a negative sign, including `-0.0`, `NaN`s with
/// negative sign bit and negative infinity.
///
/// ```
#[lang = "f64"]
#[cfg(not(test))]
impl f64 {
- /// Returns `true` if this value is `NaN` and false otherwise.
+ /// Returns `true` if this value is `NaN`.
///
/// ```
/// use std::f64;
f64::from_bits(self.to_bits() & 0x7fff_ffff_ffff_ffff)
}
- /// Returns `true` if this value is positive infinity or negative infinity and
- /// false otherwise.
+ /// Returns `true` if this value is positive infinity or negative infinity, and
+ /// `false` otherwise.
///
/// ```
/// use std::f64;
}
}
- /// Returns `true` if and only if `self` has a positive sign, including `+0.0`, `NaN`s with
+ /// Returns `true` if `self` has a positive sign, including `+0.0`, `NaN`s with
/// positive sign bit and positive infinity.
///
/// ```
self.is_sign_positive()
}
- /// Returns `true` if and only if `self` has a negative sign, including `-0.0`, `NaN`s with
+ /// Returns `true` if `self` has a negative sign, including `-0.0`, `NaN`s with
/// negative sign bit and negative infinity.
///
/// ```
}
impl $Ty {
- /// Create a non-zero without checking the value.
+ /// Creates a non-zero without checking the value.
///
/// # Safety
///
$Ty(n)
}
- /// Create a non-zero if the given value is not zero.
+ /// Creates a non-zero if the given value is not zero.
#[$stability]
#[inline]
pub fn new(n: $Int) -> Option<Self> {
/// }
///
/// // Notice that the implementation uses the associated type `Output`.
-/// impl<T: Add<Output=T>> Add for Point<T> {
+/// impl<T: Add<Output = T>> Add for Point<T> {
/// type Output = Point<T>;
///
/// fn add(self, other: Point<T>) -> Point<T> {
/// }
///
/// // Notice that the implementation uses the associated type `Output`.
-/// impl<T: Sub<Output=T>> Sub for Point<T> {
+/// impl<T: Sub<Output = T>> Sub for Point<T> {
/// type Output = Point<T>;
///
/// fn sub(self, other: Point<T>) -> Point<T> {
macro_rules! rem_impl_integer {
($($t:ty)*) => ($(
- /// This operation satisfies `n % d == n - (n / d) * d`. The
+ /// This operation satisfies `n % d == n - (n / d) * d`. The
/// result has the same sign as the left operand.
#[stable(feature = "rust1", since = "1.0.0")]
impl Rem for $t {
/// (`start..end`).
///
/// The `Range` `start..end` contains all values with `x >= start` and
-/// `x < end`. It is empty unless `start < end`.
+/// `x < end`. It is empty unless `start < end`.
///
/// # Examples
///
/// A range bounded inclusively below and above (`start..=end`).
///
/// The `RangeInclusive` `start..=end` contains all values with `x >= start`
-/// and `x <= end`. It is empty unless `start <= end`.
+/// and `x <= end`. It is empty unless `start <= end`.
///
/// This iterator is [fused], but the specific values of `start` and `end` after
/// iteration has finished are **unspecified** other than that [`.is_empty()`]
/// A trait for customizing the behavior of the `?` operator.
///
/// A type implementing `Try` is one that has a canonical way to view it
-/// in terms of a success/failure dichotomy. This trait allows both
+/// in terms of a success/failure dichotomy. This trait allows both
/// extracting those success or failure values from an existing instance and
/// creating a new instance from a success or failure value.
#[unstable(feature = "try_trait", issue = "42327")]
///
/// # Examples
///
- /// Convert an `Option<`[`String`]`>` into an `Option<`[`usize`]`>`, preserving the original.
+ /// Converts an `Option<`[`String`]`>` into an `Option<`[`usize`]`>`, preserving the original.
/// The [`map`] method takes the `self` argument by value, consuming the original,
/// so this technique uses `as_ref` to first take an `Option` to a reference
/// to the value inside the original.
///
/// # Examples
///
- /// Convert an `Option<`[`String`]`>` into an `Option<`[`usize`]`>`, consuming the original:
+ /// Converts an `Option<`[`String`]`>` into an `Option<`[`usize`]`>`, consuming the original:
///
/// [`String`]: ../../std/string/struct.String.html
/// [`usize`]: ../../std/primitive.usize.html
///
/// # Examples
///
- /// Convert a string to an integer, turning poorly-formed strings
+ /// Converts a string to an integer, turning poorly-formed strings
/// into 0 (the default value for integers). [`parse`] converts
/// a string to any other type that implements [`FromStr`], returning
/// [`None`] on error.
Pin { pointer }
}
- /// Get a pinned shared reference from this pinned pointer.
+ /// Gets a pinned shared reference from this pinned pointer.
#[stable(feature = "pin", since = "1.33.0")]
#[inline(always)]
pub fn as_ref(self: &Pin<P>) -> Pin<&P::Target> {
}
impl<P: DerefMut> Pin<P> {
- /// Get a pinned mutable reference from this pinned pointer.
+ /// Gets a pinned mutable reference from this pinned pointer.
#[stable(feature = "pin", since = "1.33.0")]
#[inline(always)]
pub fn as_mut(self: &mut Pin<P>) -> Pin<&mut P::Target> {
Pin::new_unchecked(new_pointer)
}
- /// Get a shared reference out of a pin.
+ /// Gets a shared reference out of a pin.
///
/// Note: `Pin` also implements `Deref` to the target, which can be used
/// to access the inner value. However, `Deref` only provides a reference
}
impl<'a, T: ?Sized> Pin<&'a mut T> {
- /// Convert this `Pin<&mut T>` into a `Pin<&T>` with the same lifetime.
+ /// Converts this `Pin<&mut T>` into a `Pin<&T>` with the same lifetime.
#[stable(feature = "pin", since = "1.33.0")]
#[inline(always)]
pub fn into_ref(self: Pin<&'a mut T>) -> Pin<&'a T> {
Pin { pointer: self.pointer }
}
- /// Get a mutable reference to the data inside of this `Pin`.
+ /// Gets a mutable reference to the data inside of this `Pin`.
///
/// This requires that the data inside this `Pin` is `Unpin`.
///
self.pointer
}
- /// Get a mutable reference to the data inside of this `Pin`.
+ /// Gets a mutable reference to the data inside of this `Pin`.
///
/// # Safety
///
//! to access only a single value, in which case the documentation omits the size
//! and implicitly assumes it to be `size_of::<T>()` bytes.
//!
-//! The precise rules for validity are not determined yet. The guarantees that are
+//! The precise rules for validity are not determined yet. The guarantees that are
//! provided at this point are very minimal:
//!
//! * A [null] pointer is *never* valid, not even for accesses of [size zero][zst].
///
/// * `to_drop` must be [valid] for reads.
///
-/// * `to_drop` must be properly aligned. See the example below for how to drop
+/// * `to_drop` must be properly aligned. See the example below for how to drop
/// an unaligned pointer.
///
/// Additionally, if `T` is not [`Copy`], using the pointed-to value after
/// unsafe {
/// // Get a raw pointer to the last element in `v`.
/// let ptr = &mut v[1] as *mut _;
-/// // Shorten `v` to prevent the last item from being dropped. We do that first,
+/// // Shorten `v` to prevent the last item from being dropped. We do that first,
/// // to prevent issues if the `drop_in_place` below panics.
/// v.set_len(1);
/// // Without a call `drop_in_place`, the last item would never be dropped,
///
/// `read` creates a bitwise copy of `T`, regardless of whether `T` is [`Copy`].
/// If `T` is not [`Copy`], using both the returned value and the value at
-/// `*src` can violate memory safety. Note that assigning to `*src` counts as a
+/// `*src` can violate memory safety. Note that assigning to `*src` counts as a
/// use because it will attempt to drop the value at `*src`.
///
/// [`write`] can be used to overwrite data without causing it to be dropped.
/// * `src` must be [valid] for reads.
///
/// Like [`read`], `read_unaligned` creates a bitwise copy of `T`, regardless of
-/// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the returned
+/// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the returned
/// value and the value at `*src` can [violate memory safety][read-ownership].
///
/// Note that even if `T` has size `0`, the pointer must be non-NULL.
///
/// The compiler shouldn't change the relative order or number of volatile
/// memory operations. However, volatile memory operations on zero-sized types
-/// (e.g., if a zero-sized type is passed to `read_volatile`) are no-ops
+/// (e.g., if a zero-sized type is passed to `read_volatile`) are noops
/// and may be ignored.
///
/// [c11]: http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1570.pdf
/// * `src` must be properly aligned.
///
/// Like [`read`], `read_unaligned` creates a bitwise copy of `T`, regardless of
-/// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the returned
+/// whether `T` is [`Copy`]. If `T` is not [`Copy`], using both the returned
/// value and the value at `*src` can [violate memory safety][read-ownership].
/// However, storing non-[`Copy`] types in volatile memory is almost certainly
/// incorrect.
///
/// The compiler shouldn't change the relative order or number of volatile
/// memory operations. However, volatile memory operations on zero-sized types
-/// (e.g., if a zero-sized type is passed to `write_volatile`) are no-ops
+/// (e.g., if a zero-sized type is passed to `write_volatile`) are noops
/// and may be ignored.
///
/// [c11]: http://www.open-std.org/jtc1/sc22/wg14/www/docs/n1570.pdf
/// unless `x` and `y` point into the same allocated object.
///
/// Always use `.offset(count)` instead when possible, because `offset`
- /// allows the compiler to optimize better. If you need to cross object
+ /// allows the compiler to optimize better. If you need to cross object
/// boundaries, cast the pointer to an integer and do the arithmetic there.
///
/// # Examples
/// unless `x` and `y` point into the same allocated object.
///
/// Always use `.offset(count)` instead when possible, because `offset`
- /// allows the compiler to optimize better. If you need to cross object
+ /// allows the compiler to optimize better. If you need to cross object
/// boundaries, cast the pointer to an integer and do the arithmetic there.
///
/// # Examples
#[stable(feature = "rust1", since = "1.0.0")]
impl<T: ?Sized> Eq for *mut T {}
-/// Compare raw pointers for equality.
+/// Compares raw pointers for equality.
///
/// This is the same as using the `==` operator, but less generic:
/// the arguments have to be `*const T` raw pointers,
///
/// # Examples
///
- /// Convert a string to an integer, turning poorly-formed strings
+ /// Converts a string to an integer, turning poorly-formed strings
/// into 0 (the default value for integers). [`parse`] converts
/// a string to any other type that implements [`FromStr`], returning an
/// [`Err`] on error.
const LO_USIZE: usize = LO_U64 as usize;
const HI_USIZE: usize = HI_U64 as usize;
-/// Returns whether `x` contains any zero byte.
+/// Returns `true` if `x` contains any zero byte.
///
/// From *Matters Computational*, J. Arndt:
///
/// Returns an iterator over subslices separated by elements that match
/// `pred` limited to returning at most `n` items. This starts at the end of
- /// the slice and works backwards. The matched element is not contained in
+ /// the slice and works backwards. The matched element is not contained in
/// the subslices.
///
/// The last element returned, if any, will contain the remainder of the
unsafe impl<T: Sync> Send for Iter<'_, T> {}
impl<'a, T> Iter<'a, T> {
- /// View the underlying data as a subslice of the original data.
+ /// Views the underlying data as a subslice of the original data.
///
/// This has the same lifetime as the original slice, and so the
/// iterator can continue to be used while this exists.
unsafe impl<T: Send> Send for IterMut<'_, T> {}
impl<'a, T> IterMut<'a, T> {
- /// View the underlying data as a subslice of the original data.
+ /// Views the underlying data as a subslice of the original data.
///
/// To avoid creating `&mut` references that alias, this is forced
/// to consume the iterator.
}
impl<'a, T> ChunksExact<'a, T> {
- /// Return the remainder of the original slice that is not going to be
+ /// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "chunks_exact", since = "1.31.0")]
}
impl<'a, T> ChunksExactMut<'a, T> {
- /// Return the remainder of the original slice that is not going to be
+ /// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "chunks_exact", since = "1.31.0")]
}
impl<'a, T> RChunksExact<'a, T> {
- /// Return the remainder of the original slice that is not going to be
+ /// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "rchunks", since = "1.31.0")]
}
impl<'a, T> RChunksExactMut<'a, T> {
- /// Return the remainder of the original slice that is not going to be
+ /// Returns the remainder of the original slice that is not going to be
/// returned by the iterator. The returned slice has at most `chunk_size-1`
/// elements.
#[stable(feature = "rchunks", since = "1.31.0")]
}
/// Rotates the range `[mid-left, mid+right)` such that the element at `mid`
-/// becomes the first element. Equivalently, rotates the range `left`
+/// becomes the first element. Equivalently, rotates the range `left`
/// elements to the left or `right` elements to the right.
///
/// # Safety
/// # Algorithm
///
/// For longer rotations, swap the left-most `delta = min(left, right)`
-/// elements with the right-most `delta` elements. LLVM vectorizes this,
+/// elements with the right-most `delta` elements. LLVM vectorizes this,
/// which is profitable as we only reach this step for a "large enough"
-/// rotation. Doing this puts `delta` elements on the larger side into the
-/// correct position, leaving a smaller rotate problem. Demonstration:
+/// rotation. Doing this puts `delta` elements on the larger side into the
+/// correct position, leaving a smaller rotate problem. Demonstration:
///
/// ```text
/// [ 6 7 8 9 10 11 12 13 . 1 2 3 4 5 ]
-//! String manipulation
+//! String manipulation.
//!
-//! For more details, see std::str
+//! For more details, see the `std::str` module.
#![stable(feature = "rust1", since = "1.0.0")]
use self::pattern::{Searcher, ReverseSearcher, DoubleEndedSearcher};
use char;
-use fmt;
+use fmt::{self, Write};
use iter::{Map, Cloned, FusedIterator, TrustedLen, TrustedRandomAccess, Filter};
+use iter::{Flatten, FlatMap, Chain};
use slice::{self, SliceIndex, Split as SliceSplit};
use mem;
+use ops::Try;
+use option;
pub mod pattern;
#[stable(feature = "utf8_error", since = "1.5.0")]
pub fn valid_up_to(&self) -> usize { self.valid_up_to }
- /// Provide more information about the failure:
+ /// Provides more information about the failure:
///
/// * `None`: the end of the input was reached unexpectedly.
/// `self.valid_up_to()` is 1 to 3 bytes from the end of the input.
impl FusedIterator for Chars<'_> {}
impl<'a> Chars<'a> {
- /// View the underlying data as a subslice of the original data.
+ /// Views the underlying data as a subslice of the original data.
///
/// This has the same lifetime as the original slice, and so the
/// iterator can continue to be used while this exists.
impl FusedIterator for CharIndices<'_> {}
impl<'a> CharIndices<'a> {
- /// View the underlying data as a subslice of the original data.
+ /// Views the underlying data as a subslice of the original data.
///
/// This has the same lifetime as the original slice, and so the
/// iterator can continue to be used while this exists.
#[allow(deprecated)]
pub struct LinesAny<'a>(Lines<'a>);
-/// A nameable, cloneable fn type
-#[derive(Clone)]
-struct LinesAnyMap;
-
-impl<'a> Fn<(&'a str,)> for LinesAnyMap {
- #[inline]
- extern "rust-call" fn call(&self, (line,): (&'a str,)) -> &'a str {
+impl_fn_for_zst! {
+ /// A nameable, cloneable fn type
+ #[derive(Clone)]
+ struct LinesAnyMap impl<'a> Fn = |line: &'a str| -> &'a str {
let l = line.len();
if l > 0 && line.as_bytes()[l - 1] == b'\r' { &line[0 .. l - 1] }
else { line }
- }
-}
-
-impl<'a> FnMut<(&'a str,)> for LinesAnyMap {
- #[inline]
- extern "rust-call" fn call_mut(&mut self, (line,): (&'a str,)) -> &'a str {
- Fn::call(&*self, (line,))
- }
-}
-
-impl<'a> FnOnce<(&'a str,)> for LinesAnyMap {
- type Output = &'a str;
-
- #[inline]
- extern "rust-call" fn call_once(self, (line,): (&'a str,)) -> &'a str {
- Fn::call(&self, (line,))
- }
+ };
}
#[stable(feature = "rust1", since = "1.0.0")]
/// Implements ordering of strings.
///
- /// Strings are ordered lexicographically by their byte values. This orders Unicode code
- /// points based on their positions in the code charts. This is not necessarily the same as
- /// "alphabetical" order, which varies by language and locale. Sorting strings according to
+ /// Strings are ordered lexicographically by their byte values. This orders Unicode code
+ /// points based on their positions in the code charts. This is not necessarily the same as
+ /// "alphabetical" order, which varies by language and locale. Sorting strings according to
/// culturally-accepted standards requires locale-specific data that is outside the scope of
/// the `str` type.
#[stable(feature = "rust1", since = "1.0.0")]
/// Implements comparison operations on strings.
///
- /// Strings are compared lexicographically by their byte values. This compares Unicode code
- /// points based on their positions in the code charts. This is not necessarily the same as
- /// "alphabetical" order, which varies by language and locale. Comparing strings according to
+ /// Strings are compared lexicographically by their byte values. This compares Unicode code
+ /// points based on their positions in the code charts. This is not necessarily the same as
+ /// "alphabetical" order, which varies by language and locale. Comparing strings according to
/// culturally-accepted standards requires locale-specific data that is outside the scope of
/// the `str` type.
#[stable(feature = "rust1", since = "1.0.0")]
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut str) -> &mut Self::Output {
- let ptr = slice.as_ptr().add(self.start);
+ let ptr = slice.as_mut_ptr().add(self.start);
let len = self.end - self.start;
- super::from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr as *mut u8, len))
+ super::from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr, len))
}
#[inline]
fn index(self, slice: &str) -> &Self::Output {
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut str) -> &mut Self::Output {
- let ptr = slice.as_ptr();
- super::from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr as *mut u8, self.end))
+ let ptr = slice.as_mut_ptr();
+ super::from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr, self.end))
}
#[inline]
fn index(self, slice: &str) -> &Self::Output {
}
#[inline]
unsafe fn get_unchecked_mut(self, slice: &mut str) -> &mut Self::Output {
- let ptr = slice.as_ptr().add(self.start);
+ let ptr = slice.as_mut_ptr().add(self.start);
let len = slice.len() - self.start;
- super::from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr as *mut u8, len))
+ super::from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr, len))
}
#[inline]
fn index(self, slice: &str) -> &Self::Output {
self as *const str as *const u8
}
+ /// Converts a mutable string slice to a raw pointer.
+ ///
+ /// As string slices are a slice of bytes, the raw pointer points to a
+ /// [`u8`]. This pointer will be pointing to the first byte of the string
+ /// slice.
+ ///
+ /// It is your responsibility to make sure that the string slice only gets
+ /// modified in a way that it remains valid UTF-8.
+ ///
+ /// [`u8`]: primitive.u8.html
+ #[unstable(feature = "str_as_mut_ptr", issue = "58215")]
+ #[inline]
+ pub fn as_mut_ptr(&mut self) -> *mut u8 {
+ self as *mut str as *mut u8
+ }
+
/// Returns a subslice of `str`.
///
/// This is the non-panicking alternative to indexing the `str`. Returns
// is_char_boundary checks that the index is in [0, .len()]
if self.is_char_boundary(mid) {
let len = self.len();
- let ptr = self.as_ptr() as *mut u8;
+ let ptr = self.as_mut_ptr();
unsafe {
(from_utf8_unchecked_mut(slice::from_raw_parts_mut(ptr, mid)),
from_utf8_unchecked_mut(slice::from_raw_parts_mut(
Bytes(self.as_bytes().iter().cloned())
}
- /// Split a string slice by whitespace.
+ /// Splits a string slice by whitespace.
///
/// The iterator returned will return string slices that are sub-slices of
/// the original string slice, separated by any amount of whitespace.
SplitWhitespace { inner: self.split(IsWhitespace).filter(IsNotEmpty) }
}
- /// Split a string slice by ASCII whitespace.
+ /// Splits a string slice by ASCII whitespace.
///
/// The iterator returned will return string slices that are sub-slices of
/// the original string slice, separated by any amount of ASCII whitespace.
let inner = self
.as_bytes()
.split(IsAsciiWhitespace)
- .filter(IsNotEmpty)
+ .filter(BytesIsNotEmpty)
.map(UnsafeBytesToStr);
SplitAsciiWhitespace { inner }
}
///
/// A string is a sequence of bytes. `start` in this context means the first
/// position of that byte string; for a left-to-right language like English or
- /// Russian, this will be left side; and for right-to-left languages like
+ /// Russian, this will be the left side, and for right-to-left languages
/// like Arabic or Hebrew, this will be the right side.
///
/// # Examples
///
/// A string is a sequence of bytes. `end` in this context means the last
/// position of that byte string; for a left-to-right language like English or
- /// Russian, this will be right side; and for right-to-left languages like
+ /// Russian, this will be the right side, and for right-to-left languages
/// like Arabic or Hebrew, this will be the left side.
///
/// # Examples
///
/// A string is a sequence of bytes. `start` in this context means the first
/// position of that byte string; for a left-to-right language like English or
- /// Russian, this will be left side; and for right-to-left languages like
+ /// Russian, this will be the left side, and for right-to-left languages
/// like Arabic or Hebrew, this will be the right side.
///
/// # Examples
///
/// A string is a sequence of bytes. `end` in this context means the last
/// position of that byte string; for a left-to-right language like English or
- /// Russian, this will be right side; and for right-to-left languages like
+ /// Russian, this will be the right side, and for right-to-left languages
/// like Arabic or Hebrew, this will be the left side.
///
/// # Examples
let me = unsafe { self.as_bytes_mut() };
me.make_ascii_lowercase()
}
+
+ /// Returns an iterator that escapes each char in `s` with [`char::escape_debug`].
+ ///
+ /// Note: only extended grapheme codepoints that begin the string will be
+ /// escaped.
+ ///
+ /// [`char::escape_debug`]: ../std/primitive.char.html#method.escape_debug
+ ///
+ /// # Examples
+ ///
+ /// As an iterator:
+ ///
+ /// ```
+ /// for c in "❤\n!".escape_debug() {
+ /// print!("{}", c);
+ /// }
+ /// println!();
+ /// ```
+ ///
+ /// Using `println!` directly:
+ ///
+ /// ```
+ /// println!("{}", "❤\n!".escape_debug());
+ /// ```
+ ///
+ ///
+ /// Both are equivalent to:
+ ///
+ /// ```
+ /// println!("❤\\n!");
+ /// ```
+ ///
+ /// Using `to_string`:
+ ///
+ /// ```
+ /// assert_eq!("❤\n!".escape_debug().to_string(), "❤\\n!");
+ /// ```
+ #[stable(feature = "str_escape", since = "1.34.0")]
+ pub fn escape_debug(&self) -> EscapeDebug {
+ let mut chars = self.chars();
+ EscapeDebug {
+ inner: chars.next()
+ .map(|first| first.escape_debug_ext(true))
+ .into_iter()
+ .flatten()
+ .chain(chars.flat_map(CharEscapeDebugContinue))
+ }
+ }
+
+ /// Returns an iterator that escapes each char in `s` with [`char::escape_default`].
+ ///
+ /// [`char::escape_default`]: ../std/primitive.char.html#method.escape_default
+ ///
+ /// # Examples
+ ///
+ /// As an iterator:
+ ///
+ /// ```
+ /// for c in "❤\n!".escape_default() {
+ /// print!("{}", c);
+ /// }
+ /// println!();
+ /// ```
+ ///
+ /// Using `println!` directly:
+ ///
+ /// ```
+ /// println!("{}", "❤\n!".escape_default());
+ /// ```
+ ///
+ ///
+ /// Both are equivalent to:
+ ///
+ /// ```
+ /// println!("\\u{{2764}}\\n!");
+ /// ```
+ ///
+ /// Using `to_string`:
+ ///
+ /// ```
+ /// assert_eq!("❤\n!".escape_default().to_string(), "\\u{2764}\\n!");
+ /// ```
+ #[stable(feature = "str_escape", since = "1.34.0")]
+ pub fn escape_default(&self) -> EscapeDefault {
+ EscapeDefault { inner: self.chars().flat_map(CharEscapeDefault) }
+ }
+
+ /// Returns an iterator that escapes each char in `s` with [`char::escape_unicode`].
+ ///
+ /// [`char::escape_unicode`]: ../std/primitive.char.html#method.escape_unicode
+ ///
+ /// # Examples
+ ///
+ /// As an iterator:
+ ///
+ /// ```
+ /// for c in "❤\n!".escape_unicode() {
+ /// print!("{}", c);
+ /// }
+ /// println!();
+ /// ```
+ ///
+ /// Using `println!` directly:
+ ///
+ /// ```
+ /// println!("{}", "❤\n!".escape_unicode());
+ /// ```
+ ///
+ ///
+ /// Both are equivalent to:
+ ///
+ /// ```
+ /// println!("\\u{{2764}}\\u{{a}}\\u{{21}}");
+ /// ```
+ ///
+ /// Using `to_string`:
+ ///
+ /// ```
+ /// assert_eq!("❤\n!".escape_unicode().to_string(), "\\u{2764}\\u{a}\\u{21}");
+ /// ```
+ #[stable(feature = "str_escape", since = "1.34.0")]
+ pub fn escape_unicode(&self) -> EscapeUnicode {
+ EscapeUnicode { inner: self.chars().flat_map(CharEscapeUnicode) }
+ }
+}
+
+impl_fn_for_zst! {
+ #[derive(Clone)]
+ struct CharEscapeDebugContinue impl Fn = |c: char| -> char::EscapeDebug {
+ c.escape_debug_ext(false)
+ };
+
+ #[derive(Clone)]
+ struct CharEscapeUnicode impl Fn = |c: char| -> char::EscapeUnicode {
+ c.escape_unicode()
+ };
+ #[derive(Clone)]
+ struct CharEscapeDefault impl Fn = |c: char| -> char::EscapeDefault {
+ c.escape_default()
+ };
}
#[stable(feature = "rust1", since = "1.0.0")]
#[stable(feature = "split_ascii_whitespace", since = "1.34.0")]
#[derive(Clone, Debug)]
pub struct SplitAsciiWhitespace<'a> {
- inner: Map<Filter<SliceSplit<'a, u8, IsAsciiWhitespace>, IsNotEmpty>, UnsafeBytesToStr>,
-}
-
-#[derive(Clone)]
-struct IsWhitespace;
-
-impl FnOnce<(char, )> for IsWhitespace {
- type Output = bool;
-
- #[inline]
- extern "rust-call" fn call_once(mut self, arg: (char, )) -> bool {
- self.call_mut(arg)
- }
-}
-
-impl FnMut<(char, )> for IsWhitespace {
- #[inline]
- extern "rust-call" fn call_mut(&mut self, arg: (char, )) -> bool {
- arg.0.is_whitespace()
- }
-}
-
-#[derive(Clone)]
-struct IsAsciiWhitespace;
-
-impl<'a> FnOnce<(&'a u8, )> for IsAsciiWhitespace {
- type Output = bool;
-
- #[inline]
- extern "rust-call" fn call_once(mut self, arg: (&u8, )) -> bool {
- self.call_mut(arg)
- }
-}
-
-impl<'a> FnMut<(&'a u8, )> for IsAsciiWhitespace {
- #[inline]
- extern "rust-call" fn call_mut(&mut self, arg: (&u8, )) -> bool {
- arg.0.is_ascii_whitespace()
- }
-}
-
-#[derive(Clone)]
-struct IsNotEmpty;
-
-impl<'a, 'b> FnOnce<(&'a &'b str, )> for IsNotEmpty {
- type Output = bool;
-
- #[inline]
- extern "rust-call" fn call_once(mut self, arg: (&'a &'b str, )) -> bool {
- self.call_mut(arg)
- }
-}
-
-impl<'a, 'b> FnMut<(&'a &'b str, )> for IsNotEmpty {
- #[inline]
- extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b str, )) -> bool {
- !arg.0.is_empty()
- }
-}
-
-impl<'a, 'b> FnOnce<(&'a &'b [u8], )> for IsNotEmpty {
- type Output = bool;
-
- #[inline]
- extern "rust-call" fn call_once(mut self, arg: (&'a &'b [u8], )) -> bool {
- self.call_mut(arg)
- }
+ inner: Map<Filter<SliceSplit<'a, u8, IsAsciiWhitespace>, BytesIsNotEmpty>, UnsafeBytesToStr>,
}
-impl<'a, 'b> FnMut<(&'a &'b [u8], )> for IsNotEmpty {
- #[inline]
- extern "rust-call" fn call_mut(&mut self, arg: (&'a &'b [u8], )) -> bool {
- !arg.0.is_empty()
- }
-}
+impl_fn_for_zst! {
+ #[derive(Clone)]
+ struct IsWhitespace impl Fn = |c: char| -> bool {
+ c.is_whitespace()
+ };
-#[derive(Clone)]
-struct UnsafeBytesToStr;
+ #[derive(Clone)]
+ struct IsAsciiWhitespace impl Fn = |byte: &u8| -> bool {
+ byte.is_ascii_whitespace()
+ };
-impl<'a> FnOnce<(&'a [u8], )> for UnsafeBytesToStr {
- type Output = &'a str;
+ #[derive(Clone)]
+ struct IsNotEmpty impl<'a, 'b> Fn = |s: &'a &'b str| -> bool {
+ !s.is_empty()
+ };
- #[inline]
- extern "rust-call" fn call_once(mut self, arg: (&'a [u8], )) -> &'a str {
- self.call_mut(arg)
- }
-}
+ #[derive(Clone)]
+ struct BytesIsNotEmpty impl<'a, 'b> Fn = |s: &'a &'b [u8]| -> bool {
+ !s.is_empty()
+ };
-impl<'a> FnMut<(&'a [u8], )> for UnsafeBytesToStr {
- #[inline]
- extern "rust-call" fn call_mut(&mut self, arg: (&'a [u8], )) -> &'a str {
- unsafe { from_utf8_unchecked(arg.0) }
- }
+ #[derive(Clone)]
+ struct UnsafeBytesToStr impl<'a> Fn = |bytes: &'a [u8]| -> &'a str {
+ unsafe { from_utf8_unchecked(bytes) }
+ };
}
-
#[stable(feature = "split_whitespace", since = "1.1.0")]
impl<'a> Iterator for SplitWhitespace<'a> {
type Item = &'a str;
#[stable(feature = "fused", since = "1.26.0")]
impl FusedIterator for EncodeUtf16<'_> {}
+
+/// The return type of [`str::escape_debug`].
+///
+/// [`str::escape_debug`]: ../../std/primitive.str.html#method.escape_debug
+#[stable(feature = "str_escape", since = "1.34.0")]
+#[derive(Clone, Debug)]
+pub struct EscapeDebug<'a> {
+ inner: Chain<
+ Flatten<option::IntoIter<char::EscapeDebug>>,
+ FlatMap<Chars<'a>, char::EscapeDebug, CharEscapeDebugContinue>
+ >,
+}
+
+/// The return type of [`str::escape_default`].
+///
+/// [`str::escape_default`]: ../../std/primitive.str.html#method.escape_default
+#[stable(feature = "str_escape", since = "1.34.0")]
+#[derive(Clone, Debug)]
+pub struct EscapeDefault<'a> {
+ inner: FlatMap<Chars<'a>, char::EscapeDefault, CharEscapeDefault>,
+}
+
+/// The return type of [`str::escape_unicode`].
+///
+/// [`str::escape_unicode`]: ../../std/primitive.str.html#method.escape_unicode
+#[stable(feature = "str_escape", since = "1.34.0")]
+#[derive(Clone, Debug)]
+pub struct EscapeUnicode<'a> {
+ inner: FlatMap<Chars<'a>, char::EscapeUnicode, CharEscapeUnicode>,
+}
+
+macro_rules! escape_types_impls {
+ ($( $Name: ident ),+) => {$(
+ #[stable(feature = "str_escape", since = "1.34.0")]
+ impl<'a> fmt::Display for $Name<'a> {
+ fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ self.clone().try_for_each(|c| f.write_char(c))
+ }
+ }
+
+ #[stable(feature = "str_escape", since = "1.34.0")]
+ impl<'a> Iterator for $Name<'a> {
+ type Item = char;
+
+ #[inline]
+ fn next(&mut self) -> Option<char> { self.inner.next() }
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) { self.inner.size_hint() }
+
+ #[inline]
+ fn try_fold<Acc, Fold, R>(&mut self, init: Acc, fold: Fold) -> R where
+ Self: Sized, Fold: FnMut(Acc, Self::Item) -> R, R: Try<Ok=Acc>
+ {
+ self.inner.try_fold(init, fold)
+ }
+
+ #[inline]
+ fn fold<Acc, Fold>(self, init: Acc, fold: Fold) -> Acc
+ where Fold: FnMut(Acc, Self::Item) -> Acc,
+ {
+ self.inner.fold(init, fold)
+ }
+ }
+
+ #[stable(feature = "str_escape", since = "1.34.0")]
+ impl<'a> FusedIterator for $Name<'a> {}
+ )+}
+}
+
+escape_types_impls!(EscapeDebug, EscapeDefault, EscapeUnicode);
//! The string Pattern API.
//!
-//! For more details, see the traits `Pattern`, `Searcher`,
-//! `ReverseSearcher` and `DoubleEndedSearcher`.
+//! For more details, see the traits [`Pattern`], [`Searcher`],
+//! [`ReverseSearcher`], and [`DoubleEndedSearcher`].
#![unstable(feature = "pattern",
reason = "API not fully fleshed out and ready to be stabilized",
/// `[Reject(0, 1), Reject(1, 2), Match(2, 5), Reject(5, 8)]`
fn next(&mut self) -> SearchStep;
- /// Find the next `Match` result. See `next()`
+ /// Finds the next `Match` result. See `next()`
///
/// Unlike next(), there is no guarantee that the returned ranges
/// of this and next_reject will overlap. This will return (start_match, end_match),
}
}
- /// Find the next `Reject` result. See `next()` and `next_match()`
+ /// Finds the next `Reject` result. See `next()` and `next_match()`
///
/// Unlike next(), there is no guarantee that the returned ranges
/// of this and next_match will overlap.
/// `[Reject(7, 8), Match(4, 7), Reject(1, 4), Reject(0, 1)]`
fn next_back(&mut self) -> SearchStep;
- /// Find the next `Match` result. See `next_back()`
+ /// Finds the next `Match` result. See `next_back()`
#[inline]
fn next_match_back(&mut self) -> Option<(usize, usize)>{
loop {
}
}
- /// Find the next `Reject` result. See `next_back()`
+ /// Finds the next `Reject` result. See `next_back()`
#[inline]
fn next_reject_back(&mut self) -> Option<(usize, usize)>{
loop {
pub use self::poll::Poll;
mod wake;
-pub use self::wake::{Waker, LocalWaker, UnsafeWake};
+pub use self::wake::{Waker, RawWaker, RawWakerVTable};
}
impl<T> Poll<T> {
- /// Change the ready value of this `Poll` with the closure provided
+ /// Changes the ready value of this `Poll` with the closure provided.
pub fn map<U, F>(self, f: F) -> Poll<U>
where F: FnOnce(T) -> U
{
}
}
- /// Returns whether this is `Poll::Ready`
+ /// Returns `true` if this is `Poll::Ready`
#[inline]
pub fn is_ready(&self) -> bool {
match *self {
}
}
- /// Returns whether this is `Poll::Pending`
+ /// Returns `true` if this is `Poll::Pending`
#[inline]
pub fn is_pending(&self) -> bool {
!self.is_ready()
}
impl<T, E> Poll<Result<T, E>> {
- /// Change the success value of this `Poll` with the closure provided
+ /// Changes the success value of this `Poll` with the closure provided.
pub fn map_ok<U, F>(self, f: F) -> Poll<Result<U, E>>
where F: FnOnce(T) -> U
{
}
}
- /// Change the error value of this `Poll` with the closure provided
+ /// Changes the error value of this `Poll` with the closure provided.
pub fn map_err<U, F>(self, f: F) -> Poll<Result<T, U>>
where F: FnOnce(E) -> U
{
use fmt;
use marker::Unpin;
-use ptr::NonNull;
+
+/// A `RawWaker` allows the implementor of a task executor to create a [`Waker`]
+/// which provides customized wakeup behavior.
+///
+/// It consists of a data pointer and a [virtual function pointer table (vtable)][vtable] that
+/// customizes the behavior of the `RawWaker`.
+///
+/// [vtable]: https://en.wikipedia.org/wiki/Virtual_method_table
+#[derive(PartialEq, Debug)]
+pub struct RawWaker {
+ /// A data pointer, which can be used to store arbitrary data as required
+ /// by the executor. This could be e.g. a type-erased pointer to an `Arc`
+ /// that is associated with the task.
+ /// The value of this field gets passed to all functions that are part of
+ /// the vtable as the first parameter.
+ data: *const (),
+ /// Virtual function pointer table that customizes the behavior of this waker.
+ vtable: &'static RawWakerVTable,
+}
+
+impl RawWaker {
+ /// Creates a new `RawWaker` from the provided `data` pointer and `vtable`.
+ ///
+ /// The `data` pointer can be used to store arbitrary data as required
+ /// by the executor. This could be e.g. a type-erased pointer to an `Arc`
+ /// that is associated with the task.
+ /// The value of this pointer will get passed to all functions that are part
+ /// of the `vtable` as the first parameter.
+ ///
+ /// The `vtable` customizes the behavior of a `Waker` which gets created
+ /// from a `RawWaker`. For each operation on the `Waker`, the associated
+ /// function in the `vtable` of the underlying `RawWaker` will be called.
+ pub const fn new(data: *const (), vtable: &'static RawWakerVTable) -> RawWaker {
+ RawWaker {
+ data,
+ vtable,
+ }
+ }
+}
+
+/// A virtual function pointer table (vtable) that specifies the behavior
+/// of a [`RawWaker`].
+///
+/// The pointer passed to all functions inside the vtable is the `data` pointer
+/// from the enclosing [`RawWaker`] object.
+///
+/// The functions inside this struct are only intended to be called on the `data`
+/// pointer of a properly constructed [`RawWaker`] object from inside the
+/// [`RawWaker`] implementation. Calling one of the contained functions using
+/// any other `data` pointer will cause undefined behavior.
+#[derive(PartialEq, Copy, Clone, Debug)]
+pub struct RawWakerVTable {
+ /// This function will be called when the [`RawWaker`] gets cloned, e.g. when
+ /// the [`Waker`] in which the [`RawWaker`] is stored gets cloned.
+ ///
+ /// The implementation of this function must retain all resources that are
+ /// required for this additional instance of a [`RawWaker`] and associated
+ /// task. Calling `wake` on the resulting [`RawWaker`] should result in a wakeup
+ /// of the same task that would have been awoken by the original [`RawWaker`].
+ pub clone: unsafe fn(*const ()) -> RawWaker,
+
+ /// This function will be called when `wake` is called on the [`Waker`].
+ /// It must wake up the task associated with this [`RawWaker`].
+ ///
+ /// The implementation of this function must not consume the provided data
+ /// pointer.
+ pub wake: unsafe fn(*const ()),
+
+ /// This function gets called when a [`RawWaker`] gets dropped.
+ ///
+ /// The implementation of this function must make sure to release any
+ /// resources that are associated with this instance of a [`RawWaker`] and
+ /// associated task.
+ pub drop: unsafe fn(*const ()),
+}
/// A `Waker` is a handle for waking up a task by notifying its executor that it
/// is ready to be run.
///
-/// This handle contains a trait object pointing to an instance of the `UnsafeWake`
-/// trait, allowing notifications to get routed through it.
+/// This handle encapsulates a [`RawWaker`] instance, which defines the
+/// executor-specific wakeup behavior.
+///
+/// Implements [`Clone`], [`Send`], and [`Sync`].
#[repr(transparent)]
pub struct Waker {
- inner: NonNull<dyn UnsafeWake>,
+ waker: RawWaker,
}
impl Unpin for Waker {}
unsafe impl Sync for Waker {}
impl Waker {
- /// Constructs a new `Waker` directly.
- ///
- /// Note that most code will not need to call this. Implementers of the
- /// `UnsafeWake` trait will typically provide a wrapper that calls this
- /// but you otherwise shouldn't call it directly.
- ///
- /// If you're working with the standard library then it's recommended to
- /// use the `Waker::from` function instead which works with the safe
- /// `Arc` type and the safe `Wake` trait.
- #[inline]
- pub unsafe fn new(inner: NonNull<dyn UnsafeWake>) -> Self {
- Waker { inner }
- }
-
/// Wake up the task associated with this `Waker`.
- #[inline]
pub fn wake(&self) {
- unsafe { self.inner.as_ref().wake() }
+ // The actual wakeup call is delegated through a virtual function call
+ // to the implementation which is defined by the executor.
+
+ // SAFETY: This is safe because `Waker::new_unchecked` is the only way
+ // to initialize `wake` and `data` requiring the user to acknowledge
+ // that the contract of `RawWaker` is upheld.
+ unsafe { (self.waker.vtable.wake)(self.waker.data) }
}
- /// Returns whether or not this `Waker` and `other` awaken the same task.
+ /// Returns whether or not this `Waker` and the other `Waker` awaken the same task.
///
/// This function works on a best-effort basis, and may return false even
/// when the `Waker`s would awaken the same task. However, if this function
- /// returns true, it is guaranteed that the `Waker`s will awaken the same
- /// task.
+ /// returns `true`, it is guaranteed that the `Waker`s will awaken the same task.
///
/// This function is primarily used for optimization purposes.
- #[inline]
pub fn will_wake(&self, other: &Waker) -> bool {
- self.inner == other.inner
+ self.waker == other.waker
}
- /// Returns whether or not this `Waker` and `other` `LocalWaker` awaken
- /// the same task.
+ /// Creates a new `Waker` from [`RawWaker`].
///
- /// This function works on a best-effort basis, and may return false even
- /// when the `Waker`s would awaken the same task. However, if this function
- /// returns true, it is guaranteed that the `Waker`s will awaken the same
- /// task.
- ///
- /// This function is primarily used for optimization purposes.
- #[inline]
- pub fn will_wake_local(&self, other: &LocalWaker) -> bool {
- self.will_wake(&other.0)
+ /// The behavior of the returned `Waker` is undefined if the contract defined
+ /// in [`RawWaker`]'s and [`RawWakerVTable`]'s documentation is not upheld.
+ /// Therefore this method is unsafe.
+ pub unsafe fn new_unchecked(waker: RawWaker) -> Waker {
+ Waker {
+ waker,
+ }
}
}
impl Clone for Waker {
- #[inline]
fn clone(&self) -> Self {
- unsafe {
- self.inner.as_ref().clone_raw()
+ Waker {
+ // SAFETY: This is safe because `Waker::new_unchecked` is the only way
+ // to initialize `clone` and `data` requiring the user to acknowledge
+ // that the contract of `RawWaker` is upheld.
+ waker: unsafe { (self.waker.vtable.clone)(self.waker.data) },
}
}
}
-impl fmt::Debug for Waker {
- fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("Waker")
- .finish()
- }
-}
-
impl Drop for Waker {
- #[inline]
fn drop(&mut self) {
- unsafe {
- self.inner.as_ref().drop_raw()
- }
- }
-}
-
-/// A `LocalWaker` is a handle for waking up a task by notifying its executor that it
-/// is ready to be run.
-///
-/// This is similar to the `Waker` type, but cannot be sent across threads.
-/// Task executors can use this type to implement more optimized single-threaded wakeup
-/// behavior.
-#[repr(transparent)]
-#[derive(Clone)]
-pub struct LocalWaker(Waker);
-
-impl Unpin for LocalWaker {}
-impl !Send for LocalWaker {}
-impl !Sync for LocalWaker {}
-
-impl LocalWaker {
- /// Constructs a new `LocalWaker` directly.
- ///
- /// Note that most code will not need to call this. Implementers of the
- /// `UnsafeWake` trait will typically provide a wrapper that calls this
- /// but you otherwise shouldn't call it directly.
- ///
- /// If you're working with the standard library then it's recommended to
- /// use the `local_waker_from_nonlocal` or `local_waker` to convert a `Waker`
- /// into a `LocalWaker`.
- ///
- /// For this function to be used safely, it must be sound to call `inner.wake_local()`
- /// on the current thread.
- #[inline]
- pub unsafe fn new(inner: NonNull<dyn UnsafeWake>) -> Self {
- LocalWaker(Waker::new(inner))
- }
-
- /// Borrows this `LocalWaker` as a `Waker`.
- ///
- /// `Waker` is nearly identical to `LocalWaker`, but is threadsafe
- /// (implements `Send` and `Sync`).
- #[inline]
- pub fn as_waker(&self) -> &Waker {
- &self.0
- }
-
- /// Converts this `LocalWaker` into a `Waker`.
- ///
- /// `Waker` is nearly identical to `LocalWaker`, but is threadsafe
- /// (implements `Send` and `Sync`).
- #[inline]
- pub fn into_waker(self) -> Waker {
- self.0
- }
-
- /// Wake up the task associated with this `LocalWaker`.
- #[inline]
- pub fn wake(&self) {
- unsafe { self.0.inner.as_ref().wake_local() }
- }
-
- /// Returns whether or not this `LocalWaker` and `other` `LocalWaker` awaken the same task.
- ///
- /// This function works on a best-effort basis, and may return false even
- /// when the `LocalWaker`s would awaken the same task. However, if this function
- /// returns true, it is guaranteed that the `LocalWaker`s will awaken the same
- /// task.
- ///
- /// This function is primarily used for optimization purposes.
- #[inline]
- pub fn will_wake(&self, other: &LocalWaker) -> bool {
- self.0.will_wake(&other.0)
- }
-
- /// Returns whether or not this `LocalWaker` and `other` `Waker` awaken the same task.
- ///
- /// This function works on a best-effort basis, and may return false even
- /// when the `Waker`s would awaken the same task. However, if this function
- /// returns true, it is guaranteed that the `LocalWaker`s will awaken the same
- /// task.
- ///
- /// This function is primarily used for optimization purposes.
- #[inline]
- pub fn will_wake_nonlocal(&self, other: &Waker) -> bool {
- self.0.will_wake(other)
- }
-}
-
-impl From<LocalWaker> for Waker {
- /// Converts a `LocalWaker` into a `Waker`.
- ///
- /// This conversion turns a `!Sync` `LocalWaker` into a `Sync` `Waker`, allowing a wakeup
- /// object to be sent to another thread, but giving up its ability to do specialized
- /// thread-local wakeup behavior.
- #[inline]
- fn from(local_waker: LocalWaker) -> Self {
- local_waker.0
+ // SAFETY: This is safe because `Waker::new_unchecked` is the only way
+ // to initialize `drop` and `data` requiring the user to acknowledge
+ // that the contract of `RawWaker` is upheld.
+ unsafe { (self.waker.vtable.drop)(self.waker.data) }
}
}
-impl fmt::Debug for LocalWaker {
+impl fmt::Debug for Waker {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
- f.debug_struct("LocalWaker")
+ let vtable_ptr = self.waker.vtable as *const RawWakerVTable;
+ f.debug_struct("Waker")
+ .field("data", &self.waker.data)
+ .field("vtable", &vtable_ptr)
.finish()
}
}
-
-/// An unsafe trait for implementing custom memory management for a `Waker` or `LocalWaker`.
-///
-/// A `Waker` conceptually is a cloneable trait object for `Wake`, and is
-/// most often essentially just `Arc<dyn Wake>`. However, in some contexts
-/// (particularly `no_std`), it's desirable to avoid `Arc` in favor of some
-/// custom memory management strategy. This trait is designed to allow for such
-/// customization.
-///
-/// When using `std`, a default implementation of the `UnsafeWake` trait is provided for
-/// `Arc<T>` where `T: Wake`.
-pub unsafe trait UnsafeWake: Send + Sync {
- /// Creates a clone of this `UnsafeWake` and stores it behind a `Waker`.
- ///
- /// This function will create a new uniquely owned handle that under the
- /// hood references the same notification instance. In other words calls
- /// to `wake` on the returned handle should be equivalent to calls to
- /// `wake` on this handle.
- ///
- /// # Unsafety
- ///
- /// This function is unsafe to call because it's asserting the `UnsafeWake`
- /// value is in a consistent state, i.e., hasn't been dropped.
- unsafe fn clone_raw(&self) -> Waker;
-
- /// Drops this instance of `UnsafeWake`, deallocating resources
- /// associated with it.
- ///
- /// FIXME(cramertj)
- /// This method is intended to have a signature such as:
- ///
- /// ```ignore (not-a-doctest)
- /// fn drop_raw(self: *mut Self);
- /// ```
- ///
- /// Unfortunately in Rust today that signature is not object safe.
- /// Nevertheless it's recommended to implement this function *as if* that
- /// were its signature. As such it is not safe to call on an invalid
- /// pointer, nor is the validity of the pointer guaranteed after this
- /// function returns.
- ///
- /// # Unsafety
- ///
- /// This function is unsafe to call because it's asserting the `UnsafeWake`
- /// value is in a consistent state, i.e., hasn't been dropped.
- unsafe fn drop_raw(&self);
-
- /// Indicates that the associated task is ready to make progress and should
- /// be `poll`ed.
- ///
- /// Executors generally maintain a queue of "ready" tasks; `wake` should place
- /// the associated task onto this queue.
- ///
- /// # Panics
- ///
- /// Implementations should avoid panicking, but clients should also be prepared
- /// for panics.
- ///
- /// # Unsafety
- ///
- /// This function is unsafe to call because it's asserting the `UnsafeWake`
- /// value is in a consistent state, i.e., hasn't been dropped.
- unsafe fn wake(&self);
-
- /// Indicates that the associated task is ready to make progress and should
- /// be `poll`ed. This function is the same as `wake`, but can only be called
- /// from the thread that this `UnsafeWake` is "local" to. This allows for
- /// implementors to provide specialized wakeup behavior specific to the current
- /// thread. This function is called by `LocalWaker::wake`.
- ///
- /// Executors generally maintain a queue of "ready" tasks; `wake_local` should place
- /// the associated task onto this queue.
- ///
- /// # Panics
- ///
- /// Implementations should avoid panicking, but clients should also be prepared
- /// for panics.
- ///
- /// # Unsafety
- ///
- /// This function is unsafe to call because it's asserting the `UnsafeWake`
- /// value is in a consistent state, i.e., hasn't been dropped, and that the
- /// `UnsafeWake` hasn't moved from the thread on which it was created.
- unsafe fn wake_local(&self) {
- self.wake()
- }
-}
assert_eq!(i, ys.len());
}
-/// Test `FlatMap::fold` with items already picked off the front and back,
+/// Tests `FlatMap::fold` with items already picked off the front and back,
/// to make sure all parts of the `FlatMap` are folded correctly.
#[test]
fn test_iterator_flat_map_fold() {
assert_eq!(i, ys.len());
}
-/// Test `Flatten::fold` with items already picked off the front and back,
+/// Tests `Flatten::fold` with items already picked off the front and back,
/// to make sure all parts of the `Flatten` are folded correctly.
#[test]
fn test_iterator_flatten_fold() {
/// timeouts.
///
/// Each `Duration` is composed of a whole number of seconds and a fractional part
-/// represented in nanoseconds. If the underlying system does not support
+/// represented in nanoseconds. If the underlying system does not support
/// nanosecond-level precision, APIs binding a system timeout will typically round up
/// the number of nanoseconds.
///
}
}
- /// Multiply `Duration` by `f64`.
+ /// Multiplies `Duration` by `f64`.
///
/// # Panics
/// This method will panic if result is not finite, negative or overflows `Duration`.
#![deny(rust_2018_idioms)]
#![feature(nll)]
-#![feature(str_escape)]
use LabelText::*;
/// digit (i.e., the regular expression `[a-zA-Z_][a-zA-Z_0-9]*`).
///
/// (Note: this format is a strict subset of the `ID` format
- /// defined by the DOT language. This function may change in the
+ /// defined by the DOT language. This function may change in the
/// future to accept a broader subset, or the entirety, of DOT's
/// `ID` format.)
///
}
/// Decomposes content into string suitable for making EscStr that
- /// yields same content as self. The result obeys the law
+ /// yields same content as self. The result obeys the law
/// render(`lt`) == render(`EscStr(lt.pre_escaped_content())`) for
/// all `lt: LabelText`.
fn pre_escaped_content(self) -> Cow<'a, str> {
EscStr(s) => s,
LabelStr(s) => {
if s.contains('\\') {
- (&*s).escape_default().into()
+ (&*s).escape_default().to_string().into()
} else {
s
}
authors = ["The Rust Project Developers"]
name = "panic_unwind"
version = "0.0.0"
+edition = "2018"
[lib]
path = "lib.rs"
-//! Unwinding for wasm32
+//! Unwinding for *wasm32* target.
//!
-//! Right now we don't support this, so this is just stubs
+//! Right now we don't support this, so this is just stubs.
use alloc::boxed::Box;
use core::any::Any;
//! http://www.airs.com/blog/archives/464
//!
//! A reference implementation may be found in the GCC source tree
-//! (<root>/libgcc/unwind-c.c as of this writing)
+//! (`<root>/libgcc/unwind-c.c` as of this writing).
#![allow(non_upper_case_globals)]
#![allow(unused)]
-use dwarf::DwarfReader;
+use crate::dwarf::DwarfReader;
use core::mem;
pub const DW_EH_PE_omit: u8 = 0xFF;
pub const USING_SJLJ_EXCEPTIONS: bool = cfg!(all(target_os = "ios", target_arch = "arm"));
-pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext)
+pub unsafe fn find_eh_action(lsda: *const u8, context: &EHContext<'_>)
-> Result<EHAction, ()>
{
if lsda.is_null() {
}
unsafe fn read_encoded_pointer(reader: &mut DwarfReader,
- context: &EHContext,
+ context: &EHContext<'_>,
encoding: u8)
-> Result<usize, ()> {
if encoding == DW_EH_PE_omit {
//! Utilities for parsing DWARF-encoded data streams.
-//! See http://www.dwarfstd.org,
+//! See <http://www.dwarfstd.org>,
//! DWARF-4 standard, Section 7 - "Data Representation"
// This module is used only by x86_64-pc-windows-gnu for now, but we
-//! Unwinding for emscripten
+//! Unwinding for *emscripten* target.
//!
//! Whereas Rust's usual unwinding implementation for Unix platforms
-//! calls into the libunwind APIs directly, on emscripten we instead
+//! calls into the libunwind APIs directly, on Emscripten we instead
//! call into the C++ unwinding APIs. This is just an expedience since
-//! emscripten's runtime always implements those APIs and does not
+//! Emscripten's runtime always implements those APIs and does not
//! implement libunwind.
#![allow(private_no_mangle_fns)]
use core::any::Any;
use core::ptr;
+use core::mem;
use alloc::boxed::Box;
use libc::{self, c_int};
use unwind as uw;
-use core::mem;
pub fn payload() -> *mut u8 {
ptr::null_mut()
-//! Implementation of panics backed by libgcc/libunwind (in some form)
+//! Implementation of panics backed by libgcc/libunwind (in some form).
//!
//! For background on exception handling and stack unwinding please see
//! "Exception Handling in LLVM" (llvm.org/docs/ExceptionHandling.html) and
//!
//! In the search phase, the job of a personality routine is to examine
//! exception object being thrown, and to decide whether it should be caught at
-//! that stack frame. Once the handler frame has been identified, cleanup phase
+//! that stack frame. Once the handler frame has been identified, cleanup phase
//! begins.
//!
//! In the cleanup phase, the unwinder invokes each personality routine again.
//! This time it decides which (if any) cleanup code needs to be run for
-//! the current stack frame. If so, the control is transferred to a special
+//! the current stack frame. If so, the control is transferred to a special
//! branch in the function body, the "landing pad", which invokes destructors,
-//! frees memory, etc. At the end of the landing pad, control is transferred
+//! frees memory, etc. At the end of the landing pad, control is transferred
//! back to the unwinder and unwinding resumes.
//!
//! Once stack has been unwound down to the handler frame level, unwinding stops
//! ## `eh_personality` and `eh_unwind_resume`
//!
//! These language items are used by the compiler when generating unwind info.
-//! The first one is the personality routine described above. The second one
+//! The first one is the personality routine described above. The second one
//! allows compilation target to customize the process of resuming unwind at the
//! end of the landing pads. `eh_unwind_resume` is used only if
//! `custom_unwind_resume` flag in the target options is set.
use unwind as uw;
use libc::{c_int, uintptr_t};
-use dwarf::eh::{self, EHContext, EHAction};
+use crate::dwarf::eh::{self, EHContext, EHAction};
#[repr(C)]
struct Exception {
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/",
issue_tracker_base_url = "https://github.com/rust-lang/rust/issues/")]
+#![deny(rust_2018_idioms)]
+
#![feature(allocator_api)]
#![feature(alloc)]
#![feature(core_intrinsics)]
#![panic_runtime]
#![feature(panic_runtime)]
-extern crate alloc;
-extern crate libc;
-#[cfg(not(any(target_env = "msvc", all(windows, target_arch = "x86_64", target_env = "gnu"))))]
-extern crate unwind;
-
use alloc::boxed::Box;
use core::intrinsics;
use core::mem;
vtable_ptr: *mut usize)
-> u32 {
let mut payload = imp::payload();
- if intrinsics::try(f, data, &mut payload as *mut _ as *mut _) == 0 {
+ if intrinsics::r#try(f, data, &mut payload as *mut _ as *mut _) == 0 {
0
} else {
let obj = mem::transmute::<_, raw::TraitObject>(imp::cleanup(payload));
use core::mem;
use core::raw;
-use windows as c;
+use crate::windows as c;
use libc::{c_int, c_uint};
// First up, a whole bunch of type definitions. There's a few platform-specific
#[lang = "eh_personality"]
#[cfg(not(test))]
fn rust_eh_personality() {
- unsafe { ::core::intrinsics::abort() }
+ unsafe { core::intrinsics::abort() }
}
use core::any::Any;
use core::intrinsics;
use core::ptr;
-use dwarf::eh::{EHContext, EHAction, find_eh_action};
-use windows as c;
+use crate::dwarf::eh::{EHContext, EHAction, find_eh_action};
+use crate::windows as c;
// Define our exception codes:
// according to http://msdn.microsoft.com/en-us/library/het71c37(v=VS.80).aspx,
ScopedCell(Cell::new(value))
}
- /// Set the value in `self` to `replacement` while
+ /// Sets the value in `self` to `replacement` while
/// running `f`, which gets the old value, mutably.
/// The old value will be restored after `f` exits, even
/// by panic, including modifications made to it by `f`.
f(RefMutL(put_back_on_drop.value.as_mut().unwrap()))
}
- /// Set the value in `self` to `value` while running `f`.
+ /// Sets the value in `self` to `value` while running `f`.
pub fn set<'a, R>(&self, value: <T as ApplyL<'a>>::Out, f: impl FnOnce() -> R) -> R {
self.replace(value, |_| f())
}
macro_rules! diagnostic_child_methods {
($spanned:ident, $regular:ident, $level:expr) => (
- /// Add a new child diagnostic message to `self` with the level
+ /// Adds a new child diagnostic message to `self` with the level
/// identified by this method's name with the given `spans` and
/// `message`.
#[unstable(feature = "proc_macro_diagnostic", issue = "54140")]
self
}
- /// Add a new child diagnostic message to `self` with the level
+ /// Adds a new child diagnostic message to `self` with the level
/// identified by this method's name with the given `message`.
#[unstable(feature = "proc_macro_diagnostic", issue = "54140")]
pub fn $regular<T: Into<String>>(mut self, message: T) -> Diagnostic {
#[unstable(feature = "proc_macro_diagnostic", issue = "54140")]
impl Diagnostic {
- /// Create a new diagnostic with the given `level` and `message`.
+ /// Creates a new diagnostic with the given `level` and `message`.
#[unstable(feature = "proc_macro_diagnostic", issue = "54140")]
pub fn new<T: Into<String>>(level: Level, message: T) -> Diagnostic {
Diagnostic {
}
}
- /// Create a new diagnostic with the given `level` and `message` pointing to
+ /// Creates a new diagnostic with the given `level` and `message` pointing to
/// the given set of `spans`.
#[unstable(feature = "proc_macro_diagnostic", issue = "54140")]
pub fn spanned<S, T>(spans: S, level: Level, message: T) -> Diagnostic
//! function-like macros `#[proc_macro]`, macro attributes `#[proc_macro_attribute]` and
//! custom derive attributes`#[proc_macro_derive]`.
//!
-//! See [the book](../book/first-edition/procedural-macros.html) for more.
+//! See [the book] for more.
+//!
+//! [the book]: ../book/ch19-06-macros.html#procedural-macros-for-generating-code-from-attributes
#![stable(feature = "proc_macro_lib", since = "1.15.0")]
#![deny(missing_docs)]
/// or characters not existing in the language.
/// All tokens in the parsed stream get `Span::call_site()` spans.
///
-/// NOTE: Some errors may cause panics instead of returning `LexError`. We reserve the right to
+/// NOTE: some errors may cause panics instead of returning `LexError`. We reserve the right to
/// change these errors into `LexError`s later.
#[stable(feature = "proc_macro_lib", since = "1.15.0")]
impl FromStr for TokenStream {
macro_rules! diagnostic_method {
($name:ident, $level:expr) => (
- /// Create a new `Diagnostic` with the given `message` at the span
+ /// Creates a new `Diagnostic` with the given `message` at the span
/// `self`.
#[unstable(feature = "proc_macro_diagnostic", issue = "54140")]
pub fn $name<T: Into<String>>(self, message: T) -> Diagnostic {
Span(self.0.source())
}
- /// Get the starting line/column in the source file for this span.
+ /// Gets the starting line/column in the source file for this span.
#[unstable(feature = "proc_macro_span", issue = "54725")]
pub fn start(&self) -> LineColumn {
self.0.start()
}
- /// Get the ending line/column in the source file for this span.
+ /// Gets the ending line/column in the source file for this span.
#[unstable(feature = "proc_macro_span", issue = "54725")]
pub fn end(&self) -> LineColumn {
self.0.end()
}
- /// Create a new span encompassing `self` and `other`.
+ /// Creates a new span encompassing `self` and `other`.
///
/// Returns `None` if `self` and `other` are from different files.
#[unstable(feature = "proc_macro_span", issue = "54725")]
pub struct SourceFile(bridge::client::SourceFile);
impl SourceFile {
- /// Get the path to this source file.
+ /// Gets the path to this source file.
///
/// ### Note
/// If the code span associated with this `SourceFile` was generated by an external macro, this
rustc_apfloat = { path = "../librustc_apfloat" }
rustc_target = { path = "../librustc_target" }
rustc_data_structures = { path = "../librustc_data_structures" }
-rustc_errors = { path = "../librustc_errors" }
+errors = { path = "../librustc_errors", package = "rustc_errors" }
serialize = { path = "../libserialize" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
}
}
- /// True if all nodes always pass the filter.
+ /// Returns `true` if all nodes always pass the filter.
pub fn accepts_all(&self) -> bool {
self.text.is_empty()
}
}
}
- /// Create a new, parameterless DepNode. This method will assert
+ /// Creates a new, parameterless DepNode. This method will assert
/// that the DepNode corresponding to the given DepKind actually
/// does not require any parameters.
#[inline(always)]
}
}
- /// Extract the DefId corresponding to this DepNode. This will work
+ /// Extracts the DefId corresponding to this DepNode. This will work
/// if two conditions are met:
///
/// 1. The Fingerprint of the DepNode actually is a DefPathHash, and
}
/// A "work product" corresponds to a `.o` (or other) file that we
-/// save in between runs. These ids do not have a DefId but rather
+/// save in between runs. These IDs do not have a `DefId` but rather
/// some independent path or string that persists between runs without
/// the need to be mapped or unmapped. (This ensures we can serialize
/// them even in the absence of a tcx.)
///
/// Here, `[op]` represents whatever nodes `op` reads in the
/// course of execution; `Map(key)` represents the node for this
- /// map; and `CurrentTask` represents the current task when
+ /// map, and `CurrentTask` represents the current task when
/// `memoize` is invoked.
///
/// **Important:** when `op` is invoked, the current task will be
-use crate::errors::{Diagnostic, DiagnosticBuilder};
+use errors::{Diagnostic, DiagnosticBuilder};
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::indexed_vec::{Idx, IndexVec};
colors: DepNodeColorMap,
- /// A set of loaded diagnostics which has been emitted.
+ /// A set of loaded diagnostics that have been emitted.
emitted_diagnostics: Mutex<FxHashSet<DepNodeIndex>>,
/// Used to wait for diagnostics to be emitted.
emitted_diagnostics_cond_var: Condvar,
- /// When we load, there may be `.o` files, cached mir, or other such
+ /// When we load, there may be `.o` files, cached MIR, or other such
/// things available to us. If we find that they are not dirty, we
/// load the path to the file storing those work-products here into
/// this map. We can later look for and extract that data.
}
}
- /// True if we are actually building the full dep-graph.
+ /// Returns `true` if we are actually building the full dep-graph, and `false` otherwise.
#[inline]
pub fn is_fully_enabled(&self) -> bool {
self.data.is_some()
task_deps.map(|lock| lock.into_inner()),
);
+ let print_status = cfg!(debug_assertions) && hcx.sess().opts.debugging_opts.dep_tasks;
+
// Determine the color of the new DepNode.
if let Some(prev_index) = data.previous.node_to_index_opt(&key) {
let prev_fingerprint = data.previous.fingerprint_by_index(prev_index);
let color = if let Some(current_fingerprint) = current_fingerprint {
if current_fingerprint == prev_fingerprint {
+ if print_status {
+ eprintln!("[task::green] {:?}", key);
+ }
DepNodeColor::Green(dep_node_index)
} else {
+ if print_status {
+ eprintln!("[task::red] {:?}", key);
+ }
DepNodeColor::Red
}
} else {
+ if print_status {
+ eprintln!("[task::unknown] {:?}", key);
+ }
// Mark the node as Red if we can't hash the result
DepNodeColor::Red
};
insertion for {:?}", key);
data.colors.insert(prev_index, color);
+ } else {
+ if print_status {
+ eprintln!("[task::new] {:?}", key);
+ }
}
(result, dep_node_index)
}
}
- /// Execute something within an "anonymous" task, that is, a task the
- /// DepNode of which is determined by the list of inputs it read from.
+ /// Executes something within an "anonymous" task, that is, a task the
+ /// `DepNode` of which is determined by the list of inputs it read from.
pub fn with_anon_task<OP,R>(&self, dep_kind: DepKind, op: OP) -> (R, DepNodeIndex)
where OP: FnOnce() -> R
{
}
}
- /// Execute something within an "eval-always" task which is a task
- // that runs whenever anything changes.
+ /// Executes something within an "eval-always" task which is a task
+ /// that runs whenever anything changes.
pub fn with_eval_always_task<'a, C, A, R>(
&self,
key: DepNode,
self.data.as_ref().unwrap().previous.node_to_index(dep_node)
}
- /// Check whether a previous work product exists for `v` and, if
+ /// Checks whether a previous work product exists for `v` and, if
/// so, return the path that leads to it. Used to skip doing work.
pub fn previous_work_product(&self, v: &WorkProductId) -> Option<WorkProduct> {
self.data
}
}
- /// Try to mark a dep-node which existed in the previous compilation session as green
+ /// Tries to mark a dep-node which existed in the previous compilation session as green.
fn try_mark_previous_green<'tcx>(
&self,
tcx: TyCtxt<'_, 'tcx, 'tcx>,
Some(dep_node_index)
}
- /// Atomically emits some loaded diagnotics assuming that this only gets called with
- /// did_allocation set to true on one thread
+ /// Atomically emits some loaded diagnostics, assuming that this only gets called with
+ /// `did_allocation` set to `true` on a single thread.
#[cold]
#[inline(never)]
fn emit_diagnostics<'tcx>(
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct WorkProduct {
pub cgu_name: String,
- /// Saved files associated with this CGU
+ /// Saved files associated with this CGU.
pub saved_files: Vec<(WorkProductFileKind, String)>,
}
#[allow(dead_code)]
forbidden_edge: Option<EdgeFilter>,
- // Anonymous DepNodes are nodes the ID of which we compute from the list of
- // their edges. This has the beneficial side-effect that multiple anonymous
- // nodes can be coalesced into one without changing the semantics of the
- // dependency graph. However, the merging of nodes can lead to a subtle
- // problem during red-green marking: The color of an anonymous node from
- // the current session might "shadow" the color of the node with the same
- // ID from the previous session. In order to side-step this problem, we make
- // sure that anon-node IDs allocated in different sessions don't overlap.
- // This is implemented by mixing a session-key into the ID fingerprint of
- // each anon node. The session-key is just a random number generated when
- // the DepGraph is created.
+ /// Anonymous `DepNode`s are nodes whose IDs we compute from the list of
+ /// their edges. This has the beneficial side-effect that multiple anonymous
+ /// nodes can be coalesced into one without changing the semantics of the
+ /// dependency graph. However, the merging of nodes can lead to a subtle
+ /// problem during red-green marking: The color of an anonymous node from
+ /// the current session might "shadow" the color of the node with the same
+ /// ID from the previous session. In order to side-step this problem, we make
+ /// sure that anonymous `NodeId`s allocated in different sessions don't overlap.
+ /// This is implemented by mixing a session-key into the ID fingerprint of
+ /// each anon node. The session-key is just a random number generated when
+ /// the `DepGraph` is created.
anon_id_seed: Fingerprint,
total_read_count: u64,
+// ignore-tidy-linelength
#![allow(non_snake_case)]
// Error messages for EXXXX errors.
Lifetime elision in implementation headers was part of the lifetime elision
RFC. It is, however, [currently unimplemented][iss15872].
-[book-le]: https://doc.rust-lang.org/nightly/book/first-edition/lifetimes.html#lifetime-elision
+[book-le]: https://doc.rust-lang.org/book/ch10-03-lifetime-syntax.html#lifetime-elision
[iss15872]: https://github.com/rust-lang/rust/issues/15872
"##,
#![no_std]
```
-See also https://doc.rust-lang.org/book/first-edition/no-stdlib.html
+See also the [unstable book][1].
+
+[1]: https://doc.rust-lang.org/unstable-book/language-features/lang-items.html#writing-an-executable-without-stdlib
"##,
E0214: r##"
```
To understand better how closures work in Rust, read:
-https://doc.rust-lang.org/book/first-edition/closures.html
+https://doc.rust-lang.org/book/ch13-01-closures.html
"##,
E0580: r##"
}
impl<'a, 'tcx> CheckAttrVisitor<'a, 'tcx> {
- /// Check any attribute.
+ /// Checks any attribute.
fn check_attributes(&self, item: &hir::Item, target: Target) {
if target == Target::Fn || target == Target::Const {
self.tcx.codegen_fn_attrs(self.tcx.hir().local_def_id(item.id));
self.check_used(item, target);
}
- /// Check if an `#[inline]` is applied to a function or a closure.
+ /// Checks if an `#[inline]` is applied to a function or a closure.
fn check_inline(&self, attr: &hir::Attribute, span: &Span, target: Target) {
if target != Target::Fn && target != Target::Closure {
struct_span_err!(self.tcx.sess,
}
}
- /// Check if the `#[non_exhaustive]` attribute on an `item` is valid.
+ /// Checks if the `#[non_exhaustive]` attribute on an `item` is valid.
fn check_non_exhaustive(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) {
match target {
Target::Struct | Target::Enum => { /* Valid */ },
}
}
- /// Check if the `#[marker]` attribute on an `item` is valid.
+ /// Checks if the `#[marker]` attribute on an `item` is valid.
fn check_marker(&self, attr: &hir::Attribute, item: &hir::Item, target: Target) {
match target {
Target::Trait => { /* Valid */ },
}
}
- /// Check if the `#[repr]` attributes on `item` are valid.
+ /// Checks if the `#[repr]` attributes on `item` are valid.
fn check_repr(&self, item: &hir::Item, target: Target) {
// Extract the names of all repr hints, e.g., [foo, bar, align] for:
// ```
}
impl<T> PerNS<Option<T>> {
- /// Returns whether all the items in this collection are `None`.
+ /// Returns `true` if all the items in this collection are `None`.
pub fn is_empty(&self) -> bool {
self.type_ns.is_none() && self.value_ns.is_none() && self.macro_ns.is_none()
}
}
impl DefId {
- /// Make a local `DefId` with the given index.
+ /// Makes a local `DefId` from the given `DefIndex`.
#[inline]
pub fn local(index: DefIndex) -> DefId {
DefId { krate: LOCAL_CRATE, index: index }
//! `super::itemlikevisit::ItemLikeVisitor` trait.**
//!
//! If you have decided to use this visitor, here are some general
-//! notes on how to do it:
+//! notes on how to do so:
//!
//! Each overridden visit method has full control over what
//! happens with its node, it can do its own traversal of the node's children,
/// using this setting.
OnlyBodies(&'this Map<'tcx>),
- /// Visit all nested things, including item-likes.
+ /// Visits all nested things, including item-likes.
///
/// **This is an unusual choice.** It is used when you want to
/// process everything within their lexical context. Typically you
impl<'this, 'tcx> NestedVisitorMap<'this, 'tcx> {
/// Returns the map to use for an "intra item-like" thing (if any).
- /// e.g., function body.
+ /// E.g., function body.
pub fn intra(self) -> Option<&'this Map<'tcx>> {
match self {
NestedVisitorMap::None => None,
}
/// Returns the map to use for an "item-like" thing (if any).
- /// e.g., item, impl-item.
+ /// E.g., item, impl-item.
pub fn inter(self) -> Option<&'this Map<'tcx>> {
match self {
NestedVisitorMap::None => None,
}
/// Each method of the Visitor trait is a hook to be potentially
-/// overridden. Each method's default implementation recursively visits
+/// overridden. Each method's default implementation recursively visits
/// the substructure of the input via the corresponding `walk` method;
/// e.g., the `visit_mod` method by default calls `intravisit::walk_mod`.
///
/// on `visit_nested_item` for details on how to visit nested items.
///
/// If you want to ensure that your code handles every variant
-/// explicitly, you need to override each method. (And you also need
+/// explicitly, you need to override each method. (And you also need
/// to monitor future changes to `Visitor` in case a new method with a
/// new default implementation gets introduced.)
pub trait Visitor<'v> : Sized {
}
}
- /// Visit the top-level item and (optionally) nested items / impl items. See
+ /// Visits the top-level item and (optionally) nested items / impl items. See
/// `visit_nested_item` for details.
fn visit_item(&mut self, i: &'v Item) {
walk_item(self, i)
}
/// When invoking `visit_all_item_likes()`, you need to supply an
- /// item-like visitor. This method converts a "intra-visit"
+ /// item-like visitor. This method converts an "intra-visit"
/// visitor into an item-like visitor that walks the entire tree.
/// If you use this, you probably don't want to process the
/// contents of nested item-like things, since the outer loop will
//! Since the AST and HIR are fairly similar, this is mostly a simple procedure,
//! much like a fold. Where lowering involves a bit more work things get more
//! interesting and there are some invariants you should know about. These mostly
-//! concern spans and ids.
+//! concern spans and IDs.
//!
//! Spans are assigned to AST nodes during parsing and then are modified during
//! expansion to indicate the origin of a node and the process it went through
-//! being expanded. Ids are assigned to AST nodes just before lowering.
+//! being expanded. IDs are assigned to AST nodes just before lowering.
//!
-//! For the simpler lowering steps, ids and spans should be preserved. Unlike
+//! For the simpler lowering steps, IDs and spans should be preserved. Unlike
//! expansion we do not preserve the process of lowering in the spans, so spans
//! should not be modified here. When creating a new node (as opposed to
-//! 'folding' an existing one), then you create a new id using `next_id()`.
+//! 'folding' an existing one), then you create a new ID using `next_id()`.
//!
-//! You must ensure that ids are unique. That means that you should only use the
-//! id from an AST node in a single HIR node (you can assume that AST node ids
-//! are unique). Every new node must have a unique id. Avoid cloning HIR nodes.
-//! If you do, you must then set the new node's id to a fresh one.
+//! You must ensure that IDs are unique. That means that you should only use the
+//! ID from an AST node in a single HIR node (you can assume that AST node IDs
+//! are unique). Every new node must have a unique ID. Avoid cloning HIR nodes.
+//! If you do, you must then set the new node's ID to a fresh one.
//!
//! Spans are used for error messages and for tools to map semantics back to
-//! source code. It is therefore not as important with spans as ids to be strict
+//! source code. It is therefore not as important with spans as IDs to be strict
//! about use (you can't break the compiler by screwing up a span). Obviously, a
//! HIR node can only have a single span. But multiple nodes can have the same
//! span and spans don't need to be kept in order, etc. Where code is preserved
//! in the HIR, especially for multiple identifiers.
use crate::dep_graph::DepGraph;
-use crate::errors::Applicability;
use crate::hir::{self, ParamName};
use crate::hir::HirVec;
use crate::hir::map::{DefKey, DefPathData, Definitions};
use crate::lint::builtin::{self, PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
ELIDED_LIFETIMES_IN_PATHS};
use crate::middle::cstore::CrateStore;
-use rustc_data_structures::fx::FxHashSet;
-use rustc_data_structures::indexed_vec::IndexVec;
-use rustc_data_structures::thin_vec::ThinVec;
-use rustc_data_structures::sync::Lrc;
use crate::session::Session;
use crate::session::config::nightly_options;
use crate::util::common::FN_OUTPUT_NAME;
use crate::util::nodemap::{DefIdMap, NodeMap};
+use errors::Applicability;
+use rustc_data_structures::fx::FxHashSet;
+use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::thin_vec::ThinVec;
+use rustc_data_structures::sync::Lrc;
use std::collections::{BTreeSet, BTreeMap};
use std::fmt::Debug;
is_value: bool,
) -> hir::Path;
- /// Obtain the resolution for a node-id.
+ /// Obtains the resolution for a `NodeId`.
fn get_resolution(&mut self, id: NodeId) -> Option<PathResolution>;
/// Obtain the possible resolutions for the given `use` statement.
}
/// What to do when we encounter an **anonymous** lifetime
-/// reference. Anonymous lifetime references come in two flavors. You
+/// reference. Anonymous lifetime references come in two flavors. You
/// have implicit, or fully elided, references to lifetimes, like the
/// one in `&T` or `Ref<T>`, and you have `'_` lifetimes, like `&'_ T`
-/// or `Ref<'_, T>`. These often behave the same, but not always:
+/// or `Ref<'_, T>`. These often behave the same, but not always:
///
/// - certain usages of implicit references are deprecated, like
/// `Ref<T>`, and we sometimes just give hard errors in those cases
/// Paths like the visibility path in `pub(super) use foo::{bar, baz}` are repeated
/// many times in the HIR tree; for each occurrence, we need to assign distinct
- /// node-ids. (See e.g., #56128.)
+ /// `NodeId`s. (See, e.g., #56128.)
fn renumber_segment_ids(&mut self, path: &P<hir::Path>) -> P<hir::Path> {
debug!("renumber_segment_ids(path = {:?})", path);
let mut path = path.clone();
hir::ExprKind::Call(f, args.iter().map(|x| self.lower_expr(x)).collect())
}
ExprKind::MethodCall(ref seg, ref args) => {
- let hir_seg = self.lower_path_segment(
+ let hir_seg = P(self.lower_path_segment(
e.span,
seg,
ParamMode::Optional,
ParenthesizedGenericArgs::Err,
ImplTraitContext::disallowed(),
None,
- );
+ ));
let args = args.iter().map(|x| self.lower_expr(x)).collect();
hir::ExprKind::MethodCall(hir_seg, seg.ident.span, args)
}
node: if is_unit {
hir::ExprKind::Path(struct_path)
} else {
- hir::ExprKind::Struct(struct_path, fields, None)
+ hir::ExprKind::Struct(P(struct_path), fields, None)
},
span: e.span,
attrs: e.attrs.clone(),
hir::ExprKind::InlineAsm(P(hir_asm), outputs, inputs)
}
ExprKind::Struct(ref path, ref fields, ref maybe_expr) => hir::ExprKind::Struct(
- self.lower_qpath(
+ P(self.lower_qpath(
e.id,
&None,
path,
ParamMode::Optional,
ImplTraitContext::disallowed(),
- ),
+ )),
fields.iter().map(|x| self.lower_field(x)).collect(),
maybe_expr.as_ref().map(|x| P(self.lower_expr(x))),
),
//! This module provides a simplified abstraction for working with
-//! code blocks identified by their integer node-id. In particular,
+//! code blocks identified by their integer `NodeId`. In particular,
//! it captures a common set of attributes that all "function-like
-//! things" (represented by `FnLike` instances) share. For example,
+//! things" (represented by `FnLike` instances) share. For example,
//! all `FnLike` instances have a type signature (be it explicit or
-//! inferred). And all `FnLike` instances have a body, i.e., the code
+//! inferred). And all `FnLike` instances have a body, i.e., the code
//! that is run when the function-like thing it represents is invoked.
//!
//! With the above abstraction in place, one can treat the program
use crate::hir::map::{ITEM_LIKE_SPACE, REGULAR_SPACE};
-/// Creates def ids for nodes in the AST.
+/// Creates `DefId`s for nodes in the AST.
pub struct DefCollector<'a> {
definitions: &'a mut Definitions,
parent_def: Option<DefIndex>,
-//! For each definition, we track the following data. A definition
-//! here is defined somewhat circularly as "something with a def-id",
+//! For each definition, we track the following data. A definition
+//! here is defined somewhat circularly as "something with a `DefId`",
//! but it generally corresponds to things like structs, enums, etc.
//! There are also some rather random cases (like const initializer
//! expressions) that are mostly just leftovers.
/// any) with a `DisambiguatedDefPathData`.
#[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)]
pub struct DefKey {
- /// Parent path.
+ /// The parent path.
pub parent: Option<DefIndex>,
- /// Identifier of this node.
+ /// The identifier of this node.
pub disambiguated_data: DisambiguatedDefPathData,
}
}
}
-/// Pair of `DefPathData` and an integer disambiguator. The integer is
+/// A pair of `DefPathData` and an integer disambiguator. The integer is
/// normally 0, but in the event that there are multiple defs with the
/// same `parent` and `data`, we use this field to disambiguate
/// between them. This introduces some artificial ordering dependency
/// but means that if you have (e.g.) two impls for the same type in
-/// the same module, they do get distinct def-ids.
+/// the same module, they do get distinct `DefId`s.
#[derive(Clone, PartialEq, Debug, Hash, RustcEncodable, RustcDecodable)]
pub struct DisambiguatedDefPathData {
pub data: DefPathData,
#[derive(Clone, Debug, Hash, RustcEncodable, RustcDecodable)]
pub struct DefPath {
- /// the path leading from the crate root to the item
+ /// The path leading from the crate root to the item.
pub data: Vec<DisambiguatedDefPathData>,
- /// what krate root is this path relative to?
+ /// The crate root this path is relative to.
pub krate: CrateNum,
}
DefPath { data: data, krate: krate }
}
- /// Returns a string representation of the DefPath without
+ /// Returns a string representation of the `DefPath` without
/// the crate-prefix. This method is useful if you don't have
- /// a TyCtxt available.
+ /// a `TyCtxt` available.
pub fn to_string_no_crate(&self) -> String {
let mut s = String::with_capacity(self.data.len() * 16);
s
}
- /// Return filename friendly string of the DefPah with the
+ /// Returns a filename-friendly string for the `DefPath`, with the
/// crate-prefix.
pub fn to_string_friendly<F>(&self, crate_imported_name: F) -> String
where F: FnOnce(CrateNum) -> Symbol
s
}
- /// Return filename friendly string of the DefPah without
+ /// Returns a filename-friendly string of the `DefPath`, without
/// the crate-prefix. This method is useful if you don't have
- /// a TyCtxt available.
+ /// a `TyCtxt` available.
pub fn to_filename_friendly_no_crate(&self) -> String {
let mut s = String::with_capacity(self.data.len() * 16);
}
impl Definitions {
- /// Create new empty definition map.
+ /// Creates a new empty definition map.
///
- /// The DefIndex returned from a new Definitions are as follows:
- /// 1. At DefIndexAddressSpace::Low,
+ /// The `DefIndex`es returned from a new `Definitions` are as follows:
+ /// 1. At `DefIndexAddressSpace::Low`,
/// CRATE_ROOT has index 0:0, and then new indexes are allocated in
/// ascending order.
- /// 2. At DefIndexAddressSpace::High,
- /// the first FIRST_FREE_HIGH_DEF_INDEX indexes are reserved for
- /// internal use, then 1:FIRST_FREE_HIGH_DEF_INDEX are allocated in
+ /// 2. At `DefIndexAddressSpace::High`,
+ /// the first `FIRST_FREE_HIGH_DEF_INDEX` indexes are reserved for
+ /// internal use, then `1:FIRST_FREE_HIGH_DEF_INDEX` are allocated in
/// ascending order.
- ///
- /// FIXME: there is probably a better place to put this comment.
+ //
+ // FIXME: there is probably a better place to put this comment.
pub fn new() -> Self {
Self::default()
}
&self.table
}
- /// Get the number of definitions.
+ /// Gets the number of definitions.
pub fn def_index_counts_lo_hi(&self) -> (usize, usize) {
(self.table.index_to_key[DefIndexAddressSpace::Low.index()].len(),
self.table.index_to_key[DefIndexAddressSpace::High.index()].len())
self.node_to_hir_id[node_id]
}
- /// Retrieve the span of the given `DefId` if `DefId` is in the local crate, the span exists and
- /// it's not DUMMY_SP
+ /// Retrieves the span of the given `DefId` if `DefId` is in the local crate, the span exists
+ /// and it's not `DUMMY_SP`.
#[inline]
pub fn opt_span(&self, def_id: DefId) -> Option<Span> {
if def_id.krate == LOCAL_CRATE {
}
}
- /// Add a definition with a parent definition.
+ /// Adds a root definition (no parent).
pub fn create_root_def(&mut self,
crate_name: &str,
crate_disambiguator: CrateDisambiguator)
index
}
- /// Initialize the ast::NodeId to HirId mapping once it has been generated during
+ /// Initializes the `ast::NodeId` to `HirId` mapping once it has been generated during
/// AST to HIR lowering.
pub fn init_node_id_to_hir_id_mapping(&mut self,
mapping: IndexVec<ast::NodeId, hir::HirId>) {
pub const ITEM_LIKE_SPACE: DefIndexAddressSpace = DefIndexAddressSpace::Low;
pub const REGULAR_SPACE: DefIndexAddressSpace = DefIndexAddressSpace::High;
-/// Represents an entry and its parent NodeId.
+/// Represents an entry and its parent `NodeId`.
#[derive(Copy, Clone, Debug)]
pub struct Entry<'hir> {
parent: NodeId,
}
}
- fn is_body_owner(self, node_id: NodeId) -> bool {
+ fn is_body_owner(self, hir_id: HirId) -> bool {
match self.associated_body() {
- Some(b) => b.node_id == node_id,
+ Some(b) => b.hir_id == hir_id,
None => false,
}
}
}
}
-/// Represents a mapping from Node IDs to AST elements and their parent
-/// Node IDs
+/// Represents a mapping from `NodeId`s to AST elements and their parent `NodeId`s.
#[derive(Clone)]
pub struct Map<'hir> {
/// The backing storage for all the AST nodes.
}
pub fn body(&self, id: BodyId) -> &'hir Body {
- self.read(id.node_id);
+ self.read_by_hir_id(id.hir_id);
// N.B., intentionally bypass `self.forest.krate()` so that we
// do not trigger a read of the whole krate here
/// Returns the `NodeId` that corresponds to the definition of
/// which this is the body of, i.e., a `fn`, `const` or `static`
/// item (possibly associated), a closure, or a `hir::AnonConst`.
- pub fn body_owner(&self, BodyId { node_id }: BodyId) -> NodeId {
+ pub fn body_owner(&self, BodyId { hir_id }: BodyId) -> NodeId {
+ let node_id = self.hir_to_node_id(hir_id);
let parent = self.get_parent_node(node_id);
- assert!(self.map[parent.as_usize()].map_or(false, |e| e.is_body_owner(node_id)));
+ assert!(self.map[parent.as_usize()].map_or(false, |e| e.is_body_owner(hir_id)));
parent
}
self.local_def_id(self.body_owner(id))
}
- /// Given a node id, returns the `BodyId` associated with it,
+ /// Given a `NodeId`, returns the `BodyId` associated with it,
/// if the node is a body owner, otherwise returns `None`.
pub fn maybe_body_owned_by(&self, id: NodeId) -> Option<BodyId> {
if let Some(entry) = self.find_entry(id) {
}
}
+ // FIXME(@ljedrz): replace the NodeId variant
+ pub fn maybe_body_owned_by_by_hir_id(&self, id: HirId) -> Option<BodyId> {
+ let node_id = self.hir_to_node_id(id);
+ self.maybe_body_owned_by(node_id)
+ }
+
/// Given a body owner's id, returns the `BodyId` associated with it.
pub fn body_owned_by(&self, id: NodeId) -> BodyId {
self.maybe_body_owned_by(id).unwrap_or_else(|| {
}
}
+ // FIXME(@ljedrz): replace the NodeId variant
+ pub fn body_owner_kind_by_hir_id(&self, id: HirId) -> BodyOwnerKind {
+ let node_id = self.hir_to_node_id(id);
+ self.body_owner_kind(node_id)
+ }
+
pub fn ty_param_owner(&self, id: NodeId) -> NodeId {
match self.get(id) {
Node::Item(&Item { node: ItemKind::Trait(..), .. }) => id,
self.trait_auto_impl(trait_did).is_some()
}
- /// Get the attributes on the krate. This is preferable to
+ /// Gets the attributes on the crate. This is preferable to
/// invoking `krate.attrs` because it registers a tighter
/// dep-graph access.
pub fn krate_attrs(&self) -> &'hir [ast::Attribute] {
self.get_generics(id).map(|generics| generics.span).filter(|sp| *sp != DUMMY_SP)
}
- /// Retrieve the Node corresponding to `id`, returning None if
- /// cannot be found.
+ /// Retrieves the `Node` corresponding to `id`, returning `None` if it cannot be found.
pub fn find(&self, id: NodeId) -> Option<Node<'hir>> {
let result = self.find_entry(id).and_then(|entry| {
if let Node::Crate = entry.node {
/// returns the enclosing item. Note that this might not be the actual parent
/// node in the AST - some kinds of nodes are not in the map and these will
/// never appear as the parent_node. So you can always walk the `parent_nodes`
- /// from a node to the root of the ast (unless you get the same id back here
- /// that can happen if the id is not in the map itself or is just weird).
+ /// from a node to the root of the AST (unless you get the same ID back here,
+ /// which can happen if the ID is not in the map itself or is just weird).
pub fn get_parent_node(&self, id: NodeId) -> NodeId {
if self.dep_graph.is_fully_enabled() {
let hir_id_owner = self.node_to_hir_id(id).owner;
/// If there is some error when walking the parents (e.g., a node does not
/// have a parent in the map or a node can't be found), then we return the
- /// last good node id we found. Note that reaching the crate root (`id == 0`),
+ /// last good `NodeId` we found. Note that reaching the crate root (`id == 0`),
/// is not an error, since items in the crate module have the crate root as
/// parent.
fn walk_parent_nodes<F, F2>(&self,
}
}
- /// Retrieve the `NodeId` for `id`'s enclosing method, unless there's a
+ /// Retrieves the `NodeId` for `id`'s enclosing method, unless there's a
/// `while` or `loop` before reaching it, as block tail returns are not
/// available in them.
///
self.walk_parent_nodes(id, match_fn, match_non_returning_block).ok()
}
- /// Retrieve the `NodeId` for `id`'s parent item, or `id` itself if no
+ /// Retrieves the `NodeId` for `id`'s parent item, or `id` itself if no
/// parent item is in this map. The "parent item" is the closest parent node
/// in the HIR which is recorded by the map and is an item, either an item
/// in a module, trait, or impl.
self.local_def_id(self.get_module_parent_node(id))
}
+ // FIXME(@ljedrz): replace the NodeId variant
+ pub fn get_module_parent_by_hir_id(&self, id: HirId) -> DefId {
+ let node_id = self.hir_to_node_id(id);
+ self.get_module_parent(node_id)
+ }
+
/// Returns the `NodeId` of `id`'s nearest module parent, or `id` itself if no
/// module parent is in this map.
pub fn get_module_parent_node(&self, id: NodeId) -> NodeId {
}
impl<'a, 'hir> NodesMatchingSuffix<'a, 'hir> {
- /// Returns true only if some suffix of the module path for parent
+ /// Returns `true` only if some suffix of the module path for parent
/// matches `self.in_which`.
///
/// In other words: let `[x_0,x_1,...,x_k]` be `self.in_which`;
pub use self::UnOp::*;
pub use self::UnsafeSource::*;
-use crate::errors::FatalError;
use crate::hir::def::Def;
use crate::hir::def_id::{DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX};
use crate::util::nodemap::{NodeMap, FxHashSet};
use crate::mir::mono::Linkage;
+use errors::FatalError;
use syntax_pos::{Span, DUMMY_SP, symbol::InternedString};
use syntax::source_map::Spanned;
use rustc_target::spec::abi::Abi;
pub mod pat_util;
pub mod print;
-/// A HirId uniquely identifies a node in the HIR of the current crate. It is
-/// composed of the `owner`, which is the DefIndex of the directly enclosing
-/// hir::Item, hir::TraitItem, or hir::ImplItem (i.e., the closest "item-like"),
+/// Uniquely identifies a node in the HIR of the current crate. It is
+/// composed of the `owner`, which is the `DefIndex` of the directly enclosing
+/// `hir::Item`, `hir::TraitItem`, or `hir::ImplItem` (i.e., the closest "item-like"),
/// and the `local_id` which is unique within the given owner.
///
/// This two-level structure makes for more stable values: One can move an item
/// around within the source code, or add or remove stuff before it, without
-/// the local_id part of the HirId changing, which is a very useful property in
+/// the `local_id` part of the `HirId` changing, which is a very useful property in
/// incremental compilation where we have to persist things through changes to
/// the code base.
-#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
+#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)]
pub struct HirId {
pub owner: DefIndex,
pub local_id: ItemLocalId,
pub use self::item_local_id_inner::ItemLocalId;
-/// The `HirId` corresponding to CRATE_NODE_ID and CRATE_DEF_INDEX
+/// The `HirId` corresponding to `CRATE_NODE_ID` and `CRATE_DEF_INDEX`.
pub const CRATE_HIR_ID: HirId = HirId {
owner: CRATE_DEF_INDEX,
local_id: ItemLocalId::from_u32_const(0)
pub hir_id: HirId,
pub span: Span,
- /// Either "'a", referring to a named lifetime definition,
- /// or "" (aka keywords::Invalid), for elision placeholders.
+ /// Either "`'a`", referring to a named lifetime definition,
+ /// or "``" (i.e., `keywords::Invalid`), for elision placeholders.
///
/// HIR lowering inserts these placeholders in type paths that
/// refer to type definitions needing lifetime parameters,
/// Some user-given name like `T` or `'x`.
Plain(Ident),
- /// Synthetic name generated when user elided a lifetime in an impl header,
- /// e.g., the lifetimes in cases like these:
+ /// Synthetic name generated when user elided a lifetime in an impl header.
+ ///
+ /// E.g., the lifetimes in cases like these:
///
/// impl Foo for &u32
/// impl Foo<'_> for u32
/// Indicates an illegal name was given and an error has been
/// repored (so we should squelch other derived errors). Occurs
- /// when e.g., `'_` is used in the wrong place.
+ /// when, e.g., `'_` is used in the wrong place.
Error,
}
/// User-given names or fresh (synthetic) names.
Param(ParamName),
- /// User typed nothing. e.g., the lifetime in `&u32`.
+ /// User wrote nothing (e.g., the lifetime in `&u32`).
Implicit,
/// Indicates an error during lowering (usually `'_` in wrong place)
/// that was already reported.
Error,
- /// User typed `'_`.
+ /// User wrote `'_`.
Underscore,
- /// User wrote `'static`
+ /// User wrote `'static`.
Static,
}
}
}
-/// A "Path" is essentially Rust's notion of a name; for instance:
+/// A `Path` is essentially Rust's notion of a name; for instance,
/// `std::cmp::PartialEq`. It's represented as a sequence of identifiers,
/// along with a bunch of supporting information.
#[derive(Clone, RustcEncodable, RustcDecodable)]
}
impl PathSegment {
- /// Convert an identifier to the corresponding segment.
+ /// Converts an identifier to the corresponding segment.
pub fn from_ident(ident: Ident) -> PathSegment {
PathSegment {
ident,
}
}
-/// Synthetic Type Parameters are converted to an other form during lowering, this allows
-/// to track the original form they had. Useful for error messages.
+/// Synthetic type parameters are converted to another form during lowering; this allows
+/// us to track the original form they had, and is useful for error messages.
#[derive(Copy, Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug)]
pub enum SyntheticTyParamKind {
ImplTrait
}
-/// A `where` clause in a definition
+/// A where-clause in a definition.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct WhereClause {
pub id: NodeId,
}
}
-/// A single predicate in a `where` clause
+/// A single predicate in a where-clause.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum WherePredicate {
/// A type binding (e.g., `for<'c> Foo: Send + Clone + 'c`).
}
}
-/// A type bound, eg `for<'c> Foo: Send+Clone+'c`
+/// A type bound (e.g., `for<'c> Foo: Send + Clone + 'c`).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct WhereBoundPredicate {
pub span: Span,
- /// Any generics from a `for` binding
+ /// Any generics from a `for` binding.
pub bound_generic_params: HirVec<GenericParam>,
- /// The type being bounded
+ /// The type being bounded.
pub bounded_ty: P<Ty>,
- /// Trait and lifetime bounds (`Clone+Send+'static`)
+ /// Trait and lifetime bounds (e.g., `Clone + Send + 'static`).
pub bounds: GenericBounds,
}
-/// A lifetime predicate, e.g., `'a: 'b+'c`
+/// A lifetime predicate (e.g., `'a: 'b + 'c`).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct WhereRegionPredicate {
pub span: Span,
pub bounds: GenericBounds,
}
-/// An equality predicate (unsupported), e.g., `T=int`
+/// An equality predicate (e.g., `T = int`); currently unsupported.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct WhereEqPredicate {
pub id: NodeId,
}
}
- /// A parallel version of visit_all_item_likes
+ /// A parallel version of `visit_all_item_likes`.
pub fn par_visit_all_item_likes<'hir, V>(&'hir self, visitor: &V)
where V: itemlikevisit::ParItemLikeVisitor<'hir> + Sync + Send
{
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Block {
- /// Statements in a block
+ /// Statements in a block.
pub stmts: HirVec<Stmt>,
/// An expression at the end of the block
- /// without a semicolon, if any
+ /// without a semicolon, if any.
pub expr: Option<P<Expr>>,
pub id: NodeId,
pub hir_id: HirId,
- /// Distinguishes between `unsafe { ... }` and `{ ... }`
+ /// Distinguishes between `unsafe { ... }` and `{ ... }`.
pub rules: BlockCheckMode,
pub span: Span,
/// If true, then there may exist `break 'a` values that aim to
}
}
-/// A single field in a struct pattern
+/// A single field in a struct pattern.
///
/// Patterns like the fields of Foo `{ x, ref y, ref mut z }`
/// are treated the same as` x: x, y: ref y, z: ref mut z`,
-/// except is_shorthand is true
+/// except `is_shorthand` is true.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct FieldPat {
pub id: NodeId,
pub hir_id: HirId,
- /// The identifier for the field
+ /// The identifier for the field.
pub ident: Ident,
- /// The pattern the field is destructured to
+ /// The pattern the field is destructured to.
pub pat: P<Pat>,
pub is_shorthand: bool,
}
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum PatKind {
- /// Represents a wildcard pattern (`_`)
+ /// Represents a wildcard pattern (i.e., `_`).
Wild,
/// A fresh binding `ref mut binding @ OPT_SUBPATTERN`.
/// The `NodeId` is the canonical ID for the variable being bound,
- /// e.g., in `Ok(x) | Err(x)`, both `x` use the same canonical ID,
+ /// (e.g., in `Ok(x) | Err(x)`, both `x` use the same canonical ID),
/// which is the pattern ID of the first `x`.
Binding(BindingAnnotation, NodeId, HirId, Ident, Option<P<Pat>>),
- /// A struct or struct variant pattern, e.g., `Variant {x, y, ..}`.
+ /// A struct or struct variant pattern (e.g., `Variant {x, y, ..}`).
/// The `bool` is `true` in the presence of a `..`.
Struct(QPath, HirVec<Spanned<FieldPat>>, bool),
/// A tuple struct/variant pattern `Variant(x, y, .., z)`.
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
- /// 0 <= position <= subpats.len()
+ /// `0 <= position <= subpats.len()`
TupleStruct(QPath, HirVec<P<Pat>>, Option<usize>),
/// A path pattern for an unit struct/variant or a (maybe-associated) constant.
Path(QPath),
- /// A tuple pattern `(a, b)`.
+ /// A tuple pattern (e.g., `(a, b)`).
/// If the `..` pattern fragment is present, then `Option<usize>` denotes its position.
- /// 0 <= position <= subpats.len()
+ /// `0 <= position <= subpats.len()`
Tuple(HirVec<P<Pat>>, Option<usize>),
- /// A `box` pattern
+ /// A `box` pattern.
Box(P<Pat>),
- /// A reference pattern, e.g., `&mut (a, b)`
+ /// A reference pattern (e.g., `&mut (a, b)`).
Ref(P<Pat>, Mutability),
- /// A literal
+ /// A literal.
Lit(P<Expr>),
- /// A range pattern, e.g., `1...2` or `1..2`
+ /// A range pattern (e.g., `1...2` or `1..2`).
Range(P<Expr>, P<Expr>, RangeEnd),
/// `[a, b, ..i, y, z]` is represented as:
- /// `PatKind::Slice(box [a, b], Some(i), box [y, z])`
+ /// `PatKind::Slice(box [a, b], Some(i), box [y, z])`.
Slice(HirVec<P<Pat>>, Option<P<Pat>>, HirVec<P<Pat>>),
}
}
impl Mutability {
- /// Return MutMutable only if both arguments are mutable.
+ /// Returns `MutMutable` only if both arguments are mutable.
pub fn and(self, other: Self) -> Self {
match self {
MutMutable => other,
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy, Hash)]
pub enum BinOpKind {
- /// The `+` operator (addition)
+ /// The `+` operator (addition).
Add,
- /// The `-` operator (subtraction)
+ /// The `-` operator (subtraction).
Sub,
- /// The `*` operator (multiplication)
+ /// The `*` operator (multiplication).
Mul,
- /// The `/` operator (division)
+ /// The `/` operator (division).
Div,
- /// The `%` operator (modulus)
+ /// The `%` operator (modulus).
Rem,
- /// The `&&` operator (logical and)
+ /// The `&&` operator (logical and).
And,
- /// The `||` operator (logical or)
+ /// The `||` operator (logical or).
Or,
- /// The `^` operator (bitwise xor)
+ /// The `^` operator (bitwise xor).
BitXor,
- /// The `&` operator (bitwise and)
+ /// The `&` operator (bitwise and).
BitAnd,
- /// The `|` operator (bitwise or)
+ /// The `|` operator (bitwise or).
BitOr,
- /// The `<<` operator (shift left)
+ /// The `<<` operator (shift left).
Shl,
- /// The `>>` operator (shift right)
+ /// The `>>` operator (shift right).
Shr,
- /// The `==` operator (equality)
+ /// The `==` operator (equality).
Eq,
- /// The `<` operator (less than)
+ /// The `<` operator (less than).
Lt,
- /// The `<=` operator (less than or equal to)
+ /// The `<=` operator (less than or equal to).
Le,
- /// The `!=` operator (not equal to)
+ /// The `!=` operator (not equal to).
Ne,
- /// The `>=` operator (greater than or equal to)
+ /// The `>=` operator (greater than or equal to).
Ge,
- /// The `>` operator (greater than)
+ /// The `>` operator (greater than).
Gt,
}
}
}
- /// Returns `true` if the binary operator takes its arguments by value
+ /// Returns `true` if the binary operator takes its arguments by value.
pub fn is_by_value(self) -> bool {
!self.is_comparison()
}
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy, Hash)]
pub enum UnOp {
- /// The `*` operator for dereferencing
+ /// The `*` operator (dereferencing).
UnDeref,
- /// The `!` operator for logical inversion
+ /// The `!` operator (logical negation).
UnNot,
- /// The `-` operator for negation
+ /// The `-` operator (negation).
UnNeg,
}
}
}
- /// Returns `true` if the unary operator takes its argument by value
+ /// Returns `true` if the unary operator takes its argument by value.
pub fn is_by_value(self) -> bool {
match self {
UnNeg | UnNot => true,
}
}
-/// A statement
+/// A statement.
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub struct Stmt {
pub id: NodeId,
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub enum StmtKind {
- /// A local (let) binding:
+ /// A local (`let`) binding.
Local(P<Local>),
- /// An item binding:
+ /// An item binding.
Item(P<ItemId>),
- /// Expr without trailing semi-colon (must have unit type):
+ /// An expression without a trailing semi-colon (must have unit type).
Expr(P<Expr>),
- /// Expr with trailing semi-colon (may have any type):
+ /// An expression with a trailing semi-colon (may have any type).
Semi(P<Expr>),
}
}
}
-/// Local represents a `let` statement, e.g., `let <pat>:<ty> = <expr>;`
+/// Represents a `let` statement (i.e., `let <pat>:<ty> = <expr>;`).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Local {
pub pat: P<Pat>,
pub ty: Option<P<Ty>>,
- /// Initializer expression to set the value, if any
+ /// Initializer expression to set the value, if any.
pub init: Option<P<Expr>>,
pub id: NodeId,
pub hir_id: HirId,
pub source: LocalSource,
}
-/// represents one arm of a 'match'
+/// Represents a single arm of a `match` expression.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Arm {
pub attrs: HirVec<Attribute>,
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash, Debug)]
pub struct BodyId {
- pub node_id: NodeId,
+ pub hir_id: HirId,
}
/// The body of a function, closure, or constant value. In the case of
impl Body {
pub fn id(&self) -> BodyId {
BodyId {
- node_id: self.value.id
+ hir_id: self.value.hir_id,
}
}
}
pub hir_id: HirId,
}
+// `Expr` is used a lot. Make sure it doesn't unintentionally get bigger.
+#[cfg(target_arch = "x86_64")]
+static_assert!(MEM_SIZE_OF_EXPR: std::mem::size_of::<Expr>() == 72);
+
impl Expr {
pub fn precedence(&self) -> ExprPrecedence {
match self.node {
pub enum ExprKind {
/// A `box x` expression.
Box(P<Expr>),
- /// An array (`[a, b, c, d]`)
+ /// An array (e.g., `[a, b, c, d]`).
Array(HirVec<Expr>),
- /// A function call
+ /// A function call.
///
/// The first field resolves to the function itself (usually an `ExprKind::Path`),
/// and the second field is the list of arguments.
/// This also represents calling the constructor of
/// tuple-like ADTs such as tuple structs and enum variants.
Call(P<Expr>, HirVec<Expr>),
- /// A method call (`x.foo::<'static, Bar, Baz>(a, b, c, d)`)
+ /// A method call (e.g., `x.foo::<'static, Bar, Baz>(a, b, c, d)`).
///
/// The `PathSegment`/`Span` represent the method name and its generic arguments
/// (within the angle brackets).
/// and the remaining elements are the rest of the arguments.
/// Thus, `x.foo::<Bar, Baz>(a, b, c, d)` is represented as
/// `ExprKind::MethodCall(PathSegment { foo, [Bar, Baz] }, [x, a, b, c, d])`.
- MethodCall(PathSegment, Span, HirVec<Expr>),
- /// A tuple (`(a, b, c ,d)`)
+ MethodCall(P<PathSegment>, Span, HirVec<Expr>),
+ /// A tuple (e.g., `(a, b, c, d)`).
Tup(HirVec<Expr>),
- /// A binary operation (For example: `a + b`, `a * b`)
+ /// A binary operation (e.g., `a + b`, `a * b`).
Binary(BinOp, P<Expr>, P<Expr>),
- /// A unary operation (For example: `!x`, `*x`)
+ /// A unary operation (e.g., `!x`, `*x`).
Unary(UnOp, P<Expr>),
- /// A literal (For example: `1`, `"foo"`)
+ /// A literal (e.g., `1`, `"foo"`).
Lit(Lit),
- /// A cast (`foo as f64`)
+ /// A cast (e.g., `foo as f64`).
Cast(P<Expr>, P<Ty>),
+ /// A type ascription (e.g., `x: Foo`); the `P<Ty>` is the ascribed type.
Type(P<Expr>, P<Ty>),
- /// An `if` block, with an optional else block
+ /// An `if` block, with an optional else block.
///
- /// `if expr { expr } else { expr }`
+ /// I.e., `if <expr> { <expr> } else { <expr> }`.
If(P<Expr>, P<Expr>, Option<P<Expr>>),
/// A while loop, with an optional label
///
- /// `'label: while expr { block }`
+ /// I.e., `'label: while <expr> { <block> }`.
While(P<Expr>, P<Block>, Option<Label>),
- /// Conditionless loop (can be exited with break, continue, or return)
+ /// A conditionless loop (can be exited with `break`, `continue`, or `return`).
///
- /// `'label: loop { block }`
+ /// I.e., `'label: loop { <block> }`.
Loop(P<Block>, Option<Label>, LoopSource),
/// A `match` block, with a source that indicates whether or not it is
/// the result of a desugaring, and if so, which kind.
Match(P<Expr>, HirVec<Arm>, MatchSource),
- /// A closure (for example, `move |a, b, c| {a + b + c}`).
+ /// A closure (e.g., `move |a, b, c| {a + b + c}`).
///
- /// The final span is the span of the argument block `|...|`
+ /// The final span is the span of the argument block `|...|`.
///
/// This may also be a generator literal, indicated by the final boolean,
- /// in that case there is an GeneratorClause.
+ /// in which case there is a `GeneratorClause`.
Closure(CaptureClause, P<FnDecl>, BodyId, Span, Option<GeneratorMovability>),
- /// A block (`'label: { ... }`)
+ /// A block (e.g., `'label: { ... }`).
Block(P<Block>, Option<Label>),
- /// An assignment (`a = foo()`)
+ /// An assignment (e.g., `a = foo()`).
Assign(P<Expr>, P<Expr>),
- /// An assignment with an operator
+ /// An assignment with an operator.
///
- /// For example, `a += 1`.
+ /// E.g., `a += 1`.
AssignOp(BinOp, P<Expr>, P<Expr>),
- /// Access of a named (`obj.foo`) or unnamed (`obj.0`) struct or tuple field
+ /// Access of a named (e.g., `obj.foo`) or unnamed (e.g., `obj.0`) struct or tuple field.
Field(P<Expr>, Ident),
- /// An indexing operation (`foo[2]`)
+ /// An indexing operation (`foo[2]`).
Index(P<Expr>, P<Expr>),
/// Path to a definition, possibly containing lifetime or type parameters.
Path(QPath),
- /// A referencing operation (`&a` or `&mut a`)
+ /// A referencing operation (i.e., `&a` or `&mut a`).
AddrOf(Mutability, P<Expr>),
- /// A `break`, with an optional label to break
+ /// A `break`, with an optional label to break.
Break(Destination, Option<P<Expr>>),
- /// A `continue`, with an optional label
+ /// A `continue`, with an optional label.
Continue(Destination),
- /// A `return`, with an optional value to be returned
+ /// A `return`, with an optional value to be returned.
Ret(Option<P<Expr>>),
/// Inline assembly (from `asm!`), with its outputs and inputs.
///
/// For example, `Foo {x: 1, y: 2}`, or
/// `Foo {x: 1, .. base}`, where `base` is the `Option<Expr>`.
- Struct(QPath, HirVec<Field>, Option<P<Expr>>),
+ Struct(P<QPath>, HirVec<Field>, Option<P<Expr>>),
/// An array literal constructed from one repeated element.
///
/// to be repeated; the second is the number of times to repeat it.
Repeat(P<Expr>, AnonConst),
- /// A suspension point for generators. This is `yield <expr>` in Rust.
+ /// A suspension point for generators (i.e., `yield <expr>`).
Yield(P<Expr>),
- /// Placeholder for an expression that wasn't syntactically well formed in some way.
+ /// A placeholder for an expression that wasn't syntactically well formed in some way.
Err,
}
/// Path to a definition, optionally "fully-qualified" with a `Self`
/// type, if the path points to an associated item in a trait.
///
- /// e.g., an unqualified path like `Clone::clone` has `None` for `Self`,
+ /// E.g., an unqualified path like `Clone::clone` has `None` for `Self`,
/// while `<Vec<T> as Clone>::clone` has `Some(Vec<T>)` for `Self`,
/// even though they both have the same two-segment `Clone::clone` `Path`.
Resolved(Option<P<Ty>>, P<Path>),
- /// Type-related paths, e.g., `<T>::default` or `<T>::Output`.
+ /// Type-related paths (e.g., `<T>::default` or `<T>::Output`).
/// Will be resolved by type-checking to an associated item.
///
/// UFCS source paths can desugar into this, with `Vec::new` turning into
TypeRelative(P<Ty>, P<PathSegment>)
}
-/// Hints at the original code for a let statement
+/// Hints at the original code for a let statement.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum LocalSource {
- /// A `match _ { .. }`
+ /// A plain `let _ = _;` statement.
Normal,
- /// A desugared `for _ in _ { .. }` loop
+ /// A desugared `for _ in _ { .. }` loop.
ForLoopDesugar,
}
-/// Hints at the original code for a `match _ { .. }`
+/// Hints at the original code for a `match _ { .. }`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum MatchSource {
- /// A `match _ { .. }`
+ /// A `match _ { .. }`.
Normal,
- /// An `if let _ = _ { .. }` (optionally with `else { .. }`)
+ /// An `if let _ = _ { .. }` (optionally with `else { .. }`).
IfLetDesugar {
contains_else_clause: bool,
},
/// A `while let _ = _ { .. }` (which was desugared to a
- /// `loop { match _ { .. } }`)
+ /// `loop { match _ { .. } }`).
WhileLetDesugar,
- /// A desugared `for _ in _ { .. }` loop
+ /// A desugared `for _ in _ { .. }` loop.
ForLoopDesugar,
- /// A desugared `?` operator
+ /// A desugared `?` operator.
TryDesugar,
}
-/// The loop type that yielded an ExprKind::Loop
+/// The loop type that yielded an `ExprKind::Loop`.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Debug, Copy)]
pub enum LoopSource {
- /// A `loop { .. }` loop
+ /// A `loop { .. }` loop.
Loop,
- /// A `while let _ = _ { .. }` loop
+ /// A `while let _ = _ { .. }` loop.
WhileLet,
- /// A `for _ in _ { .. }` loop
+ /// A `for _ in _ { .. }` loop.
ForLoop,
}
}
}
-/// Not represented directly in the AST, referred to by name through a ty_path.
+/// Not represented directly in the AST; referred to by name through a `ty_path`.
#[derive(Clone, PartialEq, Eq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum PrimTy {
Int(IntTy),
pub impl_trait_fn: Option<DefId>,
}
+/// The various kinds of types recognized by the compiler.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
-/// The different kinds of types recognized by the compiler
pub enum TyKind {
- /// A variable length slice (`[T]`)
+ /// A variable length slice (i.e., `[T]`).
Slice(P<Ty>),
- /// A fixed length array (`[T; n]`)
+ /// A fixed length array (i.e., `[T; n]`).
Array(P<Ty>, AnonConst),
- /// A raw pointer (`*const T` or `*mut T`)
+ /// A raw pointer (i.e., `*const T` or `*mut T`).
Ptr(MutTy),
- /// A reference (`&'a T` or `&'a mut T`)
+ /// A reference (i.e., `&'a T` or `&'a mut T`).
Rptr(Lifetime, MutTy),
- /// A bare function (e.g., `fn(usize) -> bool`)
+ /// A bare function (e.g., `fn(usize) -> bool`).
BareFn(P<BareFnTy>),
- /// The never type (`!`)
+ /// The never type (`!`).
Never,
- /// A tuple (`(A, B, C, D,...)`)
+ /// A tuple (`(A, B, C, D, ...)`).
Tup(HirVec<Ty>),
/// A path to a type definition (`module::module::...::Type`), or an
- /// associated type, e.g., `<Vec<T> as Trait>::Type` or `<T>::Target`.
+ /// associated type (e.g., `<Vec<T> as Trait>::Type` or `<T>::Target`).
///
/// Type parameters may be stored in each `PathSegment`.
Path(QPath),
/// A type definition itself. This is currently only used for the `existential type`
/// item that `impl Trait` in return position desugars to.
///
- /// The generic arg list are the lifetimes (and in the future possibly parameters) that are
- /// actually bound on the `impl Trait`.
+ /// The generic argument list contains the lifetimes (and in the future possibly parameters)
+ /// that are actually bound on the `impl Trait`.
Def(ItemId, HirVec<GenericArg>),
/// A trait object type `Bound1 + Bound2 + Bound3`
/// where `Bound` is a trait or a lifetime.
TraitObject(HirVec<PolyTraitRef>, Lifetime),
- /// Unused for now
+ /// Unused for now.
Typeof(AnonConst),
/// `TyKind::Infer` means the type should be inferred instead of it having been
/// specified. This can appear anywhere in a type.
pub ctxt: SyntaxContext,
}
-/// represents an argument in a function header
+/// Represents an argument in a function header.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct Arg {
pub pat: P<Pat>,
pub hir_id: HirId,
}
-/// Represents the header (not the body) of a function declaration
+/// Represents the header (not the body) of a function declaration.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct FnDecl {
pub inputs: HirVec<Ty>,
/// closures default to inference. Span points to where return
/// type would be inserted.
DefaultReturn(Span),
- /// Everything else
+ /// Everything else.
Return(P<Ty>),
}
pub ident: Ident,
pub attrs: HirVec<Attribute>,
pub data: VariantData,
- /// Explicit discriminant, e.g., `Foo = 1`
+ /// Explicit discriminant (e.g., `Foo = 1`).
pub disr_expr: Option<AnonConst>,
}
}
impl TraitRef {
- /// Get the `DefId` of the referenced trait. It _must_ actually be a trait or trait alias.
+ /// Gets the `DefId` of the referenced trait. It _must_ actually be a trait or trait alias.
pub fn trait_def_id(&self) -> DefId {
match self.path.def {
Def::Trait(did) => did,
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct PolyTraitRef {
- /// The `'a` in `<'a> Foo<&'a T>`
+ /// The `'a` in `<'a> Foo<&'a T>`.
pub bound_generic_params: HirVec<GenericParam>,
- /// The `Foo<&'a T>` in `<'a> Foo<&'a T>`
+ /// The `Foo<&'a T>` in `<'a> Foo<&'a T>`.
pub trait_ref: TraitRef,
pub span: Span,
pub enum ItemKind {
/// An `extern crate` item, with optional *original* crate name if the crate was renamed.
///
- /// e.g., `extern crate foo` or `extern crate foo_bar as foo`
+ /// E.g., `extern crate foo` or `extern crate foo_bar as foo`.
ExternCrate(Option<Name>),
/// `use foo::bar::*;` or `use foo::bar::baz as quux;`
ItemKind::Union(..) => "union",
ItemKind::Trait(..) => "trait",
ItemKind::TraitAlias(..) => "trait alias",
- ItemKind::Impl(..) => "item",
+ ItemKind::Impl(..) => "impl",
}
}
/// contains the item's id, naturally, but also the item's name and
/// some other high-level details (like whether it is an associated
/// type or method, and whether it is public). This allows other
-/// passes to find the impl they want without loading the id (which
+/// passes to find the impl they want without loading the ID (which
/// means fewer edges in the incremental compilation graph).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct TraitItemRef {
}
/// A reference from an impl to one of its associated items. This
-/// contains the item's id, naturally, but also the item's name and
+/// contains the item's ID, naturally, but also the item's name and
/// some other high-level details (like whether it is an associated
/// type or method, and whether it is public). This allows other
-/// passes to find the impl they want without loading the id (which
+/// passes to find the impl they want without loading the ID (which
/// means fewer edges in the incremental compilation graph).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ImplItemRef {
pub vis: Visibility,
}
-/// An item within an `extern` block
+/// An item within an `extern` block.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum ForeignItemKind {
- /// A foreign function
+ /// A foreign function.
Fn(P<FnDecl>, HirVec<Ident>, Generics),
/// A foreign static item (`static ext: u8`), with optional mutability
- /// (the boolean is true when mutable)
+ /// (the boolean is true when mutable).
Static(P<Ty>, bool),
- /// A foreign type
+ /// A foreign type.
Type,
}
bitflags! {
#[derive(RustcEncodable, RustcDecodable)]
pub struct CodegenFnAttrFlags: u32 {
- /// #[cold], a hint to LLVM that this function, when called, is never on
- /// the hot path
+ /// `#[cold]`: a hint to LLVM that this function, when called, is never on
+ /// the hot path.
const COLD = 1 << 0;
- /// #[allocator], a hint to LLVM that the pointer returned from this
- /// function is never null
+ /// `#[allocator]`: a hint to LLVM that the pointer returned from this
+ /// function is never null.
const ALLOCATOR = 1 << 1;
- /// #[unwind], an indicator that this function may unwind despite what
- /// its ABI signature may otherwise imply
+ /// `#[unwind]`: an indicator that this function may unwind despite what
+ /// its ABI signature may otherwise imply.
const UNWIND = 1 << 2;
- /// #[rust_allocator_nounwind], an indicator that an imported FFI
+ /// `#[rust_allocator_nounwind]`, an indicator that an imported FFI
/// function will never unwind. Probably obsolete by recent changes with
/// #[unwind], but hasn't been removed/migrated yet
const RUSTC_ALLOCATOR_NOUNWIND = 1 << 3;
- /// #[naked], indicates to LLVM that no function prologue/epilogue
- /// should be generated
+ /// `#[naked]`: an indicator to LLVM that no function prologue/epilogue
+ /// should be generated.
const NAKED = 1 << 4;
- /// #[no_mangle], the function's name should be the same as its symbol
+ /// `#[no_mangle]`: an indicator that the function's name should be the same
+ /// as its symbol.
const NO_MANGLE = 1 << 5;
- /// #[rustc_std_internal_symbol], and indicator that this symbol is a
+ /// `#[rustc_std_internal_symbol]`: an indicator that this symbol is a
/// "weird symbol" for the standard library in that it has slightly
/// different linkage, visibility, and reachability rules.
const RUSTC_STD_INTERNAL_SYMBOL = 1 << 6;
- /// #[no_debug], indicates that no debugging information should be
- /// generated for this function by LLVM
+ /// `#[no_debug]`: an indicator that no debugging information should be
+ /// generated for this function by LLVM.
const NO_DEBUG = 1 << 7;
- /// #[thread_local], indicates a static is actually a thread local
+ /// `#[thread_local]`: indicates a static is actually a thread local
/// piece of memory
const THREAD_LOCAL = 1 << 8;
- /// #[used], indicates that LLVM can't eliminate this function (but the
- /// linker can!)
+ /// `#[used]`: indicates that LLVM can't eliminate this function (but the
+ /// linker can!).
const USED = 1 << 9;
}
}
}
}
- /// True if `#[inline]` or `#[inline(always)]` is present.
+ /// Returns `true` if `#[inline]` or `#[inline(always)]` is present.
pub fn requests_inline(&self) -> bool {
match self.inline {
InlineAttr::Hint | InlineAttr::Always => true,
}
}
- /// Return variants that are necessary to exist for the pattern to match.
+ /// Returns variants that are necessary to exist for the pattern to match.
pub fn necessary_variants(&self) -> Vec<DefId> {
let mut variants = vec![];
self.walk(|p| {
/// Checks if the pattern contains any `ref` or `ref mut` bindings, and if
/// yes whether it contains mutable or just immutables ones.
- ///
- /// FIXME(tschottdorf): this is problematic as the HIR is being scraped, but
- /// ref bindings are be implicit after #42640 (default match binding modes).
- ///
- /// See #44848.
+ //
+ // FIXME(tschottdorf): this is problematic as the HIR is being scraped, but
+ // ref bindings will be implicit after #42640 (default match binding modes). See issue #44848.
pub fn contains_explicit_ref_binding(&self) -> Option<hir::Mutability> {
let mut result = None;
self.each_binding(|annotation, _, _, _| {
fn to_stable_hash_key(&self,
hcx: &StableHashingContext<'a>)
-> (DefPathHash, hir::ItemLocalId) {
- let hir::BodyId { node_id } = *self;
- node_id.to_stable_hash_key(hcx)
+ let hir::BodyId { hir_id } = *self;
+ hir_id.to_stable_hash_key(hcx)
}
}
-//! A nice interface for working with the infcx. The basic idea is to
+//! A nice interface for working with the infcx. The basic idea is to
//! do `infcx.at(cause, param_env)`, which sets the "cause" of the
-//! operation as well as the surrounding parameter environment. Then
+//! operation as well as the surrounding parameter environment. Then
//! you can do something like `.sub(a, b)` or `.eq(a, b)` to create a
//! subtype or equality relationship respectively. The first argument
//! is always the "expected" output from the POV of diagnostics.
}
}
- /// Make `a <: b` where `a` may or may not be expected
+ /// Makes `a <: b`, where `a` may or may not be expected.
pub fn sub_exp<T>(self,
a_is_expected: bool,
a: T,
self.trace_exp(a_is_expected, a, b).sub(&a, &b)
}
- /// Make `actual <: expected`. For example, if type-checking a
+ /// Makes `actual <: expected`. For example, if type-checking a
/// call like `foo(x)`, where `foo: fn(i32)`, you might have
/// `sup(i32, x)`, since the "expected" type is the type that
/// appears in the signature.
self.sub_exp(false, actual, expected)
}
- /// Make `expected <: actual`
+ /// Makes `expected <: actual`.
pub fn sub<T>(self,
expected: T,
actual: T)
self.sub_exp(true, expected, actual)
}
- /// Make `expected <: actual`
+ /// Makes `a == b`, where `a` may or may not be expected.
pub fn eq_exp<T>(self,
a_is_expected: bool,
a: T,
self.trace_exp(a_is_expected, a, b).eq(&a, &b)
}
- /// Make `expected <: actual`
+ /// Makes `expected == actual`.
pub fn eq<T>(self,
expected: T,
actual: T)
}
}
- /// Compute the least-upper-bound, or mutual supertype, of two
+ /// Computes the least-upper-bound, or mutual supertype, of two
/// values. The order of the arguments doesn't matter, but since
/// this can result in an error (e.g., if asked to compute LUB of
/// u32 and i32), it is meaningful to call one of them the
self.trace(expected, actual).lub(&expected, &actual)
}
- /// Compute the greatest-lower-bound, or mutual subtype, of two
+ /// Computes the greatest-lower-bound, or mutual subtype, of two
/// values. As with `lub` order doesn't matter, except for error
/// cases.
pub fn glb<T>(self,
}
impl<'a, 'gcx, 'tcx> Trace<'a, 'gcx, 'tcx> {
- /// Make `a <: b` where `a` may or may not be expected (if
+ /// Makes `a <: b` where `a` may or may not be expected (if
/// `a_is_expected` is true, then `a` is expected).
- /// Make `expected <: actual`
+ /// Makes `expected <: actual`.
pub fn sub<T>(self,
a: &T,
b: &T)
})
}
- /// Make `a == b`; the expectation is set by the call to
+ /// Makes `a == b`; the expectation is set by the call to
/// `trace()`.
pub fn eq<T>(self,
a: &T,
}
/// A hacky variant of `canonicalize_query` that does not
- /// canonicalize `'static`. Unfortunately, the existing leak
+ /// canonicalize `'static`. Unfortunately, the existing leak
/// check treaks `'static` differently in some cases (see also
/// #33684), so if we are performing an operation that may need to
/// prove "leak-check" related things, we leave `'static`
/// alone.
- ///
- /// FIXME(#48536) -- once we have universes, we can remove this and just use
- /// `canonicalize_query`.
+ //
+ // FIXME(#48536): once we have universes, we can remove this and just use
+ // `canonicalize_query`.
pub fn canonicalize_hr_query_hack<V>(
&self,
value: &V,
.var_universe(vid)
}
- /// Create a canonical variable (with the given `info`)
+ /// Creates a canonical variable (with the given `info`)
/// representing the region `r`; return a region referencing it.
fn canonical_var_for_region(
&mut self,
///
/// This is only meant to be invoked as part of constructing an
/// inference context at the start of a query (see
- /// `InferCtxtBuilder::enter_with_canonical`). It basically
+ /// `InferCtxtBuilder::enter_with_canonical`). It basically
/// brings the canonical value "into scope" within your new infcx.
///
/// At the end of processing, the substitution S (once
self.var_values.len()
}
- /// Make an identity substitution from this one: each bound var
+ /// Makes an identity substitution from this one: each bound var
/// is matched to the same bound var, preserving the original kinds.
/// For example, if we have:
/// `self.var_values == [Type(u32), Lifetime('a), Type(u64)]`
/// If you DO want to keep track of pending obligations (which
/// include all region obligations, so this includes all cases
/// that care about regions) with this function, you have to
- /// do it yourself, by e.g. having them be a part of the answer.
+ /// do it yourself, by e.g., having them be a part of the answer.
pub fn make_query_response_ignoring_pending_obligations<T>(
&self,
inference_vars: CanonicalVarValues<'tcx>,
/// they should be ignored).
/// - It **can happen** (though it rarely does currently) that
/// equating types and things will give rise to subobligations
- /// that must be processed. In this case, those subobligations
+ /// that must be processed. In this case, those subobligations
/// are propagated back in the return value.
/// - Finally, the query result (of type `R`) is propagated back,
/// after applying the substitution `S`.
/// Given a "guess" at the values for the canonical variables in
/// the input, try to unify with the *actual* values found in the
- /// query result. Often, but not always, this is a no-op, because
+ /// query result. Often, but not always, this is a no-op, because
/// we already found the mapping in the "guessing" step.
///
/// See also: `query_response_substitution_guess`
Glb::new(self, a_is_expected)
}
- /// Here dir is either EqTo, SubtypeOf, or SupertypeOf. The
- /// idea is that we should ensure that the type `a_ty` is equal
+ /// Here, `dir` is either `EqTo`, `SubtypeOf`, or `SupertypeOf`.
+ /// The idea is that we should ensure that the type `a_ty` is equal
/// to, a subtype of, or a supertype of (respectively) the type
/// to which `b_vid` is bound.
///
struct Generalizer<'cx, 'gcx: 'cx+'tcx, 'tcx: 'cx> {
infcx: &'cx InferCtxt<'cx, 'gcx, 'tcx>,
- /// Span, used when creating new type variables and things.
+ /// The span, used when creating new type variables and things.
span: Span,
/// The vid of the type variable that is in the process of being
/// particular around 'bivariant' type parameters that are only
/// constrained by a where-clause. As an example, imagine a type:
///
- /// struct Foo<A, B> where A: Iterator<Item=B> {
+ /// struct Foo<A, B> where A: Iterator<Item = B> {
/// data: A
/// }
///
/// <: ?C`, but no particular relationship between `?B` and `?D`
/// (after all, we do not know the variance of the normalized form
/// of `A::Item` with respect to `A`). If we do nothing else, this
- /// may mean that `?D` goes unconstrained (as in #41677). So, in
+ /// may mean that `?D` goes unconstrained (as in #41677). So, in
/// this scenario where we create a new type variable in a
/// bivariant context, we set the `needs_wf` flag to true. This
/// will force the calling code to check that `WF(Foo<?C, ?D>)`
use super::{InferCtxt, RegionVariableOrigin, SubregionOrigin, TypeTrace, ValuePairs};
use crate::infer::{self, SuppressRegionErrors};
-use crate::errors::{Applicability, DiagnosticBuilder, DiagnosticStyledString};
use crate::hir;
use crate::hir::def_id::DefId;
use crate::hir::Node;
use crate::middle::region;
-use std::{cmp, fmt};
-use syntax_pos::{Pos, Span};
use crate::traits::{ObligationCause, ObligationCauseCode};
use crate::ty::error::TypeError;
use crate::ty::{self, subst::Subst, Region, Ty, TyCtxt, TyKind, TypeFoldable};
+use errors::{Applicability, DiagnosticBuilder, DiagnosticStyledString};
+use std::{cmp, fmt};
+use syntax_pos::{Pos, Span};
mod note;
}
}
}
- ObligationCauseCode::MatchExpressionArm { arm_span, source } => match source {
+ ObligationCauseCode::MatchExpressionArm {
+ source,
+ ref prior_arms,
+ last_ty,
+ ..
+ } => match source {
hir::MatchSource::IfLetDesugar { .. } => {
- let msg = "`if let` arm with an incompatible type";
- if self.tcx.sess.source_map().is_multiline(arm_span) {
- err.span_note(arm_span, msg);
- } else {
- err.span_label(arm_span, msg);
- }
+ let msg = "`if let` arms have incompatible types";
+ err.span_label(cause.span, msg);
}
hir::MatchSource::TryDesugar => {}
_ => {
- let msg = "match arm with an incompatible type";
- if self.tcx.sess.source_map().is_multiline(arm_span) {
- err.span_note(arm_span, msg);
- } else {
- err.span_label(arm_span, msg);
+ let msg = "`match` arms have incompatible types";
+ err.span_label(cause.span, msg);
+ if prior_arms.len() <= 4 {
+ for sp in prior_arms {
+ err.span_label(*sp, format!(
+ "this is found to be of type `{}`",
+ last_ty,
+ ));
+ }
+ } else if let Some(sp) = prior_arms.last() {
+ err.span_label(*sp, format!(
+ "this and all prior arms are found to be of type `{}`", last_ty,
+ ));
}
}
},
None
}
- /// Add a `,` to the type representation only if it is appropriate.
+ /// Adds a `,` to the type representation only if it is appropriate.
fn push_comma(
&self,
value: &mut DiagnosticStyledString,
substs.truncate_to(self.tcx, &generics)
}
- /// Compare two given types, eliding parts that are the same between them and highlighting
+ /// Compares two given types, eliding parts that are the same between them and highlighting
/// relevant differences, and return two representations of those types for highlighted printing.
fn cmp(&self, t1: Ty<'tcx>, t2: Ty<'tcx>) -> (DiagnosticStyledString, DiagnosticStyledString) {
fn equals<'tcx>(a: &Ty<'tcx>, b: &Ty<'tcx>) -> bool {
use crate::ty::{self, Ty, Infer, TyVar};
use syntax::source_map::CompilerDesugaringKind;
use syntax_pos::Span;
-use crate::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
struct FindLocalByTypeVisitor<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
}
impl<'a, 'gcx, 'tcx> FindLocalByTypeVisitor<'a, 'gcx, 'tcx> {
- fn node_matches_type(&mut self, node_id: HirId) -> bool {
+ fn node_matches_type(&mut self, hir_id: HirId) -> bool {
let ty_opt = self.infcx.in_progress_tables.and_then(|tables| {
- tables.borrow().node_id_to_type_opt(node_id)
+ tables.borrow().node_type_opt(hir_id)
});
match ty_opt {
Some(ty) => {
};
if let Some(body_id) = body_id {
- let expr = self.tcx.hir().expect_expr(body_id.node_id);
+ let expr = self.tcx.hir().expect_expr_by_hir_id(body_id.hir_id);
local_visitor.visit_expr(expr);
}
/// x.push(y);
/// ^ ...but data from `y` flows into `x` here
/// }
- /// ````
+ /// ```
///
/// It will later be extended to trait objects.
pub(super) fn try_report_anon_anon_conflict(&self) -> Option<ErrorReported> {
use crate::infer::InferCtxt;
use crate::infer::lexical_region_resolve::RegionResolutionError;
use crate::infer::lexical_region_resolve::RegionResolutionError::*;
-use syntax::source_map::Span;
use crate::ty::{self, TyCtxt};
use crate::util::common::ErrorReported;
+use errors::DiagnosticBuilder;
+use syntax::source_map::Span;
mod different_lifetimes;
mod find_anon_type;
self.infcx.tcx
}
- pub fn try_report_from_nll(&self) -> Option<ErrorReported> {
+ pub fn try_report_from_nll(&self) -> Option<DiagnosticBuilder<'cx>> {
// Due to the improved diagnostics returned by the MIR borrow checker, only a subset of
// the nice region errors are required when running under the MIR borrow checker.
self.try_report_named_anon_conflict()
pub fn try_report(&self) -> Option<ErrorReported> {
self.try_report_from_nll()
+ .map(|mut diag| { diag.emit(); ErrorReported })
.or_else(|| self.try_report_anon_anon_conflict())
.or_else(|| self.try_report_outlives_closure())
.or_else(|| self.try_report_static_impl_trait())
//! where one region is named and the other is anonymous.
use crate::infer::error_reporting::nice_region_error::NiceRegionError;
use crate::ty;
-use crate::util::common::ErrorReported;
-use crate::errors::Applicability;
+use errors::{Applicability, DiagnosticBuilder};
impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
/// When given a `ConcreteFailure` for a function with arguments containing a named region and
/// an anonymous region, emit a descriptive diagnostic error.
- pub(super) fn try_report_named_anon_conflict(&self) -> Option<ErrorReported> {
+ pub(super) fn try_report_named_anon_conflict(&self) -> Option<DiagnosticBuilder<'a>> {
let (span, sub, sup) = self.get_regions();
debug!(
("parameter type".to_owned(), "type".to_owned())
};
- struct_span_err!(
+ let mut diag = struct_span_err!(
self.tcx().sess,
span,
E0621,
"explicit lifetime required in {}",
error_var
- ).span_suggestion(
- new_ty_span,
- &format!("add explicit lifetime `{}` to {}", named, span_label_var),
- new_ty.to_string(),
- Applicability::Unspecified,
- )
- .span_label(span, format!("lifetime `{}` required", named))
- .emit();
- return Some(ErrorReported);
+ );
+
+ diag.span_suggestion(
+ new_ty_span,
+ &format!("add explicit lifetime `{}` to {}", named, span_label_var),
+ new_ty.to_string(),
+ Applicability::Unspecified,
+ )
+ .span_label(span, format!("lifetime `{}` required", named));
+
+ Some(diag)
}
// This method returns whether the given Region is Named
-use crate::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
use crate::hir::def_id::DefId;
use crate::infer::error_reporting::nice_region_error::NiceRegionError;
use crate::infer::lexical_region_resolve::RegionResolutionError;
use crate::ty;
use crate::ty::error::ExpectedFound;
use crate::ty::subst::Substs;
-use crate::util::common::ErrorReported;
use crate::util::ppaux::RegionHighlightMode;
impl NiceRegionError<'me, 'gcx, 'tcx> {
/// When given a `ConcreteFailure` for a function with arguments containing a named region and
/// an anonymous region, emit a descriptive diagnostic error.
- pub(super) fn try_report_placeholder_conflict(&self) -> Option<ErrorReported> {
+ pub(super) fn try_report_placeholder_conflict(&self) -> Option<DiagnosticBuilder<'me>> {
match &self.error {
///////////////////////////////////////////////////////////////////////////
// NB. The ordering of cases in this match is very
trait_def_id: DefId,
expected_substs: &'tcx Substs<'tcx>,
actual_substs: &'tcx Substs<'tcx>,
- ) -> ErrorReported {
+ ) -> DiagnosticBuilder<'me> {
debug!(
"try_report_placeholders_trait(\
vid={:?}, \
any_self_ty_has_vid,
);
- err.emit();
- ErrorReported
+ err
}
/// Add notes with details about the expected and actual trait refs, with attention to cases
use crate::infer::lexical_region_resolve::RegionResolutionError;
use crate::ty::{BoundRegion, FreeRegion, RegionKind};
use crate::util::common::ErrorReported;
-use crate::errors::Applicability;
+use errors::Applicability;
impl<'a, 'gcx, 'tcx> NiceRegionError<'a, 'gcx, 'tcx> {
/// Print the error message for lifetime errors when the return type is a static impl Trait.
// May return None; sometimes the tables are not yet populated.
let ty_hir_id = fn_decl.inputs[index].hir_id;
let arg_ty_span = hir.span(hir.hir_to_node_id(ty_hir_id));
- let ty = tables.node_id_to_type_opt(arg.hir_id)?;
+ let ty = tables.node_type_opt(arg.hir_id)?;
let mut found_anon_region = false;
let new_arg_ty = self.tcx().fold_regions(&ty, &mut false, |r, _| {
if *r == *anon_region {
use crate::middle::region;
use crate::ty::{self, Region};
use crate::ty::error::TypeError;
-use crate::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
pub(super) fn note_region_origin(&self,
/// closure `f`. In our example above, what this closure will do
/// is to unify the expectation (`Option<&[u32]>`) with the actual
/// return type (`Option<?T>`, where `?T` represents the variable
- /// instantiated for `T`). This will cause `?T` to be unified
+ /// instantiated for `T`). This will cause `?T` to be unified
/// with `&?a [u32]`, where `?a` is a fresh lifetime variable. The
/// input type (`?T`) is then returned by `f()`.
///
/// At this point, `fudge_regions_if_ok` will normalize all type
/// variables, converting `?T` to `&?a [u32]` and end the
- /// snapshot. The problem is that we can't just return this type
+ /// snapshot. The problem is that we can't just return this type
/// out, because it references the region variable `?a`, and that
/// region variable was popped when we popped the snapshot.
///
}
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
- /// Replace all regions (resp. types) bound by `binder` with placeholder
+ /// Replaces all regions (resp. types) bound by `binder` with placeholder
/// regions (resp. types) and return a map indicating which bound-region
/// placeholder region. This is the first step of checking subtyping
/// when higher-ranked things are involved.
//! # Lattice Variables
//!
//! This file contains generic code for operating on inference variables
-//! that are characterized by an upper- and lower-bound. The logic and
+//! that are characterized by an upper- and lower-bound. The logic and
//! reasoning is explained in detail in the large comment in `infer.rs`.
//!
//! The code in here is defined quite generically so that it can be
//!
//! Although all the functions are generic, we generally write the
//! comments in a way that is specific to type variables and the LUB
-//! operation. It's just easier that way.
+//! operation. It's just easier that way.
//!
//! In general all of the functions are defined parametrically
//! over a `LatticeValue`, which is a value defined with respect to
-//! The code to do lexical region resolution.
+//! Lexical region resolution.
use crate::infer::region_constraints::Constraint;
use crate::infer::region_constraints::GenericKind;
match *value {
VarValue::Value(_) => { /* Inference successful */ }
VarValue::ErrorValue => {
- /* Inference impossible, this value contains
+ /* Inference impossible: this value contains
inconsistent constraints.
I think that in this case we should report an
- error now---unlike the case above, we can't
+ error now -- unlike the case above, we can't
wait to see whether the user needs the result
- of this variable. The reason is that the mere
+ of this variable. The reason is that the mere
existence of this variable implies that the
region graph is inconsistent, whether or not it
is used.
For example, we may have created a region
variable that is the GLB of two other regions
- which do not have a GLB. Even if that variable
+ which do not have a GLB. Even if that variable
is not used, it implies that those two regions
*should* have a GLB.
pub use self::ValuePairs::*;
pub use crate::ty::IntVarValue;
-use arena::SyncDroplessArena;
-use crate::errors::DiagnosticBuilder;
+use crate::hir;
use crate::hir::def_id::DefId;
use crate::infer::canonical::{Canonical, CanonicalVarValues};
use crate::middle::free_region::RegionRelations;
use crate::middle::lang_items;
use crate::middle::region;
-use rustc_data_structures::unify as ut;
use crate::session::config::BorrowckMode;
-use std::cell::{Cell, Ref, RefCell, RefMut};
-use std::collections::BTreeMap;
-use std::fmt;
-use syntax::ast;
-use syntax_pos::symbol::InternedString;
-use syntax_pos::{self, Span};
use crate::traits::{self, ObligationCause, PredicateObligations, TraitEngine};
use crate::ty::error::{ExpectedFound, TypeError, UnconstrainedNumeric};
use crate::ty::fold::TypeFoldable;
use crate::ty::{FloatVid, IntVid, TyVid};
use crate::util::nodemap::FxHashMap;
+use arena::SyncDroplessArena;
+use errors::DiagnosticBuilder;
+use rustc_data_structures::unify as ut;
+use std::cell::{Cell, Ref, RefCell, RefMut};
+use std::collections::BTreeMap;
+use std::fmt;
+use syntax::ast;
+use syntax_pos::symbol::InternedString;
+use syntax_pos::Span;
+
use self::combine::CombineFields;
use self::lexical_region_resolve::LexicalRegionResolutions;
use self::outlives::env::OutlivesEnvironment;
// for each body-id in this map, which will process the
// obligations within. This is expected to be done 'late enough'
// that all type inference variables have been bound and so forth.
- pub region_obligations: RefCell<Vec<(ast::NodeId, RegionObligation<'tcx>)>>,
+ pub region_obligations: RefCell<Vec<(hir::HirId, RegionObligation<'tcx>)>>,
/// What is the innermost universe we have created? Starts out as
/// `UniverseIndex::root()` but grows from there as we enter
/// replaced with.
pub type PlaceholderMap<'tcx> = BTreeMap<ty::BoundRegion, ty::Region<'tcx>>;
-/// See `error_reporting` module for more details
+/// See the `error_reporting` module for more details.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ValuePairs<'tcx> {
Types(ExpectedFound<Ty<'tcx>>),
/// The trace designates the path through inference that we took to
/// encounter an error or subtyping constraint.
///
-/// See `error_reporting` module for more details.
+/// See the `error_reporting` module for more details.
#[derive(Clone)]
pub struct TypeTrace<'tcx> {
cause: ObligationCause<'tcx>,
}
}
-/// Helper type of a temporary returned by tcx.infer_ctxt().
+/// Helper type of a temporary returned by `tcx.infer_ctxt()`.
/// Necessary because we can't write the following bound:
-/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>).
+/// `F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(InferCtxt<'b, 'gcx, 'tcx>)`.
pub struct InferCtxtBuilder<'a, 'gcx: 'a + 'tcx, 'tcx: 'a> {
global_tcx: TyCtxt<'a, 'gcx, 'gcx>,
arena: SyncDroplessArena,
/// inference context that contains each of the bound values
/// within instantiated as a fresh variable. The `f` closure is
/// invoked with the new infcx, along with the instantiated value
- /// `V` and a substitution `S`. This substitution `S` maps from
+ /// `V` and a substitution `S`. This substitution `S` maps from
/// the bound values in `C` to their instantiated values in `V`
/// (in other words, `S(C) = V`).
pub fn enter_with_canonical<T, R>(
}
}
- /// Extract `value`, registering any obligations into `fulfill_cx`
+ /// Extracts `value`, registering any obligations into `fulfill_cx`.
pub fn into_value_registering_obligations(
self,
infcx: &InferCtxt<'_, '_, 'tcx>,
.commit(region_constraints_snapshot);
}
- /// Execute `f` and commit the bindings
+ /// Executes `f` and commits the bindings.
pub fn commit_unconditionally<R, F>(&self, f: F) -> R
where
F: FnOnce() -> R,
r
}
- /// Execute `f` and commit the bindings if closure `f` returns `Ok(_)`
+ /// Executes `f` and commits the bindings if closure `f` returns `Ok(_)`.
pub fn commit_if_ok<T, E, F>(&self, f: F) -> Result<T, E>
where
F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> Result<T, E>,
r
}
- /// Execute `f` then unroll any bindings it creates
+ /// Executes `f` then unrolls any bindings it creates.
pub fn probe<R, F>(&self, f: F) -> R
where
F: FnOnce(&CombinedSnapshot<'a, 'tcx>) -> R,
self.float_unification_table.borrow_mut().new_key(None)
}
- /// Create a fresh region variable with the next available index.
+ /// Creates a fresh region variable with the next available index.
/// The variable will be created in the maximum universe created
/// thus far, allowing it to name any region created thus far.
pub fn next_region_var(&self, origin: RegionVariableOrigin) -> ty::Region<'tcx> {
self.next_region_var_in_universe(origin, self.universe())
}
- /// Create a fresh region variable with the next available index
+ /// Creates a fresh region variable with the next available index
/// in the given universe; typically, you can use
/// `next_region_var` and just use the maximal universe.
pub fn next_region_var_in_universe(
Substs::for_item(self.tcx, def_id, |param, _| self.var_for_def(span, param))
}
- /// True if errors have been reported since this infcx was
- /// created. This is sometimes used as a heuristic to skip
+ /// Returns `true` if errors have been reported since this infcx was
+ /// created. This is sometimes used as a heuristic to skip
/// reporting errors that often occur as a result of earlier
/// errors, but where it's hard to be 100% sure (e.g., unresolved
/// inference variables, regionck errors).
value.fold_with(&mut r)
}
- /// Returns true if `T` contains unresolved type variables. In the
+ /// Returns `true` if `T` contains unresolved type variables. In the
/// process of visiting `T`, this will resolve (where possible)
/// type variables in `T`, but it never constructs the final,
/// resolved type, so it's more efficient than
self.tcx.replace_bound_vars(value, fld_r, fld_t)
}
- /// See `verify_generic_bound` method in `region_constraints`
+ /// See the [`region_constraints::verify_generic_bound`] method.
pub fn verify_generic_bound(
&self,
origin: SubregionOrigin<'tcx>,
closure_kind_ty.to_opt_closure_kind()
}
- /// Obtain the signature of a closure. For closures, unlike
+ /// Obtain the signature of a closure. For closures, unlike
/// `tcx.fn_sig(def_id)`, this method will work during the
/// type-checking of the enclosing function and return the closure
/// signature in its partially inferred state.
pub fn partially_normalize_associated_types_in<T>(
&self,
span: Span,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
value: &T,
) -> InferOk<'tcx, T>
}
/// Clears the selection, evaluation, and projection caches. This is useful when
- /// repeatedly attempting to select an Obligation while changing only
- /// its ParamEnv, since FulfillmentContext doesn't use 'probe'
+ /// repeatedly attempting to select an `Obligation` while changing only
+ /// its `ParamEnv`, since `FulfillmentContext` doesn't use probing.
pub fn clear_caches(&self) {
self.selection_cache.clear();
self.evaluation_cache.clear();
self.universe.get()
}
- /// Create and return a fresh universe that extends all previous
+ /// Creates and returns a fresh universe that extends all previous
/// universes. Updates `self.universe` to that new universe.
pub fn create_next_universe(&self) -> ty::UniverseIndex {
let u = self.universe.get().next_universe();
/// How are we relating `a` and `b`?
///
- /// - covariant means `a <: b`
- /// - contravariant means `b <: a`
- /// - invariant means `a == b
- /// - bivariant means that it doesn't matter
+ /// - Covariant means `a <: b`.
+ /// - Contravariant means `b <: a`.
+ /// - Invariant means `a == b`.
+ /// - Bivariant means that it doesn't matter.
ambient_variance: ty::Variance,
/// When we pass through a set of binders (e.g., when looking into
- /// a `fn` type), we push a new bound region scope onto here. This
+ /// a `fn` type), we push a new bound region scope onto here. This
/// will contain the instantiated region for each region in those
/// binders. When we then encounter a `ReLateBound(d, br)`, we can
- /// use the debruijn index `d` to find the right scope, and then
+ /// use the De Bruijn index `d` to find the right scope, and then
/// bound region name `br` to find the specific instantiation from
/// within that scope. See `replace_bound_region`.
///
/// Define the normalization strategy to use, eager or lazy.
fn normalization() -> NormalizationStrategy;
- /// Enable some optimizations if we do not expect inference variables
+ /// Enables some optimizations if we do not expect inference variables
/// in the RHS of the relation.
fn forbid_inference_vars() -> bool;
}
/// When we encounter binders during the type traversal, we record
/// the value to substitute for each of the things contained in
/// that binder. (This will be either a universal placeholder or
- /// an existential inference variable.) Given the debruijn index
+ /// an existential inference variable.) Given the De Bruijn index
/// `debruijn` (and name `br`) of some binder we have now
/// encountered, this routine finds the value that we instantiated
/// the region with; to do so, it indexes backwards into the list
/// lifetime parameter on `foo`.)
pub concrete_ty: Ty<'tcx>,
- /// True if the `impl Trait` bounds include region bounds.
+ /// `true` if the `impl Trait` bounds include region bounds.
/// For example, this would be true for:
///
/// fn foo<'a, 'b, 'c>() -> impl Trait<'c> + 'a + 'b
}
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
- /// Replace all opaque types in `value` with fresh inference variables
+ /// Replaces all opaque types in `value` with fresh inference variables
/// and creates appropriate obligations. For example, given the input:
///
/// impl Iterator<Item = impl Debug>
///
/// # Parameters
///
- /// - `parent_def_id` -- the def-id of the function in which the opaque type
+ /// - `parent_def_id` -- the `DefId` of the function in which the opaque type
/// is defined
/// - `body_id` -- the body-id with which the resulting obligations should
/// be associated
pub fn instantiate_opaque_types<T: TypeFoldable<'tcx>>(
&self,
parent_def_id: DefId,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
value: &T,
) -> InferOk<'tcx, (T, OpaqueTypeMap<'tcx>)> {
///
/// # The Problem
///
- /// Let's work through an example to explain how it works. Assume
+ /// Let's work through an example to explain how it works. Assume
/// the current function is as follows:
///
/// ```text
/// replace each of the references (`Foo1<'a>`, `Foo2<'b>`) with
/// fresh inference variables C1 and C2. We wish to use the values
/// of these variables to infer the underlying types of `Foo1` and
- /// `Foo2`. That is, this gives rise to higher-order (pattern) unification
+ /// `Foo2`. That is, this gives rise to higher-order (pattern) unification
/// constraints like:
///
/// ```text
///
/// Ordinarily, the subtyping rules would ensure that these are
/// sufficiently large. But since `impl Bar<'a>` isn't a specific
- /// type per se, we don't get such constraints by default. This
+ /// type per se, we don't get such constraints by default. This
/// is where this function comes into play. It adds extra
/// constraints to ensure that all the regions which appear in the
/// inferred type are regions that could validly appear.
struct Instantiator<'a, 'gcx: 'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
parent_def_id: DefId,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
opaque_types: OpaqueTypeMap<'tcx>,
obligations: Vec<PredicateObligation<'tcx>>,
}
}
-/// Whether `opaque_node_id` is a sibling or a child of a sibling of `def_id`
+/// Returns `true` if `opaque_node_id` is a sibling or a child of a sibling of `def_id`.
///
/// ```rust
/// pub mod foo {
/// }
/// ```
///
-/// Here, `def_id` will be the `DefId` of the existential type `Baz`.
-/// `opaque_node_id` is the `NodeId` of the reference to Baz --
-/// so either the return type of f1 or f2.
-/// We will return true if the reference is within the same module as the existential type
-/// So true for f1, false for f2.
+/// Here, `def_id` is the `DefId` of the existential type `Baz` and `opaque_node_id` is the
+/// `NodeId` of the reference to `Baz` (i.e., the return type of both `f1` and `f2`).
+/// We return `true` if the reference is within the same module as the existential type
+/// (i.e., `true` for `f1`, `false` for `f2`).
pub fn may_define_existential_type(
tcx: TyCtxt<'_, '_, '_>,
def_id: DefId,
use crate::infer::outlives::free_region_map::FreeRegionMap;
use crate::infer::{GenericKind, InferCtxt};
+use crate::hir;
use rustc_data_structures::fx::FxHashMap;
-use syntax::ast;
use syntax_pos::Span;
use crate::traits::query::outlives_bounds::{self, OutlivesBound};
use crate::ty::{self, Ty};
// results when proving outlives obligations like `T: 'x` later
// (e.g., if `T: 'x` must be proven within the body B1, then we
// know it is true if either `'a: 'x` or `'b: 'x`).
- region_bound_pairs_map: FxHashMap<ast::NodeId, RegionBoundPairs<'tcx>>,
+ region_bound_pairs_map: FxHashMap<hir::HirId, RegionBoundPairs<'tcx>>,
// Used to compute `region_bound_pairs_map`: contains the set of
// in-scope region-bound pairs thus far.
}
/// "Region-bound pairs" tracks outlives relations that are known to
-/// be true, either because of explicit where clauses like `T: 'a` or
+/// be true, either because of explicit where-clauses like `T: 'a` or
/// because of implied bounds.
pub type RegionBoundPairs<'tcx> = Vec<(ty::Region<'tcx>, GenericKind<'tcx>)>;
}
/// Borrows current value of the `region_bound_pairs`.
- pub fn region_bound_pairs_map(&self) -> &FxHashMap<ast::NodeId, RegionBoundPairs<'tcx>> {
+ pub fn region_bound_pairs_map(&self) -> &FxHashMap<hir::HirId, RegionBoundPairs<'tcx>> {
&self.region_bound_pairs_map
}
&mut self,
infcx: &InferCtxt<'a, 'gcx, 'tcx>,
fn_sig_tys: &[Ty<'tcx>],
- body_id: ast::NodeId,
+ body_id: hir::HirId,
span: Span,
) {
debug!("add_implied_bounds()");
}
/// Save the current set of region-bound pairs under the given `body_id`.
- pub fn save_implied_bounds(&mut self, body_id: ast::NodeId) {
+ pub fn save_implied_bounds(&mut self, body_id: hir::HirId) {
let old = self.region_bound_pairs_map.insert(
body_id,
self.region_bound_pairs_accum.clone(),
}
}
- /// Compute the least-upper-bound of two free regions. In some
+ /// Computes the least-upper-bound of two free regions. In some
/// cases, this is more conservative than necessary, in order to
/// avoid making arbitrary choices. See
/// `TransitiveRelation::postdom_upper_bound` for more details.
//! fn foo<U, F: for<'a> FnMut(&'a U)>(_f: F) {}
//! ```
//!
-//! the type of the closure's first argument would be `&'a ?U`. We
+//! the type of the closure's first argument would be `&'a ?U`. We
//! might later infer `?U` to something like `&'b u32`, which would
//! imply that `'b: 'a`.
use crate::infer::outlives::verify::VerifyBoundCx;
use crate::infer::{self, GenericKind, InferCtxt, RegionObligation, SubregionOrigin, VerifyBound};
use rustc_data_structures::fx::FxHashMap;
-use syntax::ast;
+use crate::hir;
use crate::traits::ObligationCause;
use crate::ty::outlives::Component;
use crate::ty::{self, Region, Ty, TyCtxt, TypeFoldable};
/// information).
pub fn register_region_obligation(
&self,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
obligation: RegionObligation<'tcx>,
) {
debug!(
}
/// Trait queries just want to pass back type obligations "as is"
- pub fn take_registered_region_obligations(&self) -> Vec<(ast::NodeId, RegionObligation<'tcx>)> {
+ pub fn take_registered_region_obligations(&self) -> Vec<(hir::HirId, RegionObligation<'tcx>)> {
::std::mem::replace(&mut *self.region_obligations.borrow_mut(), vec![])
}
/// processed.
pub fn process_registered_region_obligations(
&self,
- region_bound_pairs_map: &FxHashMap<ast::NodeId, RegionBoundPairs<'tcx>>,
+ region_bound_pairs_map: &FxHashMap<hir::HirId, RegionBoundPairs<'tcx>>,
implicit_region_bound: Option<ty::Region<'tcx>>,
param_env: ty::ParamEnv<'tcx>,
) {
/// This is an "approximate" check -- it may not find all
/// applicable bounds, and not all the bounds it returns can be
/// relied upon. In particular, this check ignores region
- /// identity. So, for example, if we have `<T as
+ /// identity. So, for example, if we have `<T as
/// Trait<'0>>::Item` where `'0` is a region variable, and the
/// user has `<T as Trait<'a>>::Item: 'b` in the environment, then
/// the clause from the environment only applies if `'0 = 'a`,
})
}
- /// Searches the where clauses in scope for regions that
+ /// Searches the where-clauses in scope for regions that
/// `projection_ty` is known to outlive. Currently requires an
/// exact match.
pub fn projection_declared_bounds_from_trait(
.map(move |r| r.subst(tcx, projection_ty.substs))
}
- /// Given the def-id of an associated item, returns any region
+ /// Given the `DefId` of an associated item, returns any region
/// bounds attached to that associated item from the trait definition.
///
/// For example:
/// }
/// ```
///
- /// If we were given the def-id of `Foo::Bar`, we would return
+ /// If we were given the `DefId` of `Foo::Bar`, we would return
/// `'a`. You could then apply the substitutions from the
/// projection to convert this into your namespace. This also
/// works if the user writes `where <Self as Foo<'a>>::Bar: 'a` on
-//! See README.md
+//! See `README.md`.
use self::CombineMapType::*;
use self::UndoLog::*;
pub givens: FxHashSet<(Region<'tcx>, ty::RegionVid)>,
}
-/// A constraint that influences the inference process.
+/// Represents a constraint that influences the inference process.
#[derive(Clone, Copy, PartialEq, Eq, Hash, Debug, PartialOrd, Ord)]
pub enum Constraint<'tcx> {
- /// One region variable is subregion of another
+ /// A region variable is a subregion of another.
VarSubVar(RegionVid, RegionVid),
- /// Concrete region is subregion of region variable
+ /// A concrete region is a subregion of a region variable.
RegSubVar(Region<'tcx>, RegionVid),
- /// Region variable is subregion of concrete region. This does not
+ /// A region variable is a subregion of a concrete region. This does not
/// directly affect inference, but instead is checked after
/// inference is complete.
VarSubReg(RegionVid, Region<'tcx>),
}
}
-/// VerifyGenericBound(T, _, R, RS): The parameter type `T` (or
+/// `VerifyGenericBound(T, _, R, RS)`: the parameter type `T` (or
/// associated type) must outlive the region `R`. `T` is known to
-/// outlive `RS`. Therefore verify that `R <= RS[i]` for some
+/// outlive `RS`. Therefore, verify that `R <= RS[i]` for some
/// `i`. Inference variables may be involved (but this verification
/// step doesn't influence inference).
#[derive(Debug, Clone)]
}
}
-/// Describes the things that some `GenericKind` value G is known to
+/// Describes the things that some `GenericKind` value `G` is known to
/// outlive. Each variant of `VerifyBound` can be thought of as a
/// function:
///
/// following, where `G` is the generic for which this verify
/// bound was created:
///
- /// fn(min) -> bool {
- /// if G == K {
+ /// ```text
+ /// fn(min) -> bool {
+ /// if G == K {
/// B(min)
- /// } else {
+ /// } else {
/// false
- /// }
/// }
+ /// }
+ /// ```
///
/// In other words, if the generic `G` that we are checking is
/// equal to `K`, then check the associated verify bound
/// This is used when we have something in the environment that
/// may or may not be relevant, depending on the region inference
/// results. For example, we may have `where <T as
- /// Trait<'a>>::Item: 'b` in our where clauses. If we are
+ /// Trait<'a>>::Item: 'b` in our where-clauses. If we are
/// generating the verify-bound for `<T as Trait<'0>>::Item`, then
/// this where-clause is only relevant if `'0` winds up inferred
/// to `'a`.
///
/// So we would compile to a verify-bound like
///
- /// IfEq(<T as Trait<'a>>::Item, AnyRegion('a))
+ /// ```text
+ /// IfEq(<T as Trait<'a>>::Item, AnyRegion('a))
+ /// ```
///
/// meaning, if the subject G is equal to `<T as Trait<'a>>::Item`
/// (after inference), and `'a: min`, then `G: min`.
/// Given a region `R`, expands to the function:
///
- /// fn(min) -> bool {
- /// R: min
- /// }
+ /// ```text
+ /// fn(min) -> bool {
+ /// R: min
+ /// }
+ /// ```
///
/// This is used when we can establish that `G: R` -- therefore,
/// if `R: min`, then by transitivity `G: min`.
/// Given a set of bounds `B`, expands to the function:
///
- /// fn(min) -> bool {
- /// exists (b in B) { b(min) }
- /// }
+ /// ```text
+ /// fn(min) -> bool {
+ /// exists (b in B) { b(min) }
+ /// }
+ /// ```
///
/// In other words, if we meet some bound in `B`, that suffices.
- /// This is used when all the bounds in `B` are known to apply to
- /// G.
+ /// This is used when all the bounds in `B` are known to apply to `G`.
AnyBound(Vec<VerifyBound<'tcx>>),
/// Given a set of bounds `B`, expands to the function:
///
- /// fn(min) -> bool {
- /// forall (b in B) { b(min) }
- /// }
+ /// ```text
+ /// fn(min) -> bool {
+ /// forall (b in B) { b(min) }
+ /// }
+ /// ```
///
/// In other words, if we meet *all* bounds in `B`, that suffices.
/// This is used when *some* bound in `B` is known to suffice, but
#[derive(Copy, Clone, PartialEq)]
enum UndoLog<'tcx> {
- /// We added `RegionVid`
+ /// We added `RegionVid`.
AddVar(RegionVid),
- /// We added the given `constraint`
+ /// We added the given `constraint`.
AddConstraint(Constraint<'tcx>),
- /// We added the given `verify`
+ /// We added the given `verify`.
AddVerify(usize),
- /// We added the given `given`
+ /// We added the given `given`.
AddGiven(Region<'tcx>, ty::RegionVid),
- /// We added a GLB/LUB "combination variable"
+ /// We added a GLB/LUB "combination variable".
AddCombination(CombineMapType, TwoRegions<'tcx>),
/// During skolemization, we sometimes purge entries from the undo
/// When working with placeholder regions, we often wish to find all of
/// the regions that are either reachable from a placeholder region, or
/// which can reach a placeholder region, or both. We call such regions
-/// *tainted* regions. This struct allows you to decide what set of
+/// *tainted* regions. This struct allows you to decide what set of
/// tainted regions you want.
#[derive(Debug)]
pub struct TaintDirections {
/// Takes (and clears) the current set of constraints. Note that
/// the set of variables remains intact, but all relationships
- /// between them are reset. This is used during NLL checking to
+ /// between them are reset. This is used during NLL checking to
/// grab the set of constraints that arose from a particular
/// operation.
///
}
}
- /// See `Verify::VerifyGenericBound`
+ /// See [`Verify::VerifyGenericBound`].
pub fn verify_generic_bound(
&mut self,
origin: SubregionOrigin<'tcx>,
}).collect()
}
- /// See [`RegionInference::region_constraints_added_in_snapshot`]
+ /// See [`RegionInference::region_constraints_added_in_snapshot`].
pub fn region_constraints_added_in_snapshot(&self, mark: &RegionSnapshot) -> Option<bool> {
self.undo_log[mark.length..]
.iter()
}
impl<'tcx> RegionConstraintData<'tcx> {
- /// True if this region constraint data contains no constraints.
+ /// Returns `true` if this region constraint data contains no constraints, and `false`
+ /// otherwise.
pub fn is_empty(&self) -> bool {
let RegionConstraintData {
constraints,
self.sub_relations.find(vid)
}
- /// True if `a` and `b` have same "sub-root" (i.e., exists some
+ /// Returns `true` if `a` and `b` have the same "sub-root" (i.e., exists some
/// type X such that `forall i in {a, b}. (i <: X || X <: i)`.
pub fn sub_unified(&mut self, a: ty::TyVid, b: ty::TyVid) -> bool {
self.sub_root_var(a) == self.sub_root_var(b)
}
}
- /// Creates a snapshot of the type variable state. This snapshot
+ /// Creates a snapshot of the type variable state. This snapshot
/// must later be committed (`commit()`) or rolled back
- /// (`rollback_to()`). Nested snapshots are permitted, but must
+ /// (`rollback_to()`). Nested snapshots are permitted, but must
/// be processed in a stack-like fashion.
pub fn snapshot(&mut self) -> Snapshot<'tcx> {
Snapshot {
.collect()
}
- /// Find the set of type variables that existed *before* `s`
+ /// Finds the set of type variables that existed *before* `s`
/// but which have only been unified since `s` started, and
/// return the types with which they were unified. So if we had
/// a type variable `V0`, then we started the snapshot, then we
pub use crate::lint;
}
-use rustc_errors as errors;
-
// FIXME(#27438): right now the unit tests of librustc don't refer to any actual
// functions generated in librustc_data_structures (all
// references are through generic functions), but statics are
//! compiler code, rather than using their own custom pass. Those
//! lints are all available in `rustc_lint::builtin`.
-use crate::errors::{Applicability, DiagnosticBuilder};
use crate::lint::{LintPass, LateLintPass, LintArray};
use crate::session::Session;
+use errors::{Applicability, DiagnosticBuilder};
use syntax::ast;
use syntax::source_map::Span;
}
BuiltinLintDiagnostics::UnusedImports(message, replaces) => {
if !replaces.is_empty() {
- db.multipart_suggestion(
+ db.tool_only_multipart_suggestion(
&message,
replaces,
Applicability::MachineApplicable,
//! The lint checking is mostly consolidated into one pass which runs
//! after all other analyses. Throughout compilation, lint warnings
//! can be added via the `add_lint` method on the Session structure. This
-//! requires a span and an id of the node that the lint is being added to. The
+//! requires a span and an ID of the node that the lint is being added to. The
//! lint isn't actually emitted at that time because it is unknown what the
//! actual lint level at that location is.
//!
//! A context keeps track of the current state of all lint levels.
//! Upon entering a node of the ast which can modify the lint settings, the
//! previous lint state is pushed onto a stack and the ast is then recursed
-//! upon. As the ast is traversed, this keeps track of the current lint level
+//! upon. As the ast is traversed, this keeps track of the current lint level
//! for all lint attributes.
use self::TargetLint::*;
use syntax::ast;
use syntax::edition;
use syntax_pos::{MultiSpan, Span, symbol::{LocalInternedString, Symbol}};
-use crate::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
use crate::hir;
use crate::hir::def_id::LOCAL_CRATE;
use crate::hir::intravisit as hir_visit;
impl<'a, 'tcx> LintContext<'tcx> for LateContext<'a, 'tcx> {
type PassObject = LateLintPassObject;
- /// Get the overall compiler `Session` object.
+ /// Gets the overall compiler `Session` object.
fn sess(&self) -> &Session {
&self.tcx.sess
}
impl<'a> LintContext<'a> for EarlyContext<'a> {
type PassObject = EarlyLintPassObject;
- /// Get the overall compiler `Session` object.
+ /// Gets the overall compiler `Session` object.
fn sess(&self) -> &Session {
&self.sess
}
}
-/// Perform lint checking on a crate.
+/// Performs lint checking on a crate.
///
/// Consumes the `lint_store` field of the `Session`.
pub fn check_crate<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>) {
use std::cmp;
-use crate::errors::{Applicability, DiagnosticBuilder};
use crate::hir::HirId;
use crate::ich::StableHashingContext;
use crate::lint::builtin;
use crate::lint::context::CheckLintNameResult;
use crate::lint::{self, Lint, LintId, Level, LintSource};
+use crate::session::Session;
+use crate::util::nodemap::FxHashMap;
+use errors::{Applicability, DiagnosticBuilder};
use rustc_data_structures::stable_hasher::{HashStable, ToStableHashKey,
StableHasher, StableHasherResult};
-use crate::session::Session;
use syntax::ast;
use syntax::attr;
use syntax::feature_gate;
use syntax::source_map::MultiSpan;
use syntax::symbol::Symbol;
-use crate::util::nodemap::FxHashMap;
pub struct LintLevelSets {
list: Vec<LintSet>,
use rustc_data_structures::sync::{self, Lrc};
-use crate::errors::{DiagnosticBuilder, DiagnosticId};
use crate::hir::def_id::{CrateNum, LOCAL_CRATE};
use crate::hir::intravisit;
use crate::hir;
use crate::lint::builtin::{BuiltinLintDiagnostics, DUPLICATE_MATCHER_BINDING_NAME};
use crate::lint::builtin::parser::{QUESTION_MARK_MACRO_SEP, ILL_FORMED_ATTRIBUTE_INPUT};
use crate::session::{Session, DiagnosticMessageId};
+use crate::ty::TyCtxt;
+use crate::ty::query::Providers;
+use crate::util::nodemap::NodeMap;
+use errors::{DiagnosticBuilder, DiagnosticId};
use std::{hash, ptr};
use syntax::ast;
use syntax::source_map::{MultiSpan, ExpnFormat};
use syntax::edition::Edition;
use syntax::symbol::Symbol;
use syntax_pos::Span;
-use crate::ty::TyCtxt;
-use crate::ty::query::Providers;
-use crate::util::nodemap::NodeMap;
pub use crate::lint::context::{LateContext, EarlyContext, LintContext, LintStore,
check_crate, check_ast_crate, CheckLintNameResult,
/// `default_level`.
pub edition_lint_opts: Option<(Edition, Level)>,
- /// Whether this lint is reported even inside expansions of external macros
+ /// `true` if this lint is reported even inside expansions of external macros.
pub report_in_external_macro: bool,
}
}
}
- /// Get the lint's name, with ASCII letters converted to lowercase.
+ /// Gets the lint's name, with ASCII letters converted to lowercase.
pub fn name_lower(&self) -> String {
self.name.to_ascii_lowercase()
}
}
}
-/// Declare a static item of type `&'static Lint`.
+/// Declares a static item of type `&'static Lint`.
#[macro_export]
macro_rules! declare_lint {
($vis: vis $NAME: ident, $Level: ident, $desc: expr) => (
);
}
-/// Declare a static `LintArray` and return it as an expression.
+/// Declares a static `LintArray` and returns it as an expression.
#[macro_export]
macro_rules! lint_array {
($( $lint:expr ),* ,) => { lint_array!( $($lint),* ) };
pub trait LintPass {
fn name(&self) -> &'static str;
- /// Get descriptions of the lints this `LintPass` object can emit.
+ /// Gets descriptions of the lints this `LintPass` object can emit.
///
/// N.B., there is no enforcement that the object only emits lints it registered.
/// And some `rustc` internal `LintPass`es register lints to be emitted by other
}
impl LintId {
- /// Get the `LintId` for a `Lint`.
+ /// Gets the `LintId` for a `Lint`.
pub fn of(lint: &'static Lint) -> LintId {
LintId {
lint,
self.lint.name
}
- /// Get the name of the lint.
+ /// Gets the name of the lint.
pub fn to_string(&self) -> String {
self.lint.name_lower()
}
});
impl Level {
- /// Convert a level to a lower-case string.
+ /// Converts a level to a lower-case string.
pub fn as_str(self) -> &'static str {
match self {
Allow => "allow",
}
}
- /// Convert a lower-case string to a level.
+ /// Converts a lower-case string to a level.
pub fn from_str(x: &str) -> Option<Level> {
match x {
"allow" => Some(Allow),
fn handle_field_pattern_match(&mut self, lhs: &hir::Pat, def: Def,
pats: &[source_map::Spanned<hir::FieldPat>]) {
- let variant = match self.tables.node_id_to_type(lhs.hir_id).sty {
+ let variant = match self.tables.node_type(lhs.hir_id).sty {
ty::Adt(adt, _) => adt.variant_of_def(def),
_ => span_bug!(lhs.span, "non-ADT in struct pattern")
};
-//! A different sort of visitor for walking fn bodies. Unlike the
+//! A different sort of visitor for walking fn bodies. Unlike the
//! normal visitor, which just walks the entire body in one shot, the
//! `ExprUseVisitor` determines how expressions are being used.
self.consume_expr(&arm.body);
}
- /// Walks a pat that occurs in isolation (i.e., top-level of fn
- /// arg or let binding. *Not* a match arm or nested pat.)
+ /// Walks a pat that occurs in isolation (i.e., top-level of fn argument or
+ /// let binding, and *not* a match arm or nested pat.)
fn walk_irrefutable_pat(&mut self, cmt_discr: mc::cmt<'tcx>, pat: &hir::Pat) {
let mut mode = Unknown;
self.determine_pat_move_mode(cmt_discr.clone(), pat, &mut mode);
-//! This file handles the relationships between free regions --
-//! meaning lifetime parameters. Ordinarily, free regions are
-//! unrelated to one another, but they can be related via implied or
-//! explicit bounds. In that case, we track the bounds using the
-//! `TransitiveRelation` type and use that to decide when one free
-//! region outlives another and so forth.
+//! This module handles the relationships between "free regions", i.e., lifetime parameters.
+//! Ordinarily, free regions are unrelated to one another, but they can be related via implied
+//! or explicit bounds. In that case, we track the bounds using the `TransitiveRelation` type,
+//! and use that to decide when one free region outlives another, and so forth.
use crate::infer::outlives::free_region_map::{FreeRegionMap, FreeRegionRelations};
use crate::hir::def_id::DefId;
/// regions.
///
/// This stuff is a bit convoluted and should be refactored, but as we
-/// move to NLL it'll all go away anyhow.
+/// transition to NLL, it'll all go away anyhow.
pub struct RegionRelations<'a, 'gcx: 'tcx, 'tcx: 'a> {
pub tcx: TyCtxt<'a, 'gcx, 'tcx>,
- /// context used to fetch the region maps
+ /// The context used to fetch the region maps.
pub context: DefId,
- /// region maps for the given context
+ /// The region maps for the given context.
pub region_scope_tree: &'a region::ScopeTree,
- /// free-region relationships
+ /// Free-region relationships.
pub free_regions: &'a FreeRegionMap<'tcx>,
}
}
}
- /// Determines whether one region is a subregion of another. This is intended to run *after
+ /// Determines whether one region is a subregion of another. This is intended to run *after
/// inference* and sadly the logic is somewhat duplicated with the code in infer.rs.
pub fn is_subregion_of(&self,
sub_region: ty::Region<'tcx>,
result
}
- /// Determines whether this free-region is required to be 'static
+ /// Determines whether this free region is required to be `'static`.
fn is_static(&self, super_region: ty::Region<'tcx>) -> bool {
debug!("is_static(super_region={:?})", super_region);
match *super_region {
};
if let Def::Fn(did) = def {
if self.def_id_is_transmute(did) {
- let typ = self.tables.node_id_to_type(expr.hir_id);
+ let typ = self.tables.node_type(expr.hir_id);
let sig = typ.fn_sig(self.tcx);
let from = sig.inputs().skip_binder()[0];
let to = *sig.output().skip_binder();
// (unlike lang features), which means we need to collect them instead.
use crate::ty::TyCtxt;
+use crate::hir::intravisit::{self, NestedVisitorMap, Visitor};
use syntax::symbol::Symbol;
use syntax::ast::{Attribute, MetaItem, MetaItemKind};
use syntax_pos::Span;
-use crate::hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_data_structures::fx::{FxHashSet, FxHashMap};
-use crate::errors::DiagnosticId;
+use errors::DiagnosticId;
pub struct LibFeatures {
// A map from feature to stabilisation version.
-//! A classic liveness analysis based on dataflow over the AST. Computes,
+//! A classic liveness analysis based on dataflow over the AST. Computes,
//! for each local variable in a function, whether that variable is live
-//! at a given point. Program execution points are identified by their
-//! id.
+//! at a given point. Program execution points are identified by their
+//! IDs.
//!
//! # Basic idea
//!
-//! The basic model is that each local variable is assigned an index. We
+//! The basic model is that each local variable is assigned an index. We
//! represent sets of local variables using a vector indexed by this
-//! index. The value in the vector is either 0, indicating the variable
-//! is dead, or the id of an expression that uses the variable.
+//! index. The value in the vector is either 0, indicating the variable
+//! is dead, or the ID of an expression that uses the variable.
//!
-//! We conceptually walk over the AST in reverse execution order. If we
-//! find a use of a variable, we add it to the set of live variables. If
+//! We conceptually walk over the AST in reverse execution order. If we
+//! find a use of a variable, we add it to the set of live variables. If
//! we find an assignment to a variable, we remove it from the set of live
-//! variables. When we have to merge two flows, we take the union of
-//! those two flows---if the variable is live on both paths, we simply
-//! pick one id. In the event of loops, we continue doing this until a
+//! variables. When we have to merge two flows, we take the union of
+//! those two flows -- if the variable is live on both paths, we simply
+//! pick one ID. In the event of loops, we continue doing this until a
//! fixed point is reached.
//!
//! ## Checking initialization
//!
-//! At the function entry point, all variables must be dead. If this is
-//! not the case, we can report an error using the id found in the set of
+//! At the function entry point, all variables must be dead. If this is
+//! not the case, we can report an error using the ID found in the set of
//! live variables, which identifies a use of the variable which is not
//! dominated by an assignment.
//!
//!
//! The actual implementation contains two (nested) walks over the AST.
//! The outer walk has the job of building up the ir_maps instance for the
-//! enclosing function. On the way down the tree, it identifies those AST
+//! enclosing function. On the way down the tree, it identifies those AST
//! nodes and variable IDs that will be needed for the liveness analysis
-//! and assigns them contiguous IDs. The liveness id for an AST node is
-//! called a `live_node` (it's a newtype'd u32) and the id for a variable
-//! is called a `variable` (another newtype'd u32).
+//! and assigns them contiguous IDs. The liveness ID for an AST node is
+//! called a `live_node` (it's a newtype'd `u32`) and the ID for a variable
+//! is called a `variable` (another newtype'd `u32`).
//!
//! On the way back up the tree, as we are about to exit from a function
-//! declaration we allocate a `liveness` instance. Now that we know
+//! declaration we allocate a `liveness` instance. Now that we know
//! precisely how many nodes and variables we need, we can allocate all
-//! the various arrays that we will need to precisely the right size. We then
+//! the various arrays that we will need to precisely the right size. We then
//! perform the actual propagation on the `liveness` instance.
//!
//! This propagation is encoded in the various `propagate_through_*()`
-//! methods. It effectively does a reverse walk of the AST; whenever we
+//! methods. It effectively does a reverse walk of the AST; whenever we
//! reach a loop node, we iterate until a fixed point is reached.
//!
//! ## The `RWU` struct
//! variable `V` (these are encapsulated in the `RWU` struct):
//!
//! - `reader`: the `LiveNode` ID of some node which will read the value
-//! that `V` holds on entry to `N`. Formally: a node `M` such
+//! that `V` holds on entry to `N`. Formally: a node `M` such
//! that there exists a path `P` from `N` to `M` where `P` does not
-//! write `V`. If the `reader` is `invalid_node()`, then the current
+//! write `V`. If the `reader` is `invalid_node()`, then the current
//! value will never be read (the variable is dead, essentially).
//!
//! - `writer`: the `LiveNode` ID of some node which will write the
-//! variable `V` and which is reachable from `N`. Formally: a node `M`
+//! variable `V` and which is reachable from `N`. Formally: a node `M`
//! such that there exists a path `P` from `N` to `M` and `M` writes
-//! `V`. If the `writer` is `invalid_node()`, then there is no writer
+//! `V`. If the `writer` is `invalid_node()`, then there is no writer
//! of `V` that follows `N`.
//!
-//! - `used`: a boolean value indicating whether `V` is *used*. We
+//! - `used`: a boolean value indicating whether `V` is *used*. We
//! distinguish a *read* from a *use* in that a *use* is some read that
-//! is not just used to generate a new value. For example, `x += 1` is
-//! a read but not a use. This is used to generate better warnings.
+//! is not just used to generate a new value. For example, `x += 1` is
+//! a read but not a use. This is used to generate better warnings.
//!
//! ## Special Variables
//!
//! - `fallthrough_ln`: a live node that represents a fallthrough
//!
//! - `clean_exit_var`: a synthetic variable that is only 'read' from the
-//! fallthrough node. It is only live if the function could converge
+//! fallthrough node. It is only live if the function could converge
//! via means other than an explicit `return` expression. That is, it is
//! only dead if the end of the function's block can never be reached.
//! It is the responsibility of typeck to ensure that there are no
use crate::ty::{self, TyCtxt};
use crate::ty::query::Providers;
use crate::lint;
-use crate::errors::Applicability;
use crate::util::nodemap::{NodeMap, HirIdMap, HirIdSet};
+use errors::Applicability;
use std::collections::{BTreeMap, VecDeque};
use std::{fmt, u32};
use std::io::prelude::*;
//! The job of the categorization module is to analyze an expression to
//! determine what kind of memory is used in evaluating it (for example,
//! where dereferences occur and what kind of pointer is dereferenced;
-//! whether the memory is mutable; etc)
+//! whether the memory is mutable, etc.).
//!
//! Categorization effectively transforms all of our expressions into
//! expressions of the following forms (the actual enum has many more
//! | E.comp // access to an interior component
//!
//! Imagine a routine ToAddr(Expr) that evaluates an expression and returns an
-//! address where the result is to be found. If Expr is a place, then this
-//! is the address of the place. If Expr is an rvalue, this is the address of
+//! address where the result is to be found. If Expr is a place, then this
+//! is the address of the place. If `Expr` is an rvalue, this is the address of
//! some temporary spot in memory where the result is stored.
//!
-//! Now, cat_expr() classifies the expression Expr and the address A=ToAddr(Expr)
+//! Now, `cat_expr()` classifies the expression `Expr` and the address `A = ToAddr(Expr)`
//! as follows:
//!
-//! - cat: what kind of expression was this? This is a subset of the
+//! - `cat`: what kind of expression was this? This is a subset of the
//! full expression forms which only includes those that we care about
//! for the purpose of the analysis.
-//! - mutbl: mutability of the address A
-//! - ty: the type of data found at the address A
+//! - `mutbl`: mutability of the address `A`.
+//! - `ty`: the type of data found at the address `A`.
//!
//! The resulting categorization tree differs somewhat from the expressions
-//! themselves. For example, auto-derefs are explicit. Also, an index a[b] is
+//! themselves. For example, auto-derefs are explicit. Also, an index a[b] is
//! decomposed into two operations: a dereference to reach the array data and
//! then an index to jump forward to the relevant item.
//!
// which the value is stored.
//
// *WARNING* The field `cmt.type` is NOT necessarily the same as the
-// result of `node_id_to_type(cmt.id)`.
+// result of `node_type(cmt.id)`.
//
// (FIXME: rewrite the following comment given that `@x` managed
// pointers have been obsolete for quite some time.)
hir_id: hir::HirId)
-> McResult<Ty<'tcx>> {
self.resolve_type_vars_or_error(hir_id,
- self.tables.node_id_to_type_opt(hir_id))
+ self.tables.node_type_opt(hir_id))
}
pub fn expr_ty(&self, expr: &hir::Expr) -> McResult<Ty<'tcx>> {
/// values live long enough; phrased another way, the starting point
/// of each range is not really the important thing in the above
/// picture, but rather the ending point.
-///
-/// FIXME (pnkfelix): This currently derives `PartialOrd` and `Ord` to
-/// placate the same deriving in `ty::FreeRegion`, but we may want to
-/// actually attach a more meaningful ordering to scopes than the one
-/// generated via deriving here.
+//
+// FIXME(pnkfelix): this currently derives `PartialOrd` and `Ord` to
+// placate the same deriving in `ty::FreeRegion`, but we may want to
+// actually attach a more meaningful ordering to scopes than the one
+// generated via deriving here.
#[derive(Clone, PartialEq, PartialOrd, Eq, Ord, Hash, Copy, RustcEncodable, RustcDecodable)]
pub struct Scope {
pub id: hir::ItemLocalId,
///
/// For example, given `{ let (a, b) = EXPR_1; let c = EXPR_2; ... }`:
///
-/// * the subscope with `first_statement_index == 0` is scope of both
+/// * The subscope with `first_statement_index == 0` is scope of both
/// `a` and `b`; it does not include EXPR_1, but does include
/// everything after that first `let`. (If you want a scope that
/// includes EXPR_1 as well, then do not use `Scope::Remainder`,
/// but instead another `Scope` that encompasses the whole block,
/// e.g., `Scope::Node`.
///
-/// * the subscope with `first_statement_index == 1` is scope of `c`,
+/// * The subscope with `first_statement_index == 1` is scope of `c`,
/// and thus does not include EXPR_2, but covers the `...`.
newtype_index! {
static_assert!(ASSERT_SCOPE_DATA: mem::size_of::<ScopeData>() == 4);
impl Scope {
- /// Returns a item-local id associated with this scope.
+ /// Returns an item-local ID associated with this scope.
///
/// N.B., likely to be replaced as API is refined; e.g., pnkfelix
/// anticipates `fn entry_node_id` and `fn each_exit_node_id`.
}
}
- /// Returns the span of this Scope. Note that in general the
- /// returned span may not correspond to the span of any node id in
+ /// Returns the span of this `Scope`. Note that in general the
+ /// returned span may not correspond to the span of any `NodeId` in
/// the AST.
pub fn span(&self, tcx: TyCtxt<'_, '_, '_>, scope_tree: &ScopeTree) -> Span {
let node_id = self.node_id(tcx, scope_tree);
/// have lifetime parameters free in this body.
root_parent: Option<ast::NodeId>,
- /// `parent_map` maps from a scope id to the enclosing scope id;
+ /// `parent_map` maps from a scope ID to the enclosing scope ID;
/// this is usually corresponding to the lexical nesting, though
/// in the case of closures the parent scope is the innermost
/// conditional expression or repeating block. (Note that the
- /// enclosing scope id for the block associated with a closure is
+ /// enclosing scope ID for the block associated with a closure is
/// the closure itself.)
parent_map: FxHashMap<Scope, (Scope, ScopeDepth)>,
- /// `var_map` maps from a variable or binding id to the block in
+ /// `var_map` maps from a variable or binding ID to the block in
/// which that variable is declared.
var_map: FxHashMap<hir::ItemLocalId, Scope>,
- /// maps from a node-id to the associated destruction scope (if any)
+ /// Maps from a `NodeId` to the associated destruction scope (if any).
destruction_scopes: FxHashMap<hir::ItemLocalId, Scope>,
/// `rvalue_scopes` includes entries for those expressions whose cleanup scope is
/// Encodes the hierarchy of fn bodies. Every fn body (including
/// closures) forms its own distinct region hierarchy, rooted in
- /// the block that is the fn body. This map points from the id of
- /// that root block to the id of the root block for the enclosing
+ /// the block that is the fn body. This map points from the ID of
+ /// that root block to the ID of the root block for the enclosing
/// fn, if any. Thus the map structures the fn bodies into a
/// hierarchy based on their lexical mapping. This is used to
/// handle the relationships between regions in a fn and in a
/// upon exiting the parent scope, we cannot statically know how
/// many times the expression executed, and thus if the expression
/// creates temporaries we cannot know statically how many such
- /// temporaries we would have to cleanup. Therefore we ensure that
+ /// temporaries we would have to cleanup. Therefore, we ensure that
/// the temporaries never outlast the conditional/repeating
/// expression, preventing the need for dynamic checks and/or
/// arbitrary amounts of stack space. Terminating scopes end
}
/// Records that `sub_closure` is defined within `sup_closure`. These ids
- /// should be the id of the block that is the fn body, which is
+ /// should be the ID of the block that is the fn body, which is
/// also the root of the region hierarchy for that fn.
fn record_closure_parent(&mut self,
sub_closure: hir::ItemLocalId,
self.is_subscope_of(scope2, scope1)
}
- /// Returns true if `subscope` is equal to or is lexically nested inside `superscope` and false
- /// otherwise.
+ /// Returns `true` if `subscope` is equal to or is lexically nested inside `superscope`, and
+ /// `false` otherwise.
pub fn is_subscope_of(&self,
subscope: Scope,
superscope: Scope)
return true;
}
- /// Returns the id of the innermost containing body
+ /// Returns the ID of the innermost containing body.
pub fn containing_body(&self, mut scope: Scope) -> Option<hir::ItemLocalId> {
loop {
if let ScopeData::CallSite = scope.data {
}
}
- /// Finds the nearest common ancestor of two scopes. That is, finds the
+ /// Finds the nearest common ancestor of two scopes. That is, finds the
/// smallest scope which is greater than or equal to both `scope_a` and
/// `scope_b`.
pub fn nearest_common_ancestor(&self, scope_a: Scope, scope_b: Scope) -> Scope {
visitor.visit_pat(pat);
}
- /// True if `pat` match the `P&` nonterminal:
+ /// Returns `true` if `pat` matches the `P&` non-terminal.
///
/// P& = ref X
/// | StructName { ..., P&, ... }
//! Name resolution for lifetimes follows MUCH simpler rules than the
//! full resolve. For example, lifetime names are never exported or
//! used between functions, and they operate in a purely top-down
-//! way. Therefore we break lifetime name resolution into a separate pass.
+//! way. Therefore, we break lifetime name resolution into a separate pass.
use crate::hir::def::Def;
use crate::hir::def_id::{CrateNum, DefId, LocalDefId, LOCAL_CRATE};
use crate::hir::{GenericArg, GenericParam, ItemLocalId, LifetimeName, Node, ParamName};
use crate::ty::{self, DefIdTree, GenericParamDefKind, TyCtxt};
-use crate::errors::{Applicability, DiagnosticBuilder};
use crate::rustc::lint;
-use rustc_data_structures::sync::Lrc;
use crate::session::Session;
+use crate::util::nodemap::{DefIdMap, FxHashMap, FxHashSet, NodeMap, NodeSet};
+use errors::{Applicability, DiagnosticBuilder};
+use rustc_data_structures::sync::Lrc;
use std::borrow::Cow;
use std::cell::Cell;
use std::mem::replace;
use syntax::ptr::P;
use syntax::symbol::keywords;
use syntax_pos::Span;
-use crate::util::nodemap::{DefIdMap, FxHashMap, FxHashSet, NodeMap, NodeSet};
use crate::hir::intravisit::{self, NestedVisitorMap, Visitor};
use crate::hir::{self, GenericParamKind, LifetimeParamKind};
pub object_lifetime_defaults: NodeMap<Vec<ObjectLifetimeDefault>>,
}
-/// See `NamedRegionMap`.
+/// See [`NamedRegionMap`].
#[derive(Default)]
pub struct ResolveLifetimes {
defs: FxHashMap<LocalDefId, Lrc<FxHashMap<ItemLocalId, Region>>>,
map: &'a mut NamedRegionMap,
scope: ScopeRef<'a>,
- /// Deep breath. Our representation for poly trait refs contains a single
+ /// This is slightly complicated. Our representation for poly-trait-refs contains a single
/// binder and thus we only allow a single level of quantification. However,
/// the syntax of Rust permits quantification in two places, e.g., `T: for <'a> Foo<'a>`
- /// and `for <'a, 'b> &'b T: Foo<'a>`. In order to get the de Bruijn indices
+ /// and `for <'a, 'b> &'b T: Foo<'a>`. In order to get the De Bruijn indices
/// correct when representing these constraints, we should only introduce one
/// scope. However, we want to support both locations for the quantifier and
/// during lifetime resolution we want precise information (so we can't
/// desugar in an earlier phase).
///
- /// SO, if we encounter a quantifier at the outer scope, we set
- /// trait_ref_hack to true (and introduce a scope), and then if we encounter
- /// a quantifier at the inner scope, we error. If trait_ref_hack is false,
+ /// So, if we encounter a quantifier at the outer scope, we set
+ /// `trait_ref_hack` to `true` (and introduce a scope), and then if we encounter
+ /// a quantifier at the inner scope, we error. If `trait_ref_hack` is `false`,
/// then we introduce the scope at the inner quantifier.
- ///
- /// I'm sorry.
trait_ref_hack: bool,
/// Used to disallow the use of in-band lifetimes in `fn` or `Fn` syntax.
/// If early bound lifetimes are present, we separate them into their own list (and likewise
/// for late bound). They will be numbered sequentially, starting from the lowest index that is
/// already in scope (for a fn item, that will be 0, but for a method it might not be). Late
- /// bound lifetimes are resolved by name and associated with a binder id (`binder_id`), so the
+ /// bound lifetimes are resolved by name and associated with a binder ID (`binder_id`), so the
/// ordering is not important there.
fn visit_early_late<F>(
&mut self,
}
}
- /// Returns true if, in the current scope, replacing `'_` would be
+ /// Returns `true` if, in the current scope, replacing `'_` would be
/// equivalent to a single-use lifetime.
fn track_lifetime_uses(&self) -> bool {
let mut scope = self.scope;
/// - it does not appear in a where-clause.
///
/// "Constrained" basically means that it appears in any type but
-/// not amongst the inputs to a projection. In other words, `<&'a
+/// not amongst the inputs to a projection. In other words, `<&'a
/// T as Trait<''b>>::Foo` does not constrain `'a` or `'b`.
fn insert_late_bound_lifetimes(
map: &mut NamedRegionMap,
pub struct DeprecationEntry {
/// The metadata of the attribute associated with this entry.
pub attr: Deprecation,
- /// The def id where the attr was originally attached. `None` for non-local
+ /// The `DefId` where the attr was originally attached. `None` for non-local
/// `DefId`'s.
origin: Option<HirId>,
}
}
impl<'a, 'tcx: 'a> MissingStabilityAnnotations<'a, 'tcx> {
- fn check_missing_stability(&self, id: NodeId, span: Span) {
+ fn check_missing_stability(&self, id: NodeId, span: Span, name: &str) {
let hir_id = self.tcx.hir().node_to_hir_id(id);
let stab = self.tcx.stability().local_stability(hir_id);
let is_error = !self.tcx.sess.opts.test &&
stab.is_none() &&
self.access_levels.is_reachable(id);
if is_error {
- self.tcx.sess.span_err(span, "This node does not have a stability attribute");
+ self.tcx.sess.span_err(
+ span,
+ &format!("{} has missing stability attribute", name),
+ );
}
}
}
// optional. They inherit stability from their parents when unannotated.
hir::ItemKind::Impl(.., None, _, _) | hir::ItemKind::ForeignMod(..) => {}
- _ => self.check_missing_stability(i.id, i.span)
+ _ => self.check_missing_stability(i.id, i.span, i.node.descriptive_variant())
}
intravisit::walk_item(self, i)
}
fn visit_trait_item(&mut self, ti: &'tcx hir::TraitItem) {
- self.check_missing_stability(ti.id, ti.span);
+ self.check_missing_stability(ti.id, ti.span, "item");
intravisit::walk_trait_item(self, ti);
}
fn visit_impl_item(&mut self, ii: &'tcx hir::ImplItem) {
let impl_def_id = self.tcx.hir().local_def_id(self.tcx.hir().get_parent(ii.id));
if self.tcx.impl_trait_ref(impl_def_id).is_none() {
- self.check_missing_stability(ii.id, ii.span);
+ self.check_missing_stability(ii.id, ii.span, "item");
}
intravisit::walk_impl_item(self, ii);
}
fn visit_variant(&mut self, var: &'tcx Variant, g: &'tcx Generics, item_id: NodeId) {
- self.check_missing_stability(var.node.data.id(), var.span);
+ self.check_missing_stability(var.node.data.id(), var.span, "variant");
intravisit::walk_variant(self, var, g, item_id);
}
fn visit_struct_field(&mut self, s: &'tcx StructField) {
- self.check_missing_stability(s.id, s.span);
+ self.check_missing_stability(s.id, s.span, "field");
intravisit::walk_struct_field(self, s);
}
fn visit_foreign_item(&mut self, i: &'tcx hir::ForeignItem) {
- self.check_missing_stability(i.id, i.span);
+ self.check_missing_stability(i.id, i.span, i.node.descriptive_variant());
intravisit::walk_foreign_item(self, i);
}
fn visit_macro_def(&mut self, md: &'tcx hir::MacroDef) {
- self.check_missing_stability(md.id, md.span);
+ self.check_missing_stability(md.id, md.span, "macro");
}
}
};
}
-/// Check whether an item marked with `deprecated(since="X")` is currently
+/// Checks whether an item marked with `deprecated(since="X")` is currently
/// deprecated (i.e., whether X is not greater than the current rustc version).
pub fn deprecation_in_effect(since: &str) -> bool {
fn parse_version(ver: &str) -> Vec<u32> {
// Deprecated attributes apply in-crate and cross-crate.
if let Some(id) = id {
if let Some(depr_entry) = self.lookup_deprecation_entry(def_id) {
- // If the deprecation is scheduled for a future Rust
- // version, then we should display no warning message.
- let deprecated_in_future_version = if let Some(sym) = depr_entry.attr.since {
- let since = sym.as_str();
- if !deprecation_in_effect(&since) {
- Some(since)
- } else {
- None
- }
- } else {
- None
- };
-
let parent_def_id = self.hir().local_def_id(self.hir().get_parent(id));
let skip = self.lookup_deprecation_entry(parent_def_id)
.map_or(false, |parent_depr| parent_depr.same_origin(&depr_entry));
- if let Some(since) = deprecated_in_future_version {
- let path = self.item_path_str(def_id);
- let message = format!("use of item '{}' \
- that will be deprecated in future version {}",
- path,
- since);
-
- lint_deprecated(def_id,
- id,
- depr_entry.attr.note,
- None,
- &message,
- lint::builtin::DEPRECATED_IN_FUTURE);
- } else if !skip {
+ if !skip {
let path = self.item_path_str(def_id);
let message = format!("use of deprecated item '{}'", path);
lint_deprecated(def_id,
tcx,
access_levels,
};
- missing.check_missing_stability(ast::CRATE_NODE_ID, krate.span);
+ missing.check_missing_stability(ast::CRATE_NODE_ID, krate.span, "crate");
intravisit::walk_crate(&mut missing, krate);
krate.visit_all_item_likes(&mut missing.as_deep_visitor());
}
})
}
-/// Returns whether the specified `lang_item` doesn't actually need to be
+/// Returns `true` if the specified `lang_item` doesn't actually need to be
/// present for this compilation.
///
/// Not all lang items are always required for each compilation, particularly in
-//! The virtual memory representation of the MIR interpreter
+//! The virtual memory representation of the MIR interpreter.
use super::{
Pointer, EvalResult, AllocId, ScalarMaybeUndef, write_target_uint, read_target_uint, Scalar,
/// Hook for performing extra checks on a memory read access.
///
/// Takes read-only access to the allocation so we can keep all the memory read
- /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
+ /// operations take `&self`. Use a `RefCell` in `AllocExtra` if you
/// need to mutate.
#[inline(always)]
fn memory_read(
/// Alignment and bounds checks
impl<'tcx, Tag, Extra> Allocation<Tag, Extra> {
- /// Check if the pointer is "in-bounds". Notice that a pointer pointing at the end
+ /// Checks if the pointer is "in-bounds". Notice that a pointer pointing at the end
/// of an allocation (i.e., at the first *inaccessible* location) *is* considered
/// in-bounds! This follows C's/LLVM's rules.
/// If you want to check bounds before doing a memory access, better use `check_bounds`.
ptr.check_in_alloc(Size::from_bytes(allocation_size), InboundsCheck::Live)
}
- /// Check if the memory range beginning at `ptr` and of size `Size` is "in-bounds".
+ /// Checks if the memory range beginning at `ptr` and of size `Size` is "in-bounds".
#[inline(always)]
pub fn check_bounds(
&self,
/// Byte accessors
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
/// The last argument controls whether we error out when there are undefined
- /// or pointer bytes. You should never call this, call `get_bytes` or
+ /// or pointer bytes. You should never call this, call `get_bytes` or
/// `get_bytes_with_undef_and_ptr` instead,
///
/// This function also guarantees that the resulting pointer will remain stable
/// Relocations
impl<'tcx, Tag: Copy, Extra> Allocation<Tag, Extra> {
- /// Return all relocations overlapping with the given ptr-offset pair.
+ /// Returns all relocations overlapping with the given ptr-offset pair.
pub fn relocations(
&self,
cx: &impl HasDataLayout,
self.relocations.range(Size::from_bytes(start)..end)
}
- /// Check that there are no relocations overlapping with the given range.
+ /// Checks that there are no relocations overlapping with the given range.
#[inline(always)]
fn check_relocations(
&self,
}
}
- /// Remove all relocations inside the given range.
+ /// Removes all relocations inside the given range.
/// If there are relocations overlapping with the edges, they
/// are removed as well *and* the bytes they cover are marked as
- /// uninitialized. This is a somewhat odd "spooky action at a distance",
+ /// uninitialized. This is a somewhat odd "spooky action at a distance",
/// but it allows strictly more code to run than if we would just error
/// immediately in that case.
fn clear_relocations(
m
}
- /// Check whether the range `start..end` (end-exclusive) is entirely defined.
+ /// Checks whether the range `start..end` (end-exclusive) is entirely defined.
///
/// Returns `Ok(())` if it's defined. Otherwise returns the index of the byte
/// at which the first undefined access begins.
use backtrace::Backtrace;
use crate::ty::query::TyCtxtAt;
-use crate::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
use syntax_pos::{Pos, Span};
use syntax::ast;
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub enum ErrorHandled {
- /// Already reported a lint or an error for this evaluation
+ /// Already reported a lint or an error for this evaluation.
Reported,
/// Don't emit an error, the evaluation failed because the MIR was generic
/// and the substs didn't fully monomorphize it.
#[derive(Clone, RustcEncodable, RustcDecodable)]
pub enum EvalErrorKind<'tcx, O> {
/// This variant is used by machines to signal their own errors that do not
- /// match an existing variant
+ /// match an existing variant.
MachineError(String),
FunctionAbiMismatch(Abi, Abi),
#[derive(Debug, Clone, Eq, PartialEq, Hash, RustcDecodable, RustcEncodable)]
pub enum AllocKind<'tcx> {
- /// The alloc id is used as a function pointer
+ /// The alloc ID is used as a function pointer
Function(Instance<'tcx>),
- /// The alloc id points to a "lazy" static variable that did not get computed (yet).
+ /// The alloc ID points to a "lazy" static variable that did not get computed (yet).
/// This is also used to break the cycle in recursive statics.
Static(DefId),
- /// The alloc id points to memory
+ /// The alloc ID points to memory.
Memory(&'tcx Allocation),
}
pub struct AllocMap<'tcx> {
- /// Lets you know what an AllocId refers to
+ /// Lets you know what an `AllocId` refers to.
id_to_kind: FxHashMap<AllocId, AllocKind<'tcx>>,
- /// Used to ensure that statics only get one associated AllocId
+ /// Used to ensure that statics only get one associated `AllocId`.
type_interner: FxHashMap<AllocKind<'tcx>, AllocId>,
- /// The AllocId to assign to the next requested id.
+ /// The `AllocId` to assign to the next requested ID.
/// Always incremented, never gets smaller.
next_id: AllocId,
}
}
}
- /// Generate an `AllocId` for a static or return a cached one in case this function has been
+ /// Generates an `AllocId` for a static or returns a cached one in case this function has been
/// called on the same static before.
pub fn intern_static(&mut self, static_id: DefId) -> AllocId {
self.intern(AllocKind::Static(static_id))
pub ty: Ty<'tcx>,
}
-/// Represents a constant value in Rust. Scalar and ScalarPair are optimizations which
-/// matches the LocalState optimizations for easy conversions between Value and ConstValue.
+/// Represents a constant value in Rust. `Scalar` and `ScalarPair` are optimizations that
+/// match the `LocalState` optimizations for easy conversions between `Value` and `ConstValue`.
#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
pub enum ConstValue<'tcx> {
- /// Used only for types with layout::abi::Scalar ABI and ZSTs
+ /// Used only for types with `layout::abi::Scalar` ABI and ZSTs.
///
- /// Not using the enum `Value` to encode that this must not be `Undef`
+ /// Not using the enum `Value` to encode that this must not be `Undef`.
Scalar(Scalar),
- /// Used only for slices and strings (`&[T]`, `&str`, `*const [T]`, `*mut str`, `Box<str>`, ...)
+ /// Used only for slices and strings (`&[T]`, `&str`, `*const [T]`, `*mut str`, `Box<str>`,
+ /// etc.).
///
/// Empty slices don't necessarily have an address backed by an `AllocId`, thus we also need to
/// enable integer pointers. The `Scalar` type covers exactly those two cases. While we could
/// it.
Slice(Scalar, u64),
- /// An allocation + offset into the allocation.
- /// Invariant: The AllocId matches the allocation.
+ /// An allocation together with an offset into the allocation.
+ /// Invariant: the `AllocId` matches the allocation.
ByRef(AllocId, &'tcx Allocation, Size),
}
/// in scope, but a separate set of locals.
pub promoted: IndexVec<Promoted, Mir<'tcx>>,
- /// Yield type of the function, if it is a generator.
+ /// The yield type of the function, if it is a generator.
pub yield_ty: Option<Ty<'tcx>>,
/// Generator drop glue
}
}
- /// Check if `sub` is a sub scope of `sup`
+ /// Checks if `sub` is a sub scope of `sup`
pub fn is_sub_scope(&self, mut sub: SourceScope, sup: SourceScope) -> bool {
while sub != sup {
match self.source_scopes[sub].parent_scope {
true
}
- /// Return the return type, it always return first element from `local_decls` array
+ /// Returns the return type; it always returns the first element from the `local_decls` array.
pub fn return_ty(&self) -> Ty<'tcx> {
self.local_decls[RETURN_PLACE].ty
}
- /// Get the location of the terminator for the given block
+ /// Gets the location of the terminator for the given block
pub fn terminator_loc(&self, bb: BasicBlock) -> Location {
Location {
block: bb,
/// We can also report errors with this kind of borrow differently.
Shallow,
- /// Data must be immutable but not aliasable. This kind of borrow
+ /// Data must be immutable but not aliasable. This kind of borrow
/// cannot currently be expressed by the user and is used only in
/// implicit closure bindings. It is needed when the closure is
/// borrowing or mutating a mutable referent, e.g.:
/// Data is mutable and not aliasable.
Mut {
- /// True if this borrow arose from method-call auto-ref
- /// (i.e., `adjustment::Adjust::Borrow`)
+ /// `true` if this borrow arose from method-call auto-ref
+ /// (i.e., `adjustment::Adjust::Borrow`).
allow_two_phase_borrow: bool,
},
}
/// If an explicit type was provided for this variable binding,
/// this holds the source Span of that type.
///
- /// NOTE: If you want to change this to a `HirId`, be wary that
+ /// NOTE: if you want to change this to a `HirId`, be wary that
/// doing so breaks incremental compilation (as of this writing),
/// while a `Span` does not cause our tests to fail.
pub opt_ty_info: Option<Span>,
/// `ClearCrossCrate` as long as it carries as `HirId`.
pub is_user_variable: Option<ClearCrossCrate<BindingForm<'tcx>>>,
- /// True if this is an internal local
+ /// `true` if this is an internal local.
///
/// These locals are not based on types in the source code and are only used
/// for a few desugarings at the moment.
}
impl<'tcx> LocalDecl<'tcx> {
- /// Returns true only if local is a binding that can itself be
+ /// Returns `true` only if local is a binding that can itself be
/// made mutable via the addition of the `mut` keyword, namely
/// something like the occurrences of `x` in:
/// - `fn foo(x: Type) { ... }`,
}
}
- /// Returns true if local is definitely not a `ref ident` or
+ /// Returns `true` if local is definitely not a `ref ident` or
/// `ref mut ident` binding. (Such bindings cannot be made into
/// mutable bindings, but the inverse does not necessarily hold).
pub fn is_nonref_binding(&self) -> bool {
}
}
- /// Create a new `LocalDecl` for a temporary.
+ /// Creates a new `LocalDecl` for a temporary.
#[inline]
pub fn new_temp(ty: Ty<'tcx>, span: Span) -> Self {
Self::new_local(ty, Mutability::Mut, false, span)
self
}
- /// Create a new `LocalDecl` for a internal temporary.
+ /// Creates a new `LocalDecl` for an internal temporary.
#[inline]
pub fn new_internal(ty: Ty<'tcx>, span: Span) -> Self {
Self::new_local(ty, Mutability::Mut, true, span)
/// Terminator for this block.
///
- /// NB. This should generally ONLY be `None` during construction.
+ /// N.B., this should generally ONLY be `None` during construction.
/// Therefore, you should generally access it via the
/// `terminator()` or `terminator_mut()` methods. The only
/// exception is that certain passes, such as `simplify_cfg`, swap
}
}
- /// Return the list of labels for the edges to the successor basic blocks.
+ /// Returns the list of labels for the edges to the successor basic blocks.
pub fn fmt_successor_labels(&self) -> Vec<Cow<'static, str>> {
use self::TerminatorKind::*;
match *self {
/// error messages to these specific patterns.
///
/// Note that this also is emitted for regular `let` bindings to ensure that locals that are
- /// never accessed still get some sanity checks for e.g. `let x: ! = ..;`
+ /// never accessed still get some sanity checks for, e.g., `let x: ! = ..;`
FakeRead(FakeReadCause, Place<'tcx>),
/// Write the discriminant for a variant to the enum Place.
/// End the current live range for the storage of the local.
StorageDead(Local),
- /// Execute a piece of inline Assembly.
+ /// Executes a piece of inline assembly.
InlineAsm {
asm: Box<InlineAsm>,
outputs: Box<[Place<'tcx>]>,
inputs: Box<[(Span, Operand<'tcx>)]>,
},
- /// Retag references in the given place, ensuring they got fresh tags. This is
+ /// Retag references in the given place, ensuring they got fresh tags. This is
/// part of the Stacked Borrows model. These statements are currently only interpreted
/// by miri and only generated when "-Z mir-emit-retag" is passed.
/// See <https://internals.rust-lang.org/t/stacked-borrows-an-aliasing-model-for-rust/8153/>
Projection(Box<PlaceProjection<'tcx>>),
}
-/// The def-id of a static, along with its normalized type (which is
+/// The `DefId` of a static, along with its normalized type (which is
/// stored to avoid requiring normalization when reading MIR).
#[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, RustcEncodable, RustcDecodable)]
pub struct Static<'tcx> {
Place::Projection(Box::new(PlaceProjection { base: self, elem }))
}
- /// Find the innermost `Local` from this `Place`, *if* it is either a local itself or
+ /// Finds the innermost `Local` from this `Place`, *if* it is either a local itself or
/// a single deref of a local.
- ///
- /// FIXME: can we safely swap the semantics of `fn base_local` below in here instead?
+ //
+ // FIXME: can we safely swap the semantics of `fn base_local` below in here instead?
pub fn local(&self) -> Option<Local> {
match self {
Place::Local(local) |
}
}
- /// Find the innermost `Local` from this `Place`.
+ /// Finds the innermost `Local` from this `Place`.
pub fn base_local(&self) -> Option<Local> {
match self {
Place::Local(local) => Some(*local),
impl<'tcx> Operand<'tcx> {
/// Convenience helper to make a constant that refers to the fn
- /// with given def-id and substs. Since this is used to synthesize
+ /// with given `DefId` and substs. Since this is used to synthesize
/// MIR, assumes `user_ty` is None.
pub fn function_handle<'a>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
/// be defined to return, say, a 0) if ADT is not an enum.
Discriminant(Place<'tcx>),
- /// Create an aggregate value, like a tuple or struct. This is
+ /// Creates an aggregate value, like a tuple or struct. This is
/// only needed because we want to distinguish `dest = Foo { x:
/// ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case
/// that `Foo` has a destructor. These rvalues can be optimized
pub enum CastKind {
Misc,
- /// Convert unique, zero-sized type for a fn to fn()
+ /// Converts unique, zero-sized type for a fn to fn()
ReifyFnPointer,
- /// Convert non capturing closure to fn()
+ /// Converts non capturing closure to fn()
ClosureFnPointer,
- /// Convert safe fn() to unsafe fn()
+ /// Converts safe fn() to unsafe fn()
UnsafeFnPointer,
/// "Unsize" -- convert a thin-or-fat pointer to a fat pointer.
#[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
pub enum NullOp {
- /// Return the size of a value of that type
+ /// Returns the size of a value of that type
SizeOf,
- /// Create a new uninitialized box for a value of that type
+ /// Creates a new uninitialized box for a value of that type
Box,
}
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum UnsafetyViolationKind {
General,
- /// Permitted in const fn and regular fns
+ /// Permitted in const fn and regular fns.
GeneralAndConstFn,
ExternStatic(ast::NodeId),
BorrowPacked(ast::NodeId),
/// After we borrow check a closure, we are left with various
/// requirements that we have inferred between the free regions that
-/// appear in the closure's signature or on its field types. These
+/// appear in the closure's signature or on its field types. These
/// requirements are then verified and proved by the closure's
/// creating function. This struct encodes those requirements.
///
/// internally within the rest of the NLL code).
#[derive(Clone, Debug, RustcEncodable, RustcDecodable)]
pub struct ClosureRegionRequirements<'gcx> {
- /// The number of external regions defined on the closure. In our
+ /// The number of external regions defined on the closure. In our
/// example above, it would be 3 -- one for `'static`, then `'1`
/// and `'2`. This is just used for a sanity check later on, to
/// make sure that the number of regions we see at the callsite
pub struct CodegenUnit<'tcx> {
/// A name for this CGU. Incremental compilation requires that
- /// name be unique amongst **all** crates. Therefore, it should
+ /// name be unique amongst **all** crates. Therefore, it should
/// contain something unique to this crate (e.g., a module path)
/// as well as the crate name and disambiguator.
name: InternedString,
}
#[inline]
- /// Returns whether this rvalue is deeply initialized (most rvalues) or
+ /// Returns `true` if this rvalue is deeply initialized (most rvalues) or
/// whether its only shallowly initialized (`Rvalue::Box`).
pub fn initialization_state(&self) -> RvalueInitializationState {
match *self {
use syntax::symbol::Symbol;
use syntax::feature_gate::UnstableFeatures;
-use crate::errors::{ColorConfig, FatalError, Handler};
+use errors::{ColorConfig, FatalError, Handler};
use getopts;
use std::collections::{BTreeMap, BTreeSet};
}
#[derive(Clone, PartialEq, Hash)]
-pub enum CrossLangLto {
+pub enum LinkerPluginLto {
LinkerPlugin(PathBuf),
LinkerPluginAuto,
Disabled
}
-impl CrossLangLto {
+impl LinkerPluginLto {
pub fn enabled(&self) -> bool {
match *self {
- CrossLangLto::LinkerPlugin(_) |
- CrossLangLto::LinkerPluginAuto => true,
- CrossLangLto::Disabled => false,
+ LinkerPluginLto::LinkerPlugin(_) |
+ LinkerPluginLto::LinkerPluginAuto => true,
+ LinkerPluginLto::Disabled => false,
}
}
}
}
pub enum Input {
- /// Load source from file
+ /// Loads source from file
File(PathBuf),
Str {
/// String that is shown in place of a filename
.unwrap_or_else(|| self.temp_path(flavor, None))
}
- /// Get the path where a compilation artifact of the given type for the
+ /// Gets the path where a compilation artifact of the given type for the
/// given codegen unit should be placed on disk. If codegen_unit_name is
/// None, a path distinct from those of any codegen unit will be generated.
pub fn temp_path(&self, flavor: OutputType, codegen_unit_name: Option<&str>) -> PathBuf {
}
/// Like temp_path, but also supports things where there is no corresponding
- /// OutputType, like no-opt-bitcode or lto-bitcode.
+ /// OutputType, like noopt-bitcode or lto-bitcode.
pub fn temp_path_ext(&self, ext: &str, codegen_unit_name: Option<&str>) -> PathBuf {
let base = self.out_directory.join(&self.filestem());
}
impl Options {
- /// True if there is a reason to build the dep graph.
+ /// Returns `true` if there is a reason to build the dep graph.
pub fn build_dep_graph(&self) -> bool {
self.incremental.is_some() || self.debugging_opts.dump_dep_graph
|| self.debugging_opts.query_dep_graph
FilePathMapping::new(self.remap_path_prefix.clone())
}
- /// True if there will be an output file generated
+ /// Returns `true` if there will be an output file generated
pub fn will_create_output_file(&self) -> bool {
!self.debugging_opts.parse_only && // The file is just being parsed
!self.debugging_opts.ls // The file is just being queried
pub const parse_lto: Option<&str> =
Some("either a boolean (`yes`, `no`, `on`, `off`, etc), `thin`, \
`fat`, or omitted");
- pub const parse_cross_lang_lto: Option<&str> =
+ pub const parse_linker_plugin_lto: Option<&str> =
Some("either a boolean (`yes`, `no`, `on`, `off`, etc), \
or the path to the linker plugin");
pub const parse_merge_functions: Option<&str> =
#[allow(dead_code)]
mod $mod_set {
- use super::{$struct_name, Passes, Sanitizer, LtoCli, CrossLangLto};
+ use super::{$struct_name, Passes, Sanitizer, LtoCli, LinkerPluginLto};
use rustc_target::spec::{LinkerFlavor, MergeFunctions, PanicStrategy, RelroLevel};
use std::path::PathBuf;
use std::str::FromStr;
true
}
- fn parse_cross_lang_lto(slot: &mut CrossLangLto, v: Option<&str>) -> bool {
+ fn parse_linker_plugin_lto(slot: &mut LinkerPluginLto, v: Option<&str>) -> bool {
if v.is_some() {
let mut bool_arg = None;
if parse_opt_bool(&mut bool_arg, v) {
*slot = if bool_arg.unwrap() {
- CrossLangLto::LinkerPluginAuto
+ LinkerPluginLto::LinkerPluginAuto
} else {
- CrossLangLto::Disabled
+ LinkerPluginLto::Disabled
};
return true
}
}
*slot = match v {
- None => CrossLangLto::LinkerPluginAuto,
- Some(path) => CrossLangLto::LinkerPlugin(PathBuf::from(path)),
+ None => LinkerPluginLto::LinkerPluginAuto,
+ Some(path) => LinkerPluginLto::LinkerPlugin(PathBuf::from(path)),
};
true
}
"allow the linker to link its default libraries"),
linker_flavor: Option<LinkerFlavor> = (None, parse_linker_flavor, [UNTRACKED],
"Linker flavor"),
+ linker_plugin_lto: LinkerPluginLto = (LinkerPluginLto::Disabled,
+ parse_linker_plugin_lto, [TRACKED],
+ "generate build artifacts that are compatible with linker-based LTO."),
+
}
options! {DebuggingOptions, DebuggingSetter, basic_debugging_options,
"show extended diagnostic help"),
continue_parse_after_error: bool = (false, parse_bool, [TRACKED],
"attempt to recover from parse errors (experimental)"),
+ dep_tasks: bool = (false, parse_bool, [UNTRACKED],
+ "print tasks that execute and the color their dep node gets (requires debug build)"),
incremental: Option<String> = (None, parse_opt_string, [UNTRACKED],
"enable incremental compilation (experimental)"),
incremental_queries: bool = (true, parse_bool, [UNTRACKED],
"make the current crate share its generic instantiations"),
chalk: bool = (false, parse_bool, [TRACKED],
"enable the experimental Chalk-based trait solving engine"),
- cross_lang_lto: CrossLangLto = (CrossLangLto::Disabled, parse_cross_lang_lto, [TRACKED],
- "generate build artifacts that are compatible with linker-based LTO."),
no_parallel_llvm: bool = (false, parse_bool, [UNTRACKED],
"don't run LLVM in parallel (while keeping codegen-units and ThinLTO)"),
no_leak_check: bool = (false, parse_bool, [UNTRACKED],
use std::path::PathBuf;
use std::collections::hash_map::DefaultHasher;
use super::{CrateType, DebugInfo, ErrorOutputType, OptLevel, OutputTypes,
- Passes, Sanitizer, LtoCli, CrossLangLto};
+ Passes, Sanitizer, LtoCli, LinkerPluginLto};
use syntax::feature_gate::UnstableFeatures;
use rustc_target::spec::{MergeFunctions, PanicStrategy, RelroLevel, TargetTriple};
use syntax::edition::Edition;
impl_dep_tracking_hash_via_hash!(Option<Sanitizer>);
impl_dep_tracking_hash_via_hash!(TargetTriple);
impl_dep_tracking_hash_via_hash!(Edition);
- impl_dep_tracking_hash_via_hash!(CrossLangLto);
+ impl_dep_tracking_hash_via_hash!(LinkerPluginLto);
impl_dep_tracking_hash_for_sortable_vec_of!(String);
impl_dep_tracking_hash_for_sortable_vec_of!(PathBuf);
#[cfg(test)]
mod tests {
- use crate::errors;
use getopts;
use crate::lint;
use crate::middle::cstore;
use crate::session::config::{build_configuration, build_session_options_and_crate_config};
- use crate::session::config::{LtoCli, CrossLangLto};
+ use crate::session::config::{LtoCli, LinkerPluginLto};
use crate::session::build_session;
use crate::session::search_paths::SearchPath;
use std::collections::{BTreeMap, BTreeSet};
opts = reference.clone();
opts.cg.panic = Some(PanicStrategy::Abort);
assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
+
+ opts = reference.clone();
+ opts.cg.linker_plugin_lto = LinkerPluginLto::LinkerPluginAuto;
+ assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
}
#[test]
opts.debugging_opts.relro_level = Some(RelroLevel::Full);
assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
- opts = reference.clone();
- opts.debugging_opts.cross_lang_lto = CrossLangLto::LinkerPluginAuto;
- assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
-
opts = reference.clone();
opts.debugging_opts.merge_functions = Some(MergeFunctions::Disabled);
assert!(reference.dep_tracking_hash() != opts.dep_tracking_hash());
use crate::lint::builtin::BuiltinLintDiagnostics;
use crate::middle::allocator::AllocatorKind;
use crate::middle::dependency_format;
-use crate::session::config::{OutputType, Lto};
+use crate::session::config::OutputType;
use crate::session::search_paths::{PathKind, SearchPath};
use crate::util::nodemap::{FxHashMap, FxHashSet};
use crate::util::common::{duration_to_secs_str, ErrorReported};
Ordering::SeqCst,
};
-use crate::errors::{self, DiagnosticBuilder, DiagnosticId, Applicability};
-use crate::errors::emitter::{Emitter, EmitterWriter};
+use errors::{DiagnosticBuilder, DiagnosticId, Applicability};
+use errors::emitter::{Emitter, EmitterWriter};
use syntax::ast::{self, NodeId};
use syntax::edition::Edition;
use syntax::feature_gate::{self, AttributeType};
pub mod search_paths;
pub struct OptimizationFuel {
- /// If -zfuel=crate=n is specified, initially set to n. Otherwise 0.
+ /// If `-zfuel=crate=n` is specified, initially set to `n`, otherwise `0`.
remaining: u64,
/// We're rejecting all further optimizations.
out_of_fuel: bool,
pub host: Target,
pub opts: config::Options,
pub host_tlib_path: SearchPath,
- /// This is `None` if the host and target are the same.
+ /// `None` if the host and target are the same.
pub target_tlib_path: Option<SearchPath>,
pub parse_sess: ParseSess,
pub sysroot: PathBuf,
/// The maximum length of types during monomorphization.
pub type_length_limit: Once<usize>,
- /// The maximum number of stackframes allowed in const eval
+ /// The maximum number of stackframes allowed in const eval.
pub const_eval_stack_frame_limit: usize,
/// The metadata::creader module may inject an allocator/panic_runtime
/// `-Zquery-dep-graph` is specified.
pub cgu_reuse_tracker: CguReuseTracker,
- /// Used by -Z profile-queries in util::common
+ /// Used by `-Z profile-queries` in `util::common`.
pub profile_channel: Lock<Option<mpsc::Sender<ProfileQueriesMsg>>>,
- /// Used by -Z self-profile
+ /// Used by `-Z self-profile`.
pub self_profiling_active: bool,
- /// Used by -Z self-profile
+ /// Used by `-Z self-profile`.
pub self_profiling: Lock<SelfProfiler>,
/// Some measurements that are being gathered during compilation.
next_node_id: OneThread<Cell<ast::NodeId>>,
- /// If -zfuel=crate=n is specified, Some(crate).
+ /// If `-zfuel=crate=n` is specified, `Some(crate)`.
optimization_fuel_crate: Option<String>,
- /// Tracks fuel info if If -zfuel=crate=n is specified
+ /// Tracks fuel info if `-zfuel=crate=n` is specified.
optimization_fuel: Lock<OptimizationFuel>,
// The next two are public because the driver needs to read them.
- /// If -zprint-fuel=crate, Some(crate).
+ /// If `-zprint-fuel=crate`, `Some(crate)`.
pub print_fuel_crate: Option<String>,
/// Always set to zero and incremented so that we can print fuel expended by a crate.
pub print_fuel: AtomicU64,
/// false positives about a job server in our environment.
pub jobserver: Client,
- /// Metadata about the allocators for the current crate being compiled
+ /// Metadata about the allocators for the current crate being compiled.
pub has_global_allocator: Once<bool>,
- /// Metadata about the panic handlers for the current crate being compiled
+ /// Metadata about the panic handlers for the current crate being compiled.
pub has_panic_handler: Once<bool>,
/// Cap lint level specified by a driver specifically.
}
pub struct PerfStats {
- /// The accumulated time spent on computing symbol hashes
+ /// The accumulated time spent on computing symbol hashes.
pub symbol_hash_time: Lock<Duration>,
- /// The accumulated time spent decoding def path tables from metadata
+ /// The accumulated time spent decoding def path tables from metadata.
pub decode_def_path_tables_time: Lock<Duration>,
/// Total number of values canonicalized queries constructed.
pub queries_canonicalized: AtomicUsize,
self.opts.debugging_opts.print_llvm_passes
}
- /// Get the features enabled for the current compilation session.
+ /// Gets the features enabled for the current compilation session.
/// DO NOT USE THIS METHOD if there is a TyCtxt available, as it circumvents
/// dependency tracking. Use tcx.features() instead.
#[inline]
self.opts.edition
}
- /// True if we cannot skip the PLT for shared library calls.
+ /// Returns `true` if we cannot skip the PLT for shared library calls.
pub fn needs_plt(&self) -> bool {
// Check if the current target usually needs PLT to be enabled.
// The user can use the command line flag to override it.
// If it is useful to have a Session available already for validating a
// commandline argument, you can do so here.
fn validate_commandline_args_with_session_available(sess: &Session) {
-
- if sess.opts.incremental.is_some() {
- match sess.lto() {
- Lto::Thin |
- Lto::Fat => {
- sess.err("can't perform LTO when compiling incrementally");
- }
- Lto::ThinLocal |
- Lto::No => {
- // This is fine
- }
- }
- }
-
// Since we don't know if code in an rlib will be linked to statically or
// dynamically downstream, rustc generates `__imp_` symbols that help the
// MSVC linker deal with this lack of knowledge (#27438). Unfortunately,
// bitcode during ThinLTO. Therefore we disallow dynamic linking on MSVC
// when compiling for LLD ThinLTO. This way we can validly just not generate
// the `dllimport` attributes and `__imp_` symbols in that case.
- if sess.opts.debugging_opts.cross_lang_lto.enabled() &&
+ if sess.opts.cg.linker_plugin_lto.enabled() &&
sess.opts.cg.prefer_dynamic &&
sess.target.target.options.is_like_msvc {
sess.err("Linker plugin based LTO is not supported together with \
AutoTraitFinder { tcx }
}
- /// Make a best effort to determine whether and under which conditions an auto trait is
+ /// Makes a best effort to determine whether and under which conditions an auto trait is
/// implemented for a type. For example, if you have
///
/// ```
full_env,
ty,
trait_did,
- ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID),
+ ObligationCause::misc(DUMMY_SP, hir::DUMMY_HIR_ID),
);
fulfill.select_all_or_error(&infcx).unwrap_or_else(|e| {
panic!(
user_env.caller_bounds.iter().cloned().collect();
let mut new_env = param_env.clone();
- let dummy_cause = ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID);
+ let dummy_cause = ObligationCause::misc(DUMMY_SP, hir::DUMMY_HIR_ID);
while let Some(pred) = predicates.pop_front() {
infcx.clear_caches();
select: &mut SelectionContext<'c, 'd, 'cx>,
only_projections: bool,
) -> bool {
- let dummy_cause = ObligationCause::misc(DUMMY_SP, ast::DUMMY_NODE_ID);
+ let dummy_cause = ObligationCause::misc(DUMMY_SP, hir::DUMMY_HIR_ID);
for (obligation, mut predicate) in nested
.map(|o| (o.clone(), o.predicate.clone()))
use crate::ty::subst::{Subst, Substs};
use crate::ty::fold::TypeFoldable;
-/// Attempts to resolve an obligation to a vtable.. The result is
-/// a shallow vtable resolution -- meaning that we do not
+/// Attempts to resolve an obligation to a vtable. The result is
+/// a shallow vtable resolution, meaning that we do not
/// (necessarily) resolve all nested obligations on the impl. Note
/// that type check should guarantee to us that all nested
/// obligations *could be* resolved if we wanted to.
-//! See rustc guide chapters on [trait-resolution] and [trait-specialization] for more info on how
+//! See Rustc Guide chapters on [trait-resolution] and [trait-specialization] for more info on how
//! this works.
//!
//! [trait-resolution]: https://rust-lang.github.io/rustc-guide/traits/resolution.html
pub impl_header: ty::ImplHeader<'tcx>,
pub intercrate_ambiguity_causes: Vec<IntercrateAmbiguityCause>,
- /// True if the overlap might've been permitted before the shift
+ /// `true` if the overlap might've been permitted before the shift
/// to universes.
pub involves_placeholder: bool,
}
-pub fn add_placeholder_note(err: &mut crate::errors::DiagnosticBuilder<'_>) {
+pub fn add_placeholder_note(err: &mut errors::DiagnosticBuilder<'_>) {
err.note(&format!(
"this behavior recently changed as a result of a bug fix; \
see rust-lang/rust#56105 for details"
}
/// Can both impl `a` and impl `b` be satisfied by a common type (including
-/// `where` clauses)? If so, returns an `ImplHeader` that unifies the two impls.
+/// where-clauses)? If so, returns an `ImplHeader` that unifies the two impls.
fn overlap<'cx, 'gcx, 'tcx>(
selcx: &mut SelectionContext<'cx, 'gcx, 'tcx>,
a_def_id: DefId,
}
/// Checks the coherence orphan rules. `impl_def_id` should be the
-/// def-id of a trait impl. To pass, either the trait must be local, or else
+/// `DefId` of a trait impl. To pass, either the trait must be local, or else
/// two conditions must be satisfied:
///
/// 1. All type parameters in `Self` must be "covered" by some local type constructor.
orphan_check_trait_ref(tcx, trait_ref, InCrate::Local)
}
-/// Check whether a trait-ref is potentially implementable by a crate.
+/// Checks whether a trait-ref is potentially implementable by a crate.
///
/// The current rule is that a trait-ref orphan checks in a crate C:
///
Overflow,
};
-use crate::errors::{Applicability, DiagnosticBuilder};
use crate::hir;
use crate::hir::Node;
use crate::hir::def_id::DefId;
use crate::infer::{self, InferCtxt};
use crate::infer::type_variable::TypeVariableOrigin;
-use std::fmt;
-use syntax::ast;
use crate::session::DiagnosticMessageId;
use crate::ty::{self, AdtKind, ToPredicate, ToPolyTraitRef, Ty, TyCtxt, TypeFoldable};
use crate::ty::GenericParamDefKind;
use crate::ty::SubtypePredicate;
use crate::util::nodemap::{FxHashMap, FxHashSet};
+use errors::{Applicability, DiagnosticBuilder};
+use std::fmt;
+use syntax::ast;
use syntax_pos::{DUMMY_SP, Span, ExpnInfo, ExpnFormat};
impl<'a, 'gcx, 'tcx> InferCtxt<'a, 'gcx, 'tcx> {
}
- /// Get the parent trait chain start
+ /// Gets the parent trait chain start.
fn get_parent_trait_ref(&self, code: &ObligationCauseCode<'tcx>) -> Option<String> {
match code {
&ObligationCauseCode::BuiltinDerivedObligation(ref data) => {
}
}
- /// Returns whether the trait predicate may apply for *some* assignment
+ /// Returns `true` if the trait predicate may apply for *some* assignment
/// to the type parameters.
fn predicate_can_apply(&self,
param_env: ty::ParamEnv<'tcx>,
fn as_predicate(&self) -> &Self::Predicate { &self.obligation.predicate }
}
-/// The fulfillment context is used to drive trait resolution. It
+/// The fulfillment context is used to drive trait resolution. It
/// consists of a list of obligations that must be (eventually)
/// satisfied. The job is to track which are satisfied, which yielded
/// errors, and which are still pending. At any point, users can call
/// creating a fresh type variable `$0` as well as a projection
/// predicate `<SomeType as SomeTrait>::X == $0`. When the
/// inference engine runs, it will attempt to find an impl of
- /// `SomeTrait` or a where clause that lets us unify `$0` with
+ /// `SomeTrait` or a where-clause that lets us unify `$0` with
/// something concrete. If this fails, we'll unify `$0` with
/// `projection_ty` again.
fn normalize_projection_type<'a, 'gcx>(&mut self,
}
}
-/// Return the set of type variables contained in a trait ref
+/// Returns the set of type variables contained in a trait ref.
fn trait_ref_type_vars<'a, 'gcx, 'tcx>(selcx: &mut SelectionContext<'a, 'gcx, 'tcx>,
t: ty::PolyTraitRef<'tcx>) -> Vec<Ty<'tcx>>
{
pub use self::SelectionError::*;
pub use self::Vtable::*;
-// Whether to enable bug compatibility with issue #43355
+/// Whether to enable bug compatibility with issue #43355.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum IntercrateMode {
Issue43355,
Fixed
}
-// The mode that trait queries run in
+/// The mode that trait queries run in.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
pub enum TraitQueryMode {
// Standard/un-canonicalized queries get accurate
Canonical,
}
-/// An `Obligation` represents some trait reference (e.g., `int:Eq`) for
-/// which the vtable must be found. The process of finding a vtable is
+/// An `Obligation` represents some trait reference (e.g., `int: Eq`) for
+/// which the vtable must be found. The process of finding a vtable is
/// called "resolving" the `Obligation`. This process consists of
/// either identifying an `impl` (e.g., `impl Eq for int`) that
/// provides the required vtable, or else finding a bound that is in
/// scope. The eventual result is usually a `Selection` (defined below).
#[derive(Clone, PartialEq, Eq, Hash)]
pub struct Obligation<'tcx, T> {
- /// Why do we have to prove this thing?
+ /// The reason we have to prove this thing.
pub cause: ObligationCause<'tcx>,
- /// In which environment should we prove this thing?
+ /// The environment in which we should prove this thing.
pub param_env: ty::ParamEnv<'tcx>,
- /// What are we trying to prove?
+ /// The thing we are trying to prove.
pub predicate: T,
/// If we started proving this as a result of trying to prove
/// something else, track the total depth to ensure termination.
/// If this goes over a certain threshold, we abort compilation --
/// in such cases, we can not say whether or not the predicate
- /// holds for certain. Stupid halting problem. Such a drag.
+ /// holds for certain. Stupid halting problem; such a drag.
pub recursion_depth: usize,
}
pub type PredicateObligation<'tcx> = Obligation<'tcx, ty::Predicate<'tcx>>;
pub type TraitObligation<'tcx> = Obligation<'tcx, ty::PolyTraitPredicate<'tcx>>;
-/// Why did we incur this obligation? Used for error reporting.
+/// The reason why we incurred this obligation; used for error reporting.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ObligationCause<'tcx> {
pub span: Span,
- // The id of the fn body that triggered this obligation. This is
- // used for region obligations to determine the precise
- // environment in which the region obligation should be evaluated
- // (in particular, closures can add new assumptions). See the
- // field `region_obligations` of the `FulfillmentContext` for more
- // information.
- pub body_id: ast::NodeId,
+ /// The ID of the fn body that triggered this obligation. This is
+ /// used for region obligations to determine the precise
+ /// environment in which the region obligation should be evaluated
+ /// (in particular, closures can add new assumptions). See the
+ /// field `region_obligations` of the `FulfillmentContext` for more
+ /// information.
+ pub body_id: hir::HirId,
pub code: ObligationCauseCode<'tcx>
}
ObligationCauseCode::StartFunctionType => {
tcx.sess.source_map().def_span(self.span)
}
+ ObligationCauseCode::MatchExpressionArm { arm_span, .. } => arm_span,
_ => self.span,
}
}
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum ObligationCauseCode<'tcx> {
- /// Not well classified or should be obvious from span.
+ /// Not well classified or should be obvious from the span.
MiscObligation,
- /// A slice or array is WF only if `T: Sized`
+ /// A slice or array is WF only if `T: Sized`.
SliceOrArrayElem,
- /// A tuple is WF only if its middle elements are Sized
+ /// A tuple is WF only if its middle elements are `Sized`.
TupleElem,
- /// This is the trait reference from the given projection
+ /// This is the trait reference from the given projection.
ProjectionWf(ty::ProjectionTy<'tcx>),
- /// In an impl of trait X for type Y, type Y must
- /// also implement all supertraits of X.
+ /// In an impl of trait `X` for type `Y`, type `Y` must
+ /// also implement all supertraits of `X`.
ItemObligation(DefId),
/// A type like `&'a T` is WF only if `T: 'a`.
MatchExpressionArm {
arm_span: Span,
source: hir::MatchSource,
+ prior_arms: Vec<Span>,
+ last_ty: Ty<'tcx>,
},
/// Computing common supertype in the pattern guard for the arms of a match expression
/// directly.
parent_trait_ref: ty::PolyTraitRef<'tcx>,
- /// The parent trait had this cause
+ /// The parent trait had this cause.
parent_code: Rc<ObligationCauseCode<'tcx>>
}
pub type TraitObligations<'tcx> = Vec<TraitObligation<'tcx>>;
/// The following types:
-/// * `WhereClause`
-/// * `WellFormed`
-/// * `FromEnv`
-/// * `DomainGoal`
-/// * `Goal`
-/// * `Clause`
-/// * `Environment`
-/// * `InEnvironment`
+/// * `WhereClause`,
+/// * `WellFormed`,
+/// * `FromEnv`,
+/// * `DomainGoal`,
+/// * `Goal`,
+/// * `Clause`,
+/// * `Environment`,
+/// * `InEnvironment`,
/// are used for representing the trait system in the form of
/// logic programming clauses. They are part of the interface
/// for the chalk SLG solver.
/// with the goal to solve and proceeds from there).
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug)]
pub struct ProgramClause<'tcx> {
- /// This goal will be considered true...
+ /// This goal will be considered true ...
pub goal: DomainGoal<'tcx>,
- /// ...if we can prove these hypotheses (there may be no hypotheses at all):
+ /// ... if we can prove these hypotheses (there may be no hypotheses at all):
pub hypotheses: Goals<'tcx>,
/// Useful for filtering clauses.
/// For example, the vtable may be tied to a specific impl (case A),
/// or it may be relative to some bound that is in scope (case B).
///
-///
/// ```
/// impl<T:Clone> Clone<T> for Option<T> { ... } // Impl_1
/// impl<T:Clone> Clone<T> for Box<T> { ... } // Impl_2
/// Vtable identifying a particular impl.
VtableImpl(VtableImplData<'tcx, N>),
- /// Vtable for auto trait implementations
+ /// Vtable for auto trait implementations.
/// This carries the information and nested obligations with regards
/// to an auto implementation for a trait `Trait`. The nested obligations
/// ensure the trait implementation holds for all the constituent types.
/// any).
VtableParam(Vec<N>),
- /// Virtual calls through an object
+ /// Virtual calls through an object.
VtableObject(VtableObjectData<'tcx, N>),
/// Successful resolution for a builtin trait.
VtableBuiltin(VtableBuiltinData<N>),
- /// Vtable automatically generated for a closure. The def ID is the ID
+ /// Vtable automatically generated for a closure. The `DefId` is the ID
/// of the closure expression. This is a `VtableImpl` in spirit, but the
/// impl is generated by the compiler and does not appear in the source.
VtableClosure(VtableClosureData<'tcx, N>),
- /// Same as above, but for a fn pointer type with the given signature.
+ /// Same as above, but for a function pointer type with the given signature.
VtableFnPointer(VtableFnPointerData<'tcx, N>),
/// Vtable automatically generated for a generator.
};
let obligation = Obligation {
param_env,
- cause: ObligationCause::misc(span, ast::DUMMY_NODE_ID),
+ cause: ObligationCause::misc(span, hir::DUMMY_HIR_ID),
recursion_depth: 0,
predicate: trait_ref.to_predicate(),
};
// We can use a dummy node-id here because we won't pay any mind
// to region obligations that arise (there shouldn't really be any
// anyhow).
- let cause = ObligationCause::misc(span, ast::DUMMY_NODE_ID);
+ let cause = ObligationCause::misc(span, hir::DUMMY_HIR_ID);
fulfill_cx.register_bound(infcx, param_env, ty, def_id, cause);
}
pub fn misc(span: Span,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
trait_ref: O)
-> Obligation<'tcx, O> {
impl<'tcx> ObligationCause<'tcx> {
#[inline]
pub fn new(span: Span,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
code: ObligationCauseCode<'tcx>)
-> ObligationCause<'tcx> {
- ObligationCause { span: span, body_id: body_id, code: code }
+ ObligationCause { span, body_id, code }
}
- pub fn misc(span: Span, body_id: ast::NodeId) -> ObligationCause<'tcx> {
- ObligationCause { span: span, body_id: body_id, code: MiscObligation }
+ pub fn misc(span: Span, body_id: hir::HirId) -> ObligationCause<'tcx> {
+ ObligationCause { span, body_id, code: MiscObligation }
}
pub fn dummy() -> ObligationCause<'tcx> {
- ObligationCause { span: DUMMY_SP, body_id: ast::CRATE_NODE_ID, code: MiscObligation }
+ ObligationCause { span: DUMMY_SP, body_id: hir::CRATE_HIR_ID, code: MiscObligation }
}
}
//! - have a suitable receiver from which we can extract a vtable and coerce to a "thin" version
//! that doesn't contain the vtable;
//! - not reference the erased type `Self` except for in this receiver;
-//! - not have generic type parameters
+//! - not have generic type parameters.
use super::elaborate_predicates;
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash)]
pub enum ObjectSafetyViolation {
- /// Self : Sized declared on the trait
+ /// `Self: Sized` declared on the trait.
SizedSelf,
/// Supertrait reference references `Self` an in illegal location
- /// (e.g., `trait Foo : Bar<Self>`)
+ /// (e.g., `trait Foo : Bar<Self>`).
SupertraitSelf,
- /// Method has something illegal
+ /// Method has something illegal.
Method(ast::Name, MethodViolationCode),
- /// Associated const
+ /// Associated const.
AssociatedConst(ast::Name),
}
impl<'a, 'tcx> TyCtxt<'a, 'tcx, 'tcx> {
/// Returns the object safety violations that affect
- /// astconv - currently, Self in supertraits. This is needed
+ /// astconv -- currently, `Self` in supertraits. This is needed
/// because `object_safety_violations` can't be used during
/// type collection.
pub fn astconv_object_safety_violations(self, trait_def_id: DefId)
None
}
- /// performs a type substitution to produce the version of receiver_ty when `Self = self_ty`
- /// e.g., for receiver_ty = `Rc<Self>` and self_ty = `Foo`, returns `Rc<Foo>`
+ /// Performs a type substitution to produce the version of `receiver_ty` when `Self = self_ty`;
+ /// e.g., for `receiver_ty = Rc<Self>` and `self_ty = Foo`, returns `Rc<Foo>`.
fn receiver_for_self_ty(
self, receiver_ty: Ty<'tcx>, self_ty: Ty<'tcx>, method_def_id: DefId
) -> Ty<'tcx> {
result
}
- /// creates the object type for the current trait. For example,
+ /// Creates the object type for the current trait. For example,
/// if the current trait is `Deref`, then this will be
- /// `dyn Deref<Target=Self::Target> + 'static`
+ /// `dyn Deref<Target = Self::Target> + 'static`.
fn object_ty_for_trait(self, trait_def_id: DefId, lifetime: ty::Region<'tcx>) -> Ty<'tcx> {
debug!("object_ty_for_trait: trait_def_id={:?}", trait_def_id);
object_ty
}
- /// checks the method's receiver (the `self` argument) can be dispatched on when `Self` is a
+ /// Checks that the method's receiver (the `self` argument) can be dispatched on when `Self` is a
/// trait object. We require that `DispatchableFromDyn` be implemented for the receiver type
/// in the following way:
- /// - let `Receiver` be the type of the `self` argument, i.e `Self`, `&Self`, `Rc<Self>`
+ /// - let `Receiver` be the type of the `self` argument, i.e., `Self`, `&Self`, `Rc<Self>`,
/// - require the following bound:
///
- /// Receiver[Self => T]: DispatchFromDyn<Receiver[Self => dyn Trait]>
+ /// ```
+ /// Receiver[Self => T]: DispatchFromDyn<Receiver[Self => dyn Trait]>
+ /// ```
///
- /// where `Foo[X => Y]` means "the same type as `Foo`, but with `X` replaced with `Y`"
+ /// where `Foo[X => Y]` means "the same type as `Foo`, but with `X` replaced with `Y`"
/// (substitution notation).
///
- /// some examples of receiver types and their required obligation
- /// - `&'a mut self` requires `&'a mut Self: DispatchFromDyn<&'a mut dyn Trait>`
- /// - `self: Rc<Self>` requires `Rc<Self>: DispatchFromDyn<Rc<dyn Trait>>`
- /// - `self: Pin<Box<Self>>` requires `Pin<Box<Self>>: DispatchFromDyn<Pin<Box<dyn Trait>>>`
+ /// Some examples of receiver types and their required obligation:
+ /// - `&'a mut self` requires `&'a mut Self: DispatchFromDyn<&'a mut dyn Trait>`,
+ /// - `self: Rc<Self>` requires `Rc<Self>: DispatchFromDyn<Rc<dyn Trait>>`,
+ /// - `self: Pin<Box<Self>>` requires `Pin<Box<Self>>: DispatchFromDyn<Pin<Box<dyn Trait>>>`.
///
/// The only case where the receiver is not dispatchable, but is still a valid receiver
/// type (just not object-safe), is when there is more than one level of pointer indirection.
- /// e.g., `self: &&Self`, `self: &Rc<Self>`, `self: Box<Box<Self>>`. In these cases, there
+ /// E.g., `self: &&Self`, `self: &Rc<Self>`, `self: Box<Box<Self>>`. In these cases, there
/// is no way, or at least no inexpensive way, to coerce the receiver from the version where
/// `Self = dyn Trait` to the version where `Self = T`, where `T` is the unknown erased type
/// contained by the trait object, because the object that needs to be coerced is behind
/// Also, `impl Trait` is normalized to the concrete type,
/// which has to be already collected by type-checking.
///
- /// NOTE: As `impl Trait`'s concrete type should *never*
+ /// NOTE: as `impl Trait`'s concrete type should *never*
/// be observable directly by the user, `Reveal::All`
/// should not be used by checks which may expose
/// type equality or type contents to the user.
///
/// Concern #2. Even within the snapshot, if those original
/// obligations are not yet proven, then we are able to do projections
-/// that may yet turn out to be wrong. This *may* lead to some sort
+/// that may yet turn out to be wrong. This *may* lead to some sort
/// of trouble, though we don't have a concrete example of how that
-/// can occur yet. But it seems risky at best.
+/// can occur yet. But it seems risky at best.
fn get_paranoid_cache_value_obligation<'a, 'gcx, 'tcx>(
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
/// cycles to arise, where you basically had a setup like `<MyType<$0>
/// as Trait>::Foo == $0`. Here, normalizing `<MyType<$0> as
/// Trait>::Foo> to `[type error]` would lead to an obligation of
-/// `<MyType<[type error]> as Trait>::Foo`. We are supposed to report
+/// `<MyType<[type error]> as Trait>::Foo`. We are supposed to report
/// an error for this obligation, but we legitimately should not,
/// because it contains `[type error]`. Yuck! (See issue #29857 for
/// one case where this arose.)
}
}
-/// Compute the result of a projection type (if we can).
+/// Computes the result of a projection type (if we can).
///
/// IMPORTANT:
/// - `obligation` must be fully normalized
// # Cache
/// The projection cache. Unlike the standard caches, this can include
-/// infcx-dependent type variables - therefore, we have to roll the
+/// infcx-dependent type variables, therefore we have to roll the
/// cache back each time we roll a snapshot back, to avoid assumptions
/// on yet-unresolved inference variables. Types with placeholder
/// regions also have to be removed when the respective snapshot ends.
/// (for the lifetime of the infcx).
///
/// Entries in the projection cache might contain inference variables
-/// that will be resolved by obligations on the projection cache entry - e.g.
+/// that will be resolved by obligations on the projection cache entry (e.g.,
/// when a type parameter in the associated type is constrained through
-/// an "RFC 447" projection on the impl.
+/// an "RFC 447" projection on the impl).
///
/// When working with a fulfillment context, the derived obligations of each
/// projection cache entry will be registered on the fulfillcx, so any users
/// If that is done, after evaluation the obligations, it is a good idea to
/// call `ProjectionCache::complete` to make sure the obligations won't be
/// re-evaluated and avoid an exponential worst-case.
-///
-/// FIXME: we probably also want some sort of cross-infcx cache here to
-/// reduce the amount of duplication. Let's see what we get with the Chalk
-/// reforms.
+//
+// FIXME: we probably also want some sort of cross-infcx cache here to
+// reduce the amount of duplication. Let's see what we get with the Chalk reforms.
#[derive(Default)]
pub struct ProjectionCache<'tcx> {
map: SnapshotMap<ProjectionCacheKey<'tcx>, ProjectionCacheEntry<'tcx>>,
/// outlive. This is similar but not *quite* the same as the
/// `needs_drop` test in the compiler already -- that is, for every
/// type T for which this function return true, needs-drop would
-/// return false. But the reverse does not hold: in particular,
+/// return `false`. But the reverse does not hold: in particular,
/// `needs_drop` returns false for `PhantomData`, but it is not
/// trivial for dropck-outlives.
///
/// the normalized value along with various outlives relations (in
/// the form of obligations that must be discharged).
///
- /// NB. This will *eventually* be the main means of
+ /// N.B., this will *eventually* be the main means of
/// normalizing, but for now should be used only when we actually
/// know that normalization will succeed, since error reporting
/// and other details are still "under development".
/// a `T` (with regions erased). This is appropriate when the
/// binder is being instantiated at the call site.
///
- /// NB. Currently, higher-ranked type bounds inhibit
+ /// N.B., currently, higher-ranked type bounds inhibit
/// normalization. Therefore, each time we erase them in
/// codegen, we need to normalize the contents.
pub fn normalize_erasing_late_bound_regions<T>(
use crate::infer::InferCtxt;
use crate::infer::canonical::OriginalQueryValues;
-use syntax::ast;
+use crate::hir;
use syntax::source_map::Span;
use crate::traits::{FulfillmentContext, ObligationCause, TraitEngine, TraitEngineExt};
use crate::traits::query::NoSolution;
/// Outlives bounds are relationships between generic parameters,
/// whether they both be regions (`'a: 'b`) or whether types are
-/// involved (`T: 'a`). These relationships can be extracted from the
+/// involved (`T: 'a`). These relationships can be extracted from the
/// full set of predicates we understand or also from types (in which
/// case they are called implied bounds). They are fed to the
/// `OutlivesEnv` which in turn is supplied to the region checker and
impl<'cx, 'gcx, 'tcx> InferCtxt<'cx, 'gcx, 'tcx> {
/// Implied bounds are region relationships that we deduce
- /// automatically. The idea is that (e.g.) a caller must check that a
+ /// automatically. The idea is that (e.g.) a caller must check that a
/// function's argument types are well-formed immediately before
/// calling that fn, and hence the *callee* can assume that its
/// argument types are well-formed. This may imply certain relationships
pub fn implied_outlives_bounds(
&self,
param_env: ty::ParamEnv<'tcx>,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
ty: Ty<'tcx>,
span: Span,
) -> Vec<OutlivesBound<'tcx>> {
canonicalized: Canonicalized<'gcx, ParamEnvAnd<'tcx, Normalize<Self>>>,
) -> Fallible<CanonicalizedQueryResponse<'gcx, Self>>;
- /// Convert from the `'gcx` (lifted) form of `Self` into the `tcx`
+ /// Converts from the `'gcx` (lifted) form of `Self` into the `tcx`
/// form of `Self`.
fn shrink_to_tcx_lifetime(
v: &'a CanonicalizedQueryResponse<'gcx, Self>,
/// See #23980 for details.
pub fn add_intercrate_ambiguity_hint<'a, 'tcx>(
&self,
- err: &mut crate::errors::DiagnosticBuilder<'_>,
+ err: &mut errors::DiagnosticBuilder<'_>,
) {
err.note(&self.intercrate_ambiguity_hint());
}
}
/// The selection process begins by considering all impls, where
-/// clauses, and so forth that might resolve an obligation. Sometimes
+/// clauses, and so forth that might resolve an obligation. Sometimes
/// we'll be able to say definitively that (e.g.) an impl does not
/// apply to the obligation: perhaps it is defined for `usize` but the
/// obligation is for `int`. In that case, we drop the impl out of the
-/// list. But the other cases are considered *candidates*.
+/// list. But the other cases are considered *candidates*.
///
/// For selection to succeed, there must be exactly one matching
/// candidate. If the obligation is fully known, this is guaranteed
/// - `EvaluatedToErr` implies `EvaluatedToRecur`
/// - the "union" of evaluation results is equal to their maximum -
/// all the "potential success" candidates can potentially succeed,
-/// so they are no-ops when unioned with a definite error, and within
+/// so they are noops when unioned with a definite error, and within
/// the categories it's easy to see that the unions are correct.
pub enum EvaluationResult {
/// Evaluation successful
/// ```
///
/// When we try to prove it, we first go the first option, which
- /// recurses. This shows us that the impl is "useless" - it won't
+ /// recurses. This shows us that the impl is "useless" -- it won't
/// tell us that `T: Trait` unless it already implemented `Trait`
/// by some other means. However, that does not prevent `T: Trait`
/// does not hold, because of the bound (which can indeed be satisfied
/// by `SomeUnsizedType` from another crate).
- ///
- /// FIXME: when an `EvaluatedToRecur` goes past its parent root, we
- /// ought to convert it to an `EvaluatedToErr`, because we know
- /// there definitely isn't a proof tree for that obligation. Not
- /// doing so is still sound - there isn't any proof tree, so the
- /// branch still can't be a part of a minimal one - but does not
- /// re-enable caching.
+ //
+ // FIXME: when an `EvaluatedToRecur` goes past its parent root, we
+ // ought to convert it to an `EvaluatedToErr`, because we know
+ // there definitely isn't a proof tree for that obligation. Not
+ // doing so is still sound -- there isn't any proof tree, so the
+ // branch still can't be a part of a minimal one -- but does not re-enable caching.
EvaluatedToRecur,
- /// Evaluation failed
+ /// Evaluation failed.
EvaluatedToErr,
}
impl EvaluationResult {
- /// True if this evaluation result is known to apply, even
+ /// Returns `true` if this evaluation result is known to apply, even
/// considering outlives constraints.
pub fn must_apply_considering_regions(self) -> bool {
self == EvaluatedToOk
}
- /// True if this evaluation result is known to apply, ignoring
+ /// Returns `true` if this evaluation result is known to apply, ignoring
/// outlives constraints.
pub fn must_apply_modulo_regions(self) -> bool {
self <= EvaluatedToOkModuloRegions
/// that recursion is ok. This routine returns true if the top of the
/// stack (`cycle[0]`):
///
- /// - is a defaulted trait, and
- /// - it also appears in the backtrace at some position `X`; and,
+ /// - is a defaulted trait,
+ /// - it also appears in the backtrace at some position `X`,
/// - all the predicates at positions `X..` between `X` and the top are
/// also defaulted traits.
pub fn coinductive_match<I>(&mut self, cycle: I) -> bool
}
/// Further evaluate `candidate` to decide whether all type parameters match and whether nested
- /// obligations are met. Returns true if `candidate` remains viable after this further
+ /// obligations are met. Returns whether `candidate` remains viable after this further
/// scrutiny.
fn evaluate_candidate<'o>(
&mut self,
}
}
- /// Returns true if the global caches can be used.
+ /// Returns `true` if the global caches can be used.
/// Do note that if the type itself is not in the
/// global tcx, the local caches will be used.
fn can_use_global_caches(&self, param_env: ty::ParamEnv<'tcx>) -> bool {
Ok(())
}
- /// Check for the artificial impl that the compiler will create for an obligation like `X :
+ /// Checks for the artificial impl that the compiler will create for an obligation like `X :
/// FnMut<..>` where `X` is a closure type.
///
/// Note: the type parameters on a closure candidate are modeled as *output* type
// type variables and then we also attempt to evaluate recursive
// bounds to see if they are satisfied.
- /// Returns true if `victim` should be dropped in favor of
- /// `other`. Generally speaking we will drop duplicate
+ /// Returns `true` if `victim` should be dropped in favor of
+ /// `other`. Generally speaking we will drop duplicate
/// candidates and prefer where-clause candidates.
///
/// See the comment for "SelectionCandidate" for more details.
/// we currently treat the input type parameters on the trait as
/// outputs. This means that when we have a match we have only
/// considered the self type, so we have to go back and make sure
- /// to relate the argument types too. This is kind of wrong, but
+ /// to relate the argument types too. This is kind of wrong, but
/// since we control the full set of impls, also not that wrong,
/// and it DOES yield better error messages (since we don't report
/// errors as if there is no applicable impl, but rather report
/// impl Fn(int) for Closure { ... }
///
/// Now imagine our obligation is `Fn(usize) for Closure`. So far
- /// we have matched the self-type `Closure`. At this point we'll
+ /// we have matched the self type `Closure`. At this point we'll
/// compare the `int` to `usize` and generate an error.
///
/// Note that this checking occurs *after* the impl has selected,
}
/// Normalize `where_clause_trait_ref` and try to match it against
- /// `obligation`. If successful, return any predicates that
+ /// `obligation`. If successful, return any predicates that
/// result from the normalization. Normalization is necessary
/// because where-clauses are stored in the parameter environment
/// unnormalized.
/// Suppose we have selected "source impl" with `V` instantiated with `u32`.
/// This function will produce a substitution with `T` and `U` both mapping to `u32`.
///
-/// Where clauses add some trickiness here, because they can be used to "define"
+/// Where-clauses add some trickiness here, because they can be used to "define"
/// an argument indirectly:
///
/// ```rust
/// impl<'a, I, T: 'a> Iterator for Cloned<I>
-/// where I: Iterator<Item=&'a T>, T: Clone
+/// where I: Iterator<Item = &'a T>, T: Clone
/// ```
///
/// In a case like this, the substitution for `T` is determined indirectly,
}
}
-/// Is impl1 a specialization of impl2?
+/// Is `impl1` a specialization of `impl2`?
///
/// Specialization is determined by the sets of types to which the impls apply;
-/// impl1 specializes impl2 if it applies to a subset of the types impl2 applies
+/// `impl1` specializes `impl2` if it applies to a subset of the types `impl2` applies
/// to.
pub(super) fn specializes<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
(impl1_def_id, impl2_def_id): (DefId, DefId))
}
}
- /// Remove an impl from this set of children. Used when replacing
+ /// Removes an impl from this set of children. Used when replacing
/// an impl with a parent. The impl must be present in the list of
/// children already.
fn remove_existing(&mut self,
self.children.entry(parent).or_default().insert_blindly(tcx, child);
}
- /// The parent of a given impl, which is the def id of the trait when the
+ /// The parent of a given impl, which is the `DefId` of the trait when the
/// impl is a "specialization root".
pub fn parent(&self, child: DefId) -> DefId {
*self.parent.get(&child).unwrap()
trait_item_def_id,
}),
super::ExprAssignable => Some(super::ExprAssignable),
- super::MatchExpressionArm { arm_span, source } => Some(super::MatchExpressionArm {
+ super::MatchExpressionArm {
arm_span,
- source: source,
- }),
+ source,
+ ref prior_arms,
+ last_ty,
+ } => {
+ tcx.lift(&last_ty).map(|last_ty| {
+ super::MatchExpressionArm {
+ arm_span,
+ source,
+ prior_arms: prior_arms.clone(),
+ last_ty,
+ }
+ })
+ }
super::MatchExpressionArmPattern { span, ty } => {
tcx.lift(&ty).map(|ty| super::MatchExpressionArmPattern { span, ty })
}
/// Here the pointer will be dereferenced N times (where a dereference can
/// happen to raw or borrowed pointers or any smart pointer which implements
/// Deref, including Box<_>). The types of dereferences is given by
-/// `autoderefs`. It can then be auto-referenced zero or one times, indicated
+/// `autoderefs`. It can then be auto-referenced zero or one times, indicated
/// by `autoref`, to either a raw or borrowed pointer. In these cases unsize is
/// `false`.
///
/// stored in `unsize` is `Foo<[i32]>`, we don't store any further detail about
/// the underlying conversions from `[i32; 4]` to `[i32]`.
///
-/// 3. Coercing a `Box<T>` to `Box<dyn Trait>` is an interesting special case. In
+/// 3. Coercing a `Box<T>` to `Box<dyn Trait>` is an interesting special case. In
/// that case, we have the pointer we need coming in, so there are no
/// autoderefs, and no autoref. Instead we just do the `Unsize` transformation.
/// At some point, of course, `Box` should move out of the compiler, in which
/// This will do things like convert thin pointers to fat
/// pointers, or convert structs containing thin pointers to
/// structs containing fat pointers, or convert between fat
- /// pointers. We don't store the details of how the transform is
+ /// pointers. We don't store the details of how the transform is
/// done (in fact, we don't know that, because it might depend on
/// the precise type parameters). We just store the target
/// type. Codegen backends and miri figure out what has to be done
}
/// At least for initial deployment, we want to limit two-phase borrows to
-/// only a few specific cases. Right now, those mostly "things that desugar"
-/// into method calls
-/// - using x.some_method() syntax, where some_method takes &mut self
-/// - using Foo::some_method(&mut x, ...) syntax
-/// - binary assignment operators (+=, -=, *=, etc.)
-/// Anything else should be rejected until generalized two phase borrow support
+/// only a few specific cases. Right now, those are mostly "things that desugar"
+/// into method calls:
+/// - using `x.some_method()` syntax, where `some_method` takes `&mut self`,
+/// - using `Foo::some_method(&mut x, ...)` syntax,
+/// - binary assignment operators (`+=`, `-=`, `*=`, etc.).
+/// Anything else should be rejected until generalized two-phase borrow support
/// is implemented. Right now, dataflow can't handle the general case where there
/// is more than one use of a mutable borrow, and we don't want to accept too much
/// new code via two-phase borrows, so we try to limit where we create two-phase
#[derive(Copy, Clone, PartialEq, Debug, RustcEncodable, RustcDecodable)]
pub enum AutoBorrow<'tcx> {
- /// Convert from T to &T.
+ /// Converts from T to &T.
Ref(ty::Region<'tcx>, AutoBorrowMutability),
- /// Convert from T to *T.
+ /// Converts from T to *T.
RawPtr(hir::Mutability),
}
}
}
- /// Returns true if this function must conform to `min_const_fn`
+ /// Returns `true` if this function must conform to `min_const_fn`
pub fn is_min_const_fn(self, def_id: DefId) -> bool {
// Bail out if the signature doesn't contain `const`
if !self.is_const_fn_raw(def_id) {
-//! type context book-keeping
+//! Type context book-keeping.
use crate::dep_graph::DepGraph;
use crate::dep_graph::{self, DepNode, DepConstructor};
-use crate::errors::DiagnosticBuilder;
use crate::session::Session;
use crate::session::config::{BorrowckMode, OutputFilenames};
use crate::session::config::CrateType;
use crate::ty::CanonicalPolyFnSig;
use crate::util::nodemap::{DefIdMap, DefIdSet, ItemLocalMap};
use crate::util::nodemap::{FxHashMap, FxHashSet};
+use errors::DiagnosticBuilder;
use rustc_data_structures::interner::HashInterner;
use smallvec::SmallVec;
use rustc_data_structures::stable_hasher::{HashStable, hash_stable_hashmap,
/// belongs, but it may not exist if it's a tuple field (`tuple.0`).
field_indices: ItemLocalMap<usize>,
- /// Stores the types for various nodes in the AST. Note that this table
- /// is not guaranteed to be populated until after typeck. See
+ /// Stores the types for various nodes in the AST. Note that this table
+ /// is not guaranteed to be populated until after typeck. See
/// typeck::check::fn_ctxt for details.
node_types: ItemLocalMap<Ty<'tcx>>,
/// Stores the type parameters which were substituted to obtain the type
- /// of this node. This only applies to nodes that refer to entities
+ /// of this node. This only applies to nodes that refer to entities
/// parameterized by type parameters, such as generic fns, types, or
/// other items.
node_substs: ItemLocalMap<&'tcx Substs<'tcx>>,
pub tainted_by_errors: bool,
/// Stores the free-region relationships that were deduced from
- /// its where clauses and parameter types. These are then
+ /// its where-clauses and parameter types. These are then
/// read-again by borrowck.
pub free_region_map: FreeRegionMap<'tcx>,
}
}
- pub fn node_id_to_type(&self, id: hir::HirId) -> Ty<'tcx> {
- self.node_id_to_type_opt(id).unwrap_or_else(||
- bug!("node_id_to_type: no type for node `{}`",
- tls::with(|tcx| {
- let id = tcx.hir().hir_to_node_id(id);
- tcx.hir().node_to_string(id)
- }))
+ pub fn node_type(&self, id: hir::HirId) -> Ty<'tcx> {
+ self.node_type_opt(id).unwrap_or_else(||
+ bug!("node_type: no type for node `{}`",
+ tls::with(|tcx| tcx.hir().hir_to_string(id)))
)
}
- pub fn node_id_to_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
+ pub fn node_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
validate_hir_id_for_typeck_tables(self.local_id_root, id, false);
self.node_types.get(&id.local_id).cloned()
}
// Returns the type of a pattern as a monotype. Like @expr_ty, this function
// doesn't provide type parameter substitutions.
pub fn pat_ty(&self, pat: &hir::Pat) -> Ty<'tcx> {
- self.node_id_to_type(pat.hir_id)
+ self.node_type(pat.hir_id)
}
pub fn pat_ty_opt(&self, pat: &hir::Pat) -> Option<Ty<'tcx>> {
- self.node_id_to_type_opt(pat.hir_id)
+ self.node_type_opt(pat.hir_id)
}
// Returns the type of an expression as a monotype.
// ask for the type of "id" in "id(3)", it will return "fn(&isize) -> isize"
// instead of "fn(ty) -> T with T = isize".
pub fn expr_ty(&self, expr: &hir::Expr) -> Ty<'tcx> {
- self.node_id_to_type(expr.hir_id)
+ self.node_type(expr.hir_id)
}
pub fn expr_ty_opt(&self, expr: &hir::Expr) -> Option<Ty<'tcx>> {
- self.node_id_to_type_opt(expr.hir_id)
+ self.node_type_opt(expr.hir_id)
}
pub fn adjustments(&self) -> LocalTableInContext<'_, Vec<ty::adjustment::Adjustment<'tcx>>> {
impl CanonicalUserType<'gcx> {
/// Returns `true` if this represents a substitution of the form `[?0, ?1, ?2]`,
- /// i.e. each thing is mapped to a canonical variable with the same index.
+ /// i.e., each thing is mapped to a canonical variable with the same index.
pub fn is_identity(&self) -> bool {
match self.value {
UserType::Ty(_) => false,
}
}
-/// A user-given type annotation attached to a constant. These arise
+/// A user-given type annotation attached to a constant. These arise
/// from constants that are named via paths, like `Foo::<A>::new` and
/// so forth.
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- /// Get the global TyCtxt.
+ /// Gets the global `TyCtxt`.
#[inline]
pub fn global_tcx(self) -> TyCtxt<'gcx, 'gcx, 'gcx> {
TyCtxt {
value.lift_to_tcx(self.global_tcx())
}
- /// Returns true if self is the same as self.global_tcx().
+ /// Returns `true` if `self` is the same as `self.global_tcx()`.
fn is_global(self) -> bool {
ptr::eq(self.interners, &self.global_interners)
}
- /// Create a type context and call the closure with a `TyCtxt` reference
+ /// Creates a type context and calls the closure with a `TyCtxt` reference
/// to the context. The closure enforces that the type context and any interned
/// value (types, substs, etc.) can only be used while `ty::tls` has a valid
/// reference to the context, to allow formatting values that need it.
}
}
- /// Convert a `DefId` into its fully expanded `DefPath` (every
+ /// Converts a `DefId` into its fully expanded `DefPath` (every
/// `DefId` is really just an interned def-path).
///
/// Note that if `id` is not local to this crate, the result will
use std::ptr;
use syntax_pos;
use crate::ty::query;
- use crate::errors::{Diagnostic, TRACK_DIAGNOSTICS};
+ use errors::{Diagnostic, TRACK_DIAGNOSTICS};
use rustc_data_structures::OnDrop;
use rustc_data_structures::sync::{self, Lrc, Lock};
use rustc_data_structures::thin_vec::ThinVec;
use std::fmt;
use rustc_target::spec::abi;
use syntax::ast;
-use crate::errors::{Applicability, DiagnosticBuilder};
+use errors::{Applicability, DiagnosticBuilder};
use syntax_pos::Span;
use crate::hir;
//! instance of a "folder" (a type which implements `TypeFolder`). Then
//! the setup is intended to be:
//!
-//! T.fold_with(F) --calls--> F.fold_T(T) --calls--> T.super_fold_with(F)
+//! T.fold_with(F) --calls--> F.fold_T(T) --calls--> T.super_fold_with(F)
//!
//! This way, when you define a new folder F, you can override
//! `fold_T()` to customize the behavior, and invoke `T.super_fold_with()`
//! proper thing.
//!
//! A `TypeFoldable` T can also be visited by a `TypeVisitor` V using similar setup:
-//! T.visit_with(V) --calls--> V.visit_T(T) --calls--> T.super_visit_with(V).
-//! These methods return true to indicate that the visitor has found what it is looking for
-//! and does not need to visit anything else.
+//!
+//! T.visit_with(V) --calls--> V.visit_T(T) --calls--> T.super_visit_with(V).
+//!
+//! These methods return true to indicate that the visitor has found what it is
+//! looking for, and does not need to visit anything else.
use crate::hir::def_id::DefId;
use crate::ty::{self, Binder, Ty, TyCtxt, TypeFlags};
self.super_visit_with(visitor)
}
- /// True if `self` has any late-bound regions that are either
+ /// Returns `true` if `self` has any late-bound regions that are either
/// bound by `binder` or bound by some binder outside of `binder`.
/// If `binder` is `ty::INNERMOST`, this indicates whether
/// there are any late-bound regions that appear free.
self.visit_with(&mut HasEscapingVarsVisitor { outer_index: binder })
}
- /// True if this `self` has any regions that escape `binder` (and
+ /// Returns `true` if this `self` has any regions that escape `binder` (and
/// hence are not bound by it).
fn has_vars_bound_above(&self, binder: ty::DebruijnIndex) -> bool {
self.has_vars_bound_at_or_above(binder.shifted_in(1))
}
}
-/// The TypeFolder trait defines the actual *folding*. There is a
+/// The `TypeFolder` trait defines the actual *folding*. There is a
/// method defined for every foldable type. Each of these has a
/// default implementation that does an "identity" fold. Within each
/// identity fold, it should invoke `foo.fold_with(self)` to fold each
});
}
- /// True if `callback` returns true for every region appearing free in `value`.
+ /// Returns `true` if `callback` returns `true` for every region appearing free in `value`.
pub fn all_free_regions_meet(
self,
value: &impl TypeFoldable<'tcx>,
!self.any_free_region_meets(value, |r| !callback(r))
}
- /// True if `callback` returns true for some region appearing free in `value`.
+ /// Returns `true` if `callback` returns `true` for some region appearing free in `value`.
pub fn any_free_region_meets(
self,
value: &impl TypeFoldable<'tcx>,
/// ^ ^ ^ ^
/// | | | | here, would be shifted in 1
/// | | | here, would be shifted in 2
- /// | | here, would be INNERMOST shifted in by 1
- /// | here, initially, binder would be INNERMOST
+ /// | | here, would be `INNERMOST` shifted in by 1
+ /// | here, initially, binder would be `INNERMOST`
/// ```
///
/// You see that, initially, *any* bound value is free,
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- /// Replace all regions bound by the given `Binder` with the
+ /// Replaces all regions bound by the given `Binder` with the
/// results returned by the closure; the closure is expected to
/// return a free region (relative to this binder), and hence the
/// binder is removed in the return type. The closure is invoked
/// once for each unique `BoundRegion`; multiple references to the
- /// same `BoundRegion` will reuse the previous result. A map is
+ /// same `BoundRegion` will reuse the previous result. A map is
/// returned at the end with each bound region and the free region
/// that replaced it.
///
self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t)
}
- /// Replace all escaping bound vars. The `fld_r` closure replaces escaping
+ /// Replaces all escaping bound vars. The `fld_r` closure replaces escaping
/// bound regions while the `fld_t` closure replaces escaping bound types.
pub fn replace_escaping_bound_vars<T, F, G>(
self,
}
}
- /// Replace all types or regions bound by the given `Binder`. The `fld_r`
+ /// Replaces all types or regions bound by the given `Binder`. The `fld_r`
/// closure replaces bound regions while the `fld_t` closure replaces bound
/// types.
pub fn replace_bound_vars<T, F, G>(
self.replace_escaping_bound_vars(value.skip_binder(), fld_r, fld_t)
}
- /// Replace any late-bound regions bound in `value` with
+ /// Replaces any late-bound regions bound in `value` with
/// free variants attached to `all_outlive_scope`.
pub fn liberate_late_bound_regions<T>(
&self,
collector.regions
}
- /// Replace any late-bound regions bound in `value` with `'erased`. Useful in codegen but also
+ /// Replaces any late-bound regions bound in `value` with `'erased`. Useful in codegen but also
/// method lookup and a few other places where precise region relationships are not required.
pub fn erase_late_bound_regions<T>(self, value: &Binder<T>) -> T
where T : TypeFoldable<'tcx>
self.replace_late_bound_regions(value, |_| self.types.re_erased).0
}
- /// Rewrite any late-bound regions so that they are anonymous. Region numbers are
+ /// Rewrites any late-bound regions so that they are anonymous. Region numbers are
/// assigned starting at 1 and increasing monotonically in the order traversed
/// by the fold operation.
///
/// The chief purpose of this function is to canonicalize regions so that two
/// `FnSig`s or `TraitRef`s which are equivalent up to region naming will become
- /// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and
+ /// structurally identical. For example, `for<'a, 'b> fn(&'a isize, &'b isize)` and
/// `for<'a, 'b> fn(&'b isize, &'a isize)` will become identical after anonymization.
pub fn anonymize_late_bound_regions<T>(self, sig: &Binder<T>) -> Binder<T>
where T : TypeFoldable<'tcx>,
/// scope to which it is attached, etc. An escaping var represents
/// a bound var for which this processing has not yet been done.
struct HasEscapingVarsVisitor {
- /// Anything bound by `outer_index` or "above" is escaping
+ /// Anything bound by `outer_index` or "above" is escaping.
outer_index: ty::DebruijnIndex,
}
current_index: ty::DebruijnIndex,
regions: FxHashSet<ty::BoundRegion>,
- /// If true, we only want regions that are known to be
+ /// `true` if we only want regions that are known to be
/// "constrained" when you equate this type with another type. In
/// particular, if you have e.g., `&'a u32` and `&'b u32`, equating
- /// them constraints `'a == 'b`. But if you have `<&'a u32 as
+ /// them constraints `'a == 'b`. But if you have `<&'a u32 as
/// Trait>::Foo` and `<&'b u32 as Trait>::Foo`, normalizing those
/// types may mean that `'a` and `'b` don't appear in the results,
/// so they are not considered *constrained*.
}
impl<'a, 'gcx, 'tcx> DefIdForest {
- /// Create an empty forest.
+ /// Creates an empty forest.
pub fn empty() -> DefIdForest {
DefIdForest {
root_ids: SmallVec::new(),
}
}
- /// Create a forest consisting of a single tree representing the entire
+ /// Creates a forest consisting of a single tree representing the entire
/// crate.
#[inline]
pub fn full(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> DefIdForest {
DefIdForest::from_id(crate_id)
}
- /// Create a forest containing a DefId and all its descendants.
+ /// Creates a forest containing a DefId and all its descendants.
pub fn from_id(id: DefId) -> DefIdForest {
let mut root_ids = SmallVec::new();
root_ids.push(id);
}
}
- /// Test whether the forest is empty.
+ /// Tests whether the forest is empty.
pub fn is_empty(&self) -> bool {
self.root_ids.is_empty()
}
- /// Test whether the forest contains a given DefId.
+ /// Tests whether the forest contains a given DefId.
pub fn contains(&self,
tcx: TyCtxt<'a, 'gcx, 'tcx>,
id: DefId) -> bool
/// `<T as Trait>::method` where `method` receives unsizeable `self: Self`.
VtableShim(DefId),
- /// \<fn() as FnTrait>::call_*
- /// def-id is FnTrait::call_*
+ /// `<fn() as FnTrait>::call_*`
+ /// `DefId` is `FnTrait::call_*`
FnPtrShim(DefId, Ty<'tcx>),
- /// <Trait as Trait>::fn
+ /// `<Trait as Trait>::fn`
Virtual(DefId, usize),
- /// <[mut closure] as FnOnce>::call_once
+ /// `<[mut closure] as FnOnce>::call_once`
ClosureOnceShim { call_once: DefId },
- /// drop_in_place::<T>; None for empty drop glue.
+ /// `drop_in_place::<T>`; `None` for empty drop glue.
DropGlue(DefId, Option<Ty<'tcx>>),
/// `<T as Clone>::clone` shim.
self.def.def_id()
}
- /// Resolve a (def_id, substs) pair to an (optional) instance -- most commonly,
+ /// Resolves a `(def_id, substs)` pair to an (optional) instance -- most commonly,
/// this is used to find the precise code that will run for a trait method invocation,
/// if known.
///
-use crate::hir;
use crate::hir::map::DefPathData;
use crate::hir::def_id::{CrateNum, DefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use crate::ty::{self, DefIdTree, Ty, TyCtxt};
})
}
-/// Add the `crate::` prefix to paths where appropriate.
+/// Adds the `crate::` prefix to paths where appropriate.
pub fn with_crate_prefix<F: FnOnce() -> R, R>(f: F) -> R {
SHOULD_PREFIX_WITH_CRATE.with(|flag| {
let old = flag.get();
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
- /// Returns a string identifying this def-id. This string is
+ /// Returns a string identifying this `DefId`. This string is
/// suitable for user output. It is relative to the current crate
/// root, unless with_forced_absolute_paths was used.
pub fn item_path_str(self, def_id: DefId) -> String {
self.item_path_str(self.hir().local_def_id(id))
}
- // FIXME(@ljedrz): replace the NodeId variant
- pub fn hir_path_str(self, id: hir::HirId) -> String {
- self.item_path_str(self.hir().local_def_id_from_hir_id(id))
- }
-
/// Returns a string identifying this def-id. This string is
/// suitable for user output. It always begins with a crate identifier.
pub fn absolute_item_path_str(self, def_id: DefId) -> String {
buffer.push(&format!("<impl at {}>", span_str));
}
- /// Returns the def-id of `def_id`'s parent in the def tree. If
+ /// Returns the `DefId` of `def_id`'s parent in the def tree. If
/// this returns `None`, then `def_id` represents a crate root or
/// inlined root.
pub fn parent_def_id(self, def_id: DefId) -> Option<DefId> {
}
/// As a heuristic, when we see an impl, if we see that the
-/// 'self-type' is a type defined in the same module as the impl,
+/// 'self type' is a type defined in the same module as the impl,
/// we can omit including the path to the impl itself. This
-/// function tries to find a "characteristic def-id" for a
+/// function tries to find a "characteristic `DefId`" for a
/// type. It's just a heuristic so it makes some questionable
/// decisions and we may want to adjust it later.
pub fn characteristic_def_id_of_type(ty: Ty<'_>) -> Option<DefId> {
#[derive(Debug)]
pub enum RootMode {
- /// Try to make a path relative to the local crate. In
+ /// Try to make a path relative to the local crate. In
/// particular, local paths have no prefix, and if the path comes
/// from an extern crate, start with the path to the `extern
/// crate` declaration.
}
}
- /// Get the Integer type from an attr::IntType.
+ /// Gets the Integer type from an attr::IntType.
fn from_attr<C: HasDataLayout>(cx: &C, ity: attr::IntType) -> Integer {
let dl = cx.data_layout();
}
}
- /// Find the appropriate Integer type and signedness for the given
+ /// Finds the appropriate Integer type and signedness for the given
/// signed discriminant range and #[repr] attribute.
/// N.B.: u128 values above i128::MAX will be treated as signed, but
/// that shouldn't affect anything, other than maybe debuginfo.
tcx.types.re_static,
tcx.mk_array(tcx.types.usize, 3),
)
- /* FIXME use actual fn pointers
+ /* FIXME: use actual fn pointers
Warning: naively computing the number of entries in the
vtable by counting the methods on the trait + methods on
all parent traits does not work, because some methods can
use rustc_data_structures::sync::{self, Lrc, ParallelIterator, par_iter};
use std::slice;
use std::{mem, ptr};
-use syntax::ast::{self, DUMMY_NODE_ID, Name, Ident, NodeId};
+use syntax::ast::{self, Name, Ident, NodeId};
use syntax::attr;
use syntax::ext::hygiene::Mark;
use syntax::symbol::{keywords, Symbol, LocalInternedString, InternedString};
}
impl AssociatedItemContainer {
- /// Asserts that this is the def-id of an associated item declared
- /// in a trait, and returns the trait def-id.
+ /// Asserts that this is the `DefId` of an associated item declared
+ /// in a trait, and returns the trait `DefId`.
pub fn assert_trait(&self) -> DefId {
match *self {
TraitContainer(id) => id,
/// The "header" of an impl is everything outside the body: a Self type, a trait
/// ref (in the case of a trait impl), and a set of predicates (from the
-/// bounds/where clauses).
+/// bounds / where-clauses).
#[derive(Clone, PartialEq, Eq, Hash, Debug)]
pub struct ImplHeader<'tcx> {
pub impl_def_id: DefId,
/// item.
pub struct CrateVariancesMap {
/// For each item with generics, maps to a vector of the variance
- /// of its generics. If an item has no generics, it will have no
+ /// of its generics. If an item has no generics, it will have no
/// entry.
pub variances: FxHashMap<DefId, Lrc<Vec<ty::Variance>>>,
impl Variance {
/// `a.xform(b)` combines the variance of a context with the
- /// variance of a type with the following meaning. If we are in a
+ /// variance of a type with the following meaning. If we are in a
/// context with variance `a`, and we encounter a type argument in
/// a position with variance `b`, then `a.xform(b)` is the new
/// variance with which the argument appears.
/// The ambient variance is covariant. A `fn` type is
/// contravariant with respect to its parameters, so the variance
/// within which both pointer types appear is
- /// `Covariant.xform(Contravariant)`, or `Contravariant`. `*const
+ /// `Covariant.xform(Contravariant)`, or `Contravariant`. `*const
/// T` is covariant with respect to `T`, so the variance within
/// which the first `Vec<i32>` appears is
- /// `Contravariant.xform(Covariant)` or `Contravariant`. The same
+ /// `Contravariant.xform(Covariant)` or `Contravariant`. The same
/// is true for its `i32` argument. In the `*mut T` case, the
/// variance of `Vec<i32>` is `Contravariant.xform(Invariant)`,
/// and hence the outermost type is `Invariant` with respect to
/// So, for a type without any late-bound things, like `u32`, this
/// will be *innermost*, because that is the innermost binder that
/// captures nothing. But for a type `&'D u32`, where `'D` is a
- /// late-bound region with debruijn index `D`, this would be `D + 1`
+ /// late-bound region with De Bruijn index `D`, this would be `D + 1`
/// -- the binder itself does not capture `D`, but `D` is captured
/// by an inner binder.
///
/// We call this concept an "exclusive" binder `D` because all
- /// debruijn indices within the type are contained within `0..D`
+ /// De Bruijn indices within the type are contained within `0..D`
/// (exclusive).
outer_exclusive_binder: ty::DebruijnIndex,
}
pub hir_id: hir::HirId,
}
-/// Upvars do not get their own node-id. Instead, we use the pair of
-/// the original var id (that is, the root variable that is referenced
-/// by the upvar) and the id of the closure expression.
+/// Upvars do not get their own `NodeId`. Instead, we use the pair of
+/// the original var ID (that is, the root variable that is referenced
+/// by the upvar) and the ID of the closure expression.
#[derive(Clone, Copy, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct UpvarId {
pub var_path: UpvarPath,
/// Data must be immutable and is aliasable.
ImmBorrow,
- /// Data must be immutable but not aliasable. This kind of borrow
+ /// Data must be immutable but not aliasable. This kind of borrow
/// cannot currently be expressed by the user and is used only in
/// implicit closure bindings. It is needed when the closure
/// is borrowing or mutating a mutable referent, e.g.:
/// Performs a substitution suitable for going from a
/// poly-trait-ref to supertraits that must hold if that
/// poly-trait-ref holds. This is slightly different from a normal
- /// substitution in terms of what happens with bound regions. See
+ /// substitution in terms of what happens with bound regions. See
/// lengthy comment below for details.
pub fn subst_supertrait(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
trait_ref: &ty::PolyTraitRef<'tcx>)
/// This kind of predicate has no *direct* correspondent in the
/// syntax, but it roughly corresponds to the syntactic forms:
///
-/// 1. `T: TraitRef<..., Item=Type>`
+/// 1. `T: TraitRef<..., Item = Type>`
/// 2. `<T as TraitRef<...>>::Item == Type` (NYI)
///
/// In particular, form #1 is "desugared" to the combination of a
}
/// Represents the bounds declared on a particular set of type
-/// parameters. Should eventually be generalized into a flag list of
-/// where clauses. You can obtain a `InstantiatedPredicates` list from a
+/// parameters. Should eventually be generalized into a flag list of
+/// where-clauses. You can obtain an `InstantiatedPredicates` list from a
/// `GenericPredicates` by using the `instantiate` method. Note that this method
/// reflects an important semantic invariant of `InstantiatedPredicates`: while
/// the `GenericPredicates` are expressed in terms of the bound type
/// struct Foo<T,U:Bar<T>> { ... }
///
/// Here, the `GenericPredicates` for `Foo` would contain a list of bounds like
-/// `[[], [U:Bar<T>]]`. Now if there were some particular reference
+/// `[[], [U:Bar<T>]]`. Now if there were some particular reference
/// like `Foo<isize,usize>`, then the `InstantiatedPredicates` would be `[[],
/// [usize:Bar<isize>]]`.
#[derive(Clone)]
/// Returns the "next" universe index in order -- this new index
/// is considered to extend all previous universes. This
- /// corresponds to entering a `forall` quantifier. So, for
+ /// corresponds to entering a `forall` quantifier. So, for
/// example, suppose we have this type in universe `U`:
///
/// ```
impl<'tcx> ParamEnv<'tcx> {
/// Construct a trait environment suitable for contexts where
- /// there are no where clauses in scope. Hidden types (like `impl
+ /// there are no where-clauses in scope. Hidden types (like `impl
/// Trait`) are left hidden, so this is suitable for ordinary
/// type-checking.
#[inline]
Self::new(List::empty(), Reveal::UserFacing, None)
}
- /// Construct a trait environment with no where clauses in scope
+ /// Construct a trait environment with no where-clauses in scope
/// where the values of all `impl Trait` and other hidden types
/// are revealed. This is suitable for monomorphized, post-typeck
/// environments like codegen or doing optimizations.
///
- /// N.B. If you want to have predicates in scope, use `ParamEnv::new`,
+ /// N.B., if you want to have predicates in scope, use `ParamEnv::new`,
/// or invoke `param_env.with_reveal_all()`.
#[inline]
pub fn reveal_all() -> Self {
/// Returns a new parameter environment with the same clauses, but
/// which "reveals" the true results of projections in all cases
- /// (even for associated types that are specializable). This is
+ /// (even for associated types that are specializable). This is
/// the desired behavior during codegen and certain other special
/// contexts; normally though we want to use `Reveal::UserFacing`,
/// which is the default.
#[derive(Copy, Clone, Debug)]
pub struct Destructor {
- /// The def-id of the destructor method
+ /// The `DefId` of the destructor method
pub did: DefId,
}
}
impl<'a, 'gcx, 'tcx> VariantDef {
- /// Create a new `VariantDef`.
+ /// Creates a new `VariantDef`.
///
- /// - `did` is the DefId used for the variant - for tuple-structs, it is the constructor DefId,
- /// and for everything else, it is the variant DefId.
+ /// - `did` is the `DefId` used for the variant.
+ /// This is the constructor `DefId` for tuple structs, and the variant `DefId` for everything
+ /// else.
/// - `attribute_def_id` is the DefId that has the variant's attributes.
- /// this is the struct DefId for structs, and the variant DefId for variants.
+ /// This is the struct `DefId` for structs, and the variant `DefId` for variants.
///
- /// Note that we *could* use the constructor DefId, because the constructor attributes
+ /// Note that we *could* use the constructor `DefId`, because the constructor attributes
/// redirect to the base attributes, but compiling a small crate requires
- /// loading the AdtDefs for all the structs in the universe (e.g., coherence for any
+ /// loading the `AdtDef`s for all the structs in the universe (e.g., coherence for any
/// built-in trait), and we do not want to load attributes twice.
///
/// If someone speeds up attribute loading to not be a performance concern, they can
- /// remove this hack and use the constructor DefId everywhere.
+ /// remove this hack and use the constructor `DefId` everywhere.
pub fn new(tcx: TyCtxt<'a, 'gcx, 'tcx>,
did: DefId,
ident: Ident,
}
/// Returns `true` if this `#[repr()]` should inhibit struct field reordering
- /// optimizations, such as with repr(C), repr(packed(1)), or repr(<int>).
+ /// optimizations, such as with `repr(C)`, `repr(packed(1))`, or `repr(<int>)`.
pub fn inhibit_struct_field_reordering_opt(&self) -> bool {
self.flags.intersects(ReprFlags::IS_UNOPTIMISABLE) || self.pack == 1 ||
self.int.is_some()
}
- /// Returns true if this `#[repr()]` should inhibit union abi optimisations
+ /// Returns `true` if this `#[repr()]` should inhibit union ABI optimisations.
pub fn inhibit_union_abi_opt(&self) -> bool {
self.c()
}
self.flags.contains(AdtFlags::HAS_CTOR)
}
- /// Returns whether this type is `#[fundamental]` for the purposes
+ /// Returns `true` if this type is `#[fundamental]` for the purposes
/// of coherence checking.
#[inline]
pub fn is_fundamental(&self) -> bool {
self.flags.contains(AdtFlags::IS_FUNDAMENTAL)
}
- /// Returns `true` if this is PhantomData<T>.
+ /// Returns `true` if this is `PhantomData<T>`.
#[inline]
pub fn is_phantom_data(&self) -> bool {
self.flags.contains(AdtFlags::IS_PHANTOM_DATA)
self.flags.contains(AdtFlags::IS_BOX)
}
- /// Returns whether this type has a destructor.
+ /// Returns `true` if this type has a destructor.
pub fn has_dtor(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> bool {
self.destructor(tcx).is_some()
}
})
}
- /// Compute the discriminant value used by a specific variant.
+ /// Computes the discriminant value used by a specific variant.
/// Unlike `discriminants`, this is (amortized) constant-time,
/// only doing at most one query for evaluating an explicit
/// discriminant (the last one before the requested variant),
explicit_value.checked_add(tcx, offset as u128).0
}
- /// Yields a DefId for the discriminant and an offset to add to it
+ /// Yields a `DefId` for the discriminant and an offset to add to it
/// Alternatively, if there is no explicit discriminant, returns the
- /// inferred discriminant directly
+ /// inferred discriminant directly.
pub fn discriminant_def_for_variant(
&self,
variant_index: VariantIdx,
}
/// Returns a list of types such that `Self: Sized` if and only
- /// if that type is Sized, or `TyErr` if this type is recursive.
+ /// if that type is `Sized`, or `TyErr` if this type is recursive.
///
- /// Oddly enough, checking that the sized-constraint is Sized is
+ /// Oddly enough, checking that the sized-constraint is `Sized` is
/// actually more expressive than checking all members:
- /// the Sized trait is inductive, so an associated type that references
- /// Self would prevent its containing ADT from being Sized.
+ /// the `Sized` trait is inductive, so an associated type that references
+ /// `Self` would prevent its containing ADT from being `Sized`.
///
/// Due to normalization being eager, this applies even if
- /// the associated type is behind a pointer, e.g., issue #31299.
+ /// the associated type is behind a pointer (e.g., issue #31299).
pub fn sized_constraint(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> &'tcx [Ty<'tcx>] {
match tcx.try_adt_sized_constraint(DUMMY_SP, self.did) {
Ok(tys) => tys,
}
}
-/// Represents the various closure traits in the Rust language. This
+/// Represents the various closure traits in the language. This
/// will determine the type of the environment (`self`, in the
/// desugaring) argument that the closure expects.
///
TypeWalker::new(self)
}
- /// Iterator that walks the immediate children of `self`. Hence
+ /// Iterator that walks the immediate children of `self`. Hence
/// `Foo<Bar<i32>, u32>` yields the sequence `[Bar<i32>, u32]`
/// (but not `i32`, like `walk`).
pub fn walk_shallow(&'tcx self) -> smallvec::IntoIter<walk::TypeWalkerArray<'tcx>> {
}
/// Walks `ty` and any types appearing within `ty`, invoking the
- /// callback `f` on each type. If the callback returns false, then the
+ /// callback `f` on each type. If the callback returns `false`, then the
/// children of the current type are ignored.
///
/// Note: prefer `ty.walk()` where possible.
self.typeck_tables_of(self.hir().body_owner_def_id(body))
}
- /// Returns an iterator of the def-ids for all body-owners in this
+ /// Returns an iterator of the `DefId`s for all body-owners in this
/// crate. If you would prefer to iterate over the bodies
/// themselves, you can do `self.hir().krate().body_ids.iter()`.
pub fn body_owners(
pub fn find_field_index(self, ident: Ident, variant: &VariantDef) -> Option<usize> {
variant.fields.iter().position(|field| {
- self.adjust_ident(ident, variant.did, DUMMY_NODE_ID).0 == field.ident.modern()
+ self.adjust_ident(ident, variant.did, hir::DUMMY_HIR_ID).0 == field.ident.modern()
})
}
}
}
- /// Return the possibly-auto-generated MIR of a (DefId, Subst) pair.
+ /// Returns the possibly-auto-generated MIR of a `(DefId, Subst)` pair.
pub fn instance_mir(self, instance: ty::InstanceDef<'gcx>)
-> &'gcx Mir<'gcx>
{
}
}
- /// Get the attributes of a definition.
+ /// Gets the attributes of a definition.
pub fn get_attrs(self, did: DefId) -> Attributes<'gcx> {
if let Some(id) = self.hir().as_local_hir_id(did) {
Attributes::Borrowed(self.hir().attrs_by_hir_id(id))
}
}
- /// Determine whether an item is annotated with an attribute.
+ /// Determines whether an item is annotated with an attribute.
pub fn has_attr(self, did: DefId, attr: &str) -> bool {
attr::contains_name(&self.get_attrs(did), attr)
}
self.optimized_mir(def_id).generator_layout.as_ref().unwrap()
}
- /// Given the def-id of an impl, return the def_id of the trait it implements.
- /// If it implements no trait, return `None`.
+ /// Given the `DefId` of an impl, returns the `DefId` of the trait it implements.
+ /// If it implements no trait, returns `None`.
pub fn trait_id_of_impl(self, def_id: DefId) -> Option<DefId> {
self.impl_trait_ref(def_id).map(|tr| tr.def_id)
}
- /// If the given defid describes a method belonging to an impl, return the
- /// def-id of the impl that the method belongs to. Otherwise, return `None`.
+ /// If the given defid describes a method belonging to an impl, returns the
+ /// `DefId` of the impl that the method belongs to; otherwise, returns `None`.
pub fn impl_of_method(self, def_id: DefId) -> Option<DefId> {
let item = if def_id.krate != LOCAL_CRATE {
if let Some(Def::Method(_)) = self.describe_def(def_id) {
}
}
- // Hygienically compare a use-site name (`use_name`) for a field or an associated item with its
- // supposed definition name (`def_name`). The method also needs `DefId` of the supposed
- // definition's parent/scope to perform comparison.
+ /// Hygienically compares a use-site name (`use_name`) for a field or an associated item with
+ /// its supposed definition name (`def_name`). The method also needs `DefId` of the supposed
+ /// definition's parent/scope to perform comparison.
pub fn hygienic_eq(self, use_name: Ident, def_name: Ident, def_parent_def_id: DefId) -> bool {
- self.adjust_ident(use_name, def_parent_def_id, DUMMY_NODE_ID).0 == def_name.modern()
+ self.adjust_ident(use_name, def_parent_def_id, hir::DUMMY_HIR_ID).0 == def_name.modern()
}
- pub fn adjust_ident(self, mut ident: Ident, scope: DefId, block: NodeId) -> (Ident, DefId) {
+ pub fn adjust_ident(self, mut ident: Ident, scope: DefId, block: hir::HirId) -> (Ident, DefId) {
ident = ident.modern();
let target_expansion = match scope.krate {
LOCAL_CRATE => self.hir().definitions().expansion_that_defined(scope.index),
let scope = match ident.span.adjust(target_expansion) {
Some(actual_expansion) =>
self.hir().definitions().parent_module_of_macro_def(actual_expansion),
- None if block == DUMMY_NODE_ID => DefId::local(CRATE_DEF_INDEX), // Dummy DefId
- None => self.hir().get_module_parent(block),
+ None if block == hir::DUMMY_HIR_ID => DefId::local(CRATE_DEF_INDEX), // Dummy DefId
+ None => self.hir().get_module_parent_by_hir_id(block),
};
(ident, scope)
}
parent_item.node)
}
-/// Calculates the Sized-constraint.
+/// Calculates the `Sized` constraint.
///
/// In fact, there are only a few options for the types in the constraint:
/// - an obviously-unsized type
tcx.hir().span_if_local(def_id).unwrap()
}
-/// If the given def ID describes an item belonging to a trait,
-/// return the ID of the trait that the trait item belongs to.
-/// Otherwise, return `None`.
+/// If the given `DefId` describes an item belonging to a trait,
+/// returns the `DefId` of the trait that the trait item belongs to;
+/// otherwise, returns `None`.
fn trait_of_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Option<DefId> {
tcx.opt_associated_item(def_id)
.and_then(|associated_item| {
if tcx.sess.opts.debugging_opts.chalk { Some(def_id) } else { None }
);
- let body_id = tcx.hir().as_local_node_id(def_id).map_or(DUMMY_NODE_ID, |id| {
- tcx.hir().maybe_body_owned_by(id).map_or(id, |body| body.node_id)
+ let body_id = tcx.hir().as_local_hir_id(def_id).map_or(hir::DUMMY_HIR_ID, |id| {
+ tcx.hir().maybe_body_owned_by_by_hir_id(id).map_or(id, |body| body.hir_id)
});
let cause = traits::ObligationCause::misc(tcx.def_span(def_id), body_id);
traits::normalize_param_env_or_error(tcx, def_id, unnormalized_env, cause)
}
}
-/// If `def_id` is an issue 33140 hack impl, return its self type. Otherwise
-/// return None.
+/// If `def_id` is an issue 33140 hack impl, returns its self type; otherwise, returns `None`.
///
-/// See ImplOverlapKind::Issue33140 for more details.
+/// See [`ImplOverlapKind::Issue33140`] for more details.
fn issue33140_self_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId)
-> Option<Ty<'tcx>>
rustc_data_structures::stable_hasher::{StableHasherResult, StableHasher, HashStable},
};
-/// Indicates the state of a query for a given key in a query map
+/// Indicates the state of a query for a given key in a query map.
pub(super) enum QueryResult<'tcx> {
- /// An already executing query. The query job can be used to await for its completion
+ /// An already executing query. The query job can be used to await for its completion.
Started(Lrc<QueryJob<'tcx>>),
- /// The query panicked. Queries trying to wait on this will raise a fatal error / silently panic
+ /// The query panicked. Queries trying to wait on this will raise a fatal error or
+ /// silently panic.
Poisoned,
}
-/// A span and a query key
+/// Represents a span and a query key.
#[derive(Clone, Debug)]
pub struct QueryInfo<'tcx> {
- /// The span for a reason this query was required
+ /// The span corresponding to the reason for which this query was required.
pub span: Span,
pub query: Query<'tcx>,
}
-/// A object representing an active query job.
+/// Represents an active query job.
pub struct QueryJob<'tcx> {
pub info: QueryInfo<'tcx>,
/// The parent query job which created this job and is implicitly waiting on it.
pub parent: Option<Lrc<QueryJob<'tcx>>>,
- /// The latch which is used to wait on this job
+ /// The latch that is used to wait on this job.
#[cfg(parallel_compiler)]
latch: QueryLatch<'tcx>,
}
impl<'tcx> QueryJob<'tcx> {
- /// Creates a new query job
+ /// Creates a new query job.
pub fn new(info: QueryInfo<'tcx>, parent: Option<Lrc<QueryJob<'tcx>>>) -> Self {
QueryJob {
info,
}
}
- /// Remove a single waiter from the list of waiters.
+ /// Removes a single waiter from the list of waiters.
/// This is used to break query cycles.
fn extract_waiter(
&self,
use crate::dep_graph::{self, DepConstructor, DepNode};
-use crate::errors::DiagnosticBuilder;
use crate::hir::def_id::{CrateNum, DefId, DefIndex};
use crate::hir::def::{Def, Export};
use crate::hir::{self, TraitCandidate, ItemLocalId, CodegenFnAttrs};
-use rustc_data_structures::svh::Svh;
use crate::infer::canonical::{self, Canonical};
use crate::lint;
use crate::middle::borrowck::BorrowCheckResult;
use crate::util::profiling::ProfileCategory::*;
use crate::session::Session;
+use errors::DiagnosticBuilder;
+use rustc_data_structures::svh::Svh;
use rustc_data_structures::bit_set::BitSet;
use rustc_data_structures::indexed_vec::IndexVec;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
/// Records the type of every item.
[] fn type_of: TypeOfItem(DefId) -> Ty<'tcx>,
- /// Maps from the def-id of an item (trait/struct/enum/fn) to its
+ /// Maps from the `DefId` of an item (trait/struct/enum/fn) to its
/// associated generics.
[] fn generics_of: GenericsOfItem(DefId) -> &'tcx ty::Generics,
- /// Maps from the def-id of an item (trait/struct/enum/fn) to the
- /// predicates (where clauses) that must be proven true in order
+ /// Maps from the `DefId` of an item (trait/struct/enum/fn) to the
+ /// predicates (where-clauses) that must be proven true in order
/// to reference it. This is almost always the "predicates query"
/// that you want.
///
/// user.)
[] fn predicates_of: PredicatesOfItem(DefId) -> Lrc<ty::GenericPredicates<'tcx>>,
- /// Maps from the def-id of an item (trait/struct/enum/fn) to the
- /// predicates (where clauses) directly defined on it. This is
+ /// Maps from the `DefId` of an item (trait/struct/enum/fn) to the
+ /// predicates (where-clauses) directly defined on it. This is
/// equal to the `explicit_predicates_of` predicates plus the
/// `inferred_outlives_of` predicates.
[] fn predicates_defined_on: PredicatesDefinedOnItem(DefId)
/// Foo<'a, T> { x: &'a T }`, this would return `T: 'a`).
[] fn inferred_outlives_of: InferredOutlivesOf(DefId) -> Lrc<Vec<ty::Predicate<'tcx>>>,
- /// Maps from the def-id of a trait to the list of
+ /// Maps from the `DefId` of a trait to the list of
/// super-predicates. This is a subset of the full list of
/// predicates. We store these in a separate map because we must
/// evaluate them even during type conversion, often before the
},
Codegen {
- /// Set of all the def-ids in this crate that have MIR associated with
+ /// Set of all the `DefId`s in this crate that have MIR associated with
/// them. This includes all the body owners, but also things like struct
/// constructors.
[] fn mir_keys: mir_keys(CrateNum) -> Lrc<DefIdSet>,
/// the value isn't known except to the pass itself.
[] fn mir_const_qualif: MirConstQualif(DefId) -> (u8, Lrc<BitSet<mir::Local>>),
- /// Fetch the MIR for a given def-id right after it's built - this includes
+ /// Fetch the MIR for a given `DefId` right after it's built - this includes
/// unreachable code.
[] fn mir_built: MirBuilt(DefId) -> &'tcx Steal<mir::Mir<'tcx>>,
- /// Fetch the MIR for a given def-id up till the point where it is
+ /// Fetch the MIR for a given `DefId` up till the point where it is
/// ready for const evaluation.
///
/// See the README for the `mir` module for details.
},
TypeChecking {
- /// The result of unsafety-checking this def-id.
+ /// The result of unsafety-checking this `DefId`.
[] fn unsafety_check_result: UnsafetyCheckResult(DefId) -> mir::UnsafetyCheckResult,
/// HACK: when evaluated, this reports a "unsafe derive on repr(packed)" error
TypeChecking {
/// Gets a complete map from all types to their inherent impls.
/// Not meant to be used directly outside of coherence.
- /// (Defined only for LOCAL_CRATE)
+ /// (Defined only for `LOCAL_CRATE`.)
[] fn crate_inherent_impls: crate_inherent_impls_dep_node(CrateNum)
-> Lrc<CrateInherentImpls>,
- /// Checks all types in the krate for overlap in their inherent impls. Reports errors.
+ /// Checks all types in the crate for overlap in their inherent impls. Reports errors.
/// Not meant to be used directly outside of coherence.
- /// (Defined only for LOCAL_CRATE)
+ /// (Defined only for `LOCAL_CRATE`.)
[] fn crate_inherent_impls_overlap_check: inherent_impls_overlap_check_dep_node(CrateNum)
-> (),
},
Other {
/// Evaluate a constant without running sanity checks
///
- /// DO NOT USE THIS outside const eval. Const eval uses this to break query cycles during
- /// validation. Please add a comment to every use site explaining why using `const_eval`
- /// isn't sufficient
+ /// **Do not use this** outside const eval. Const eval uses this to break query cycles
+ /// during validation. Please add a comment to every use site explaining why using
+ /// `const_eval` isn't sufficient.
[] fn const_eval_raw: const_eval_raw_dep_node(ty::ParamEnvAnd<'tcx, GlobalId<'tcx>>)
-> ConstEvalRawResult<'tcx>,
Other {
[] fn reachable_set: reachability_dep_node(CrateNum) -> ReachableSet,
- /// Per-body `region::ScopeTree`. The `DefId` should be the owner-def-id for the body;
+ /// Per-body `region::ScopeTree`. The `DefId` should be the owner `DefId` for the body;
/// in the case of closures, this will be redirected to the enclosing function.
[] fn region_scope_tree: RegionScopeTree(DefId) -> Lrc<region::ScopeTree>,
-> Lrc<specialization_graph::Graph>,
[] fn is_object_safe: ObjectSafety(DefId) -> bool,
- /// Get the ParameterEnvironment for a given item; this environment
+ /// Gets the `ParameterEnvironment` for a given item; this environment
/// will be in "user-facing" mode, meaning that it is suitabe for
/// type-checking etc, and it does not normalize specializable
/// associated types. This is almost always what you want,
[] fn foreign_modules: ForeignModules(CrateNum) -> Lrc<Vec<ForeignModule>>,
- /// Identifies the entry-point (e.g. the `main` function) for a given
+ /// Identifies the entry-point (e.g., the `main` function) for a given
/// crate, returning `None` if there is no entry point (such as for library crates).
[] fn entry_fn: EntryFn(CrateNum) -> Option<(DefId, EntryFnType)>,
[] fn plugin_registrar_fn: PluginRegistrarFn(CrateNum) -> Option<DefId>,
use crate::dep_graph::{DepNodeIndex, SerializedDepNodeIndex};
-use crate::errors::Diagnostic;
use crate::hir;
use crate::hir::def_id::{CrateNum, DefIndex, DefId, LocalDefId, LOCAL_CRATE};
use crate::hir::map::definitions::DefPathHash;
use crate::ich::{CachingSourceMapView, Fingerprint};
use crate::mir::{self, interpret};
use crate::mir::interpret::{AllocDecodingSession, AllocDecodingState};
-use rustc_data_structures::fx::FxHashMap;
-use rustc_data_structures::thin_vec::ThinVec;
-use rustc_data_structures::sync::{Lrc, Lock, HashMapExt, Once};
-use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use crate::rustc_serialize::{Decodable, Decoder, Encodable, Encoder, opaque,
SpecializedDecoder, SpecializedEncoder,
UseSpecializedDecodable, UseSpecializedEncodable};
use crate::session::{CrateDisambiguator, Session};
+use crate::ty;
+use crate::ty::codec::{self as ty_codec, TyDecoder, TyEncoder};
+use crate::ty::context::TyCtxt;
+use crate::util::common::time;
+
+use errors::Diagnostic;
+use rustc_data_structures::fx::FxHashMap;
+use rustc_data_structures::thin_vec::ThinVec;
+use rustc_data_structures::sync::{Lrc, Lock, HashMapExt, Once};
+use rustc_data_structures::indexed_vec::{IndexVec, Idx};
use std::mem;
use syntax::ast::NodeId;
use syntax::source_map::{SourceMap, StableSourceFileId};
use syntax_pos::{BytePos, Span, DUMMY_SP, SourceFile};
use syntax_pos::hygiene::{Mark, SyntaxContext, ExpnInfo};
-use crate::ty;
-use crate::ty::codec::{self as ty_codec, TyDecoder, TyEncoder};
-use crate::ty::context::TyCtxt;
-use crate::util::common::time;
const TAG_FILE_FOOTER: u128 = 0xC0FFEE_C0FFEE_C0FFEE_C0FFEE_C0FFEE;
}
impl<'sess> OnDiskCache<'sess> {
- /// Create a new OnDiskCache instance from the serialized data in `data`.
+ /// Creates a new `OnDiskCache` instance from the serialized data in `data`.
pub fn new(sess: &'sess Session, data: Vec<u8>, start_pos: usize) -> OnDiskCache<'sess> {
debug_assert!(sess.opts.incremental.is_some());
})
}
- /// Load a diagnostic emitted during the previous compilation session.
+ /// Loads a diagnostic emitted during the previous compilation session.
pub fn load_diagnostics<'a, 'tcx>(&self,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
dep_node_index: SerializedDepNodeIndex)
diagnostics.unwrap_or_default()
}
- /// Store a diagnostic emitted during the current compilation session.
+ /// Stores a diagnostic emitted during the current compilation session.
/// Anything stored like this will be available via `load_diagnostics` in
/// the next compilation session.
#[inline(never)]
}
/// Returns the cached query result if there is something in the cache for
- /// the given SerializedDepNodeIndex. Otherwise returns None.
+ /// the given `SerializedDepNodeIndex`; otherwise returns `None`.
pub fn try_load_query_result<'tcx, T>(&self,
tcx: TyCtxt<'_, 'tcx, 'tcx>,
dep_node_index: SerializedDepNodeIndex)
"query result")
}
- /// Store a diagnostic emitted during computation of an anonymous query.
+ /// Stores a diagnostic emitted during computation of an anonymous query.
/// Since many anonymous queries can share the same `DepNode`, we aggregate
/// them -- as opposed to regular queries where we assume that there is a
/// 1:1 relationship between query-key and `DepNode`.
-//! The implementation of the query system itself. Defines the macros
-//! that generate the actual methods on tcx which find and execute the
-//! provider, manage the caches, and so forth.
+//! The implementation of the query system itself. This defines the macros that
+//! generate the actual methods on tcx which find and execute the provider,
+//! manage the caches, and so forth.
use crate::dep_graph::{DepNodeIndex, DepNode, DepKind, SerializedDepNodeIndex};
-use crate::errors::DiagnosticBuilder;
-use crate::errors::Level;
-use crate::errors::Diagnostic;
-use crate::errors::FatalError;
use crate::ty::tls;
use crate::ty::{TyCtxt};
use crate::ty::query::Query;
use crate::util::common::{profq_msg, ProfileQueriesMsg, QueryMsg};
+use errors::DiagnosticBuilder;
+use errors::Level;
+use errors::Diagnostic;
+use errors::FatalError;
use rustc_data_structures::fx::{FxHashMap};
use rustc_data_structures::sync::{Lrc, Lock};
use rustc_data_structures::thin_vec::ThinVec;
let job = match lock.active.entry((*key).clone()) {
Entry::Occupied(entry) => {
match *entry.get() {
- QueryResult::Started(ref job) => job.clone(),
+ QueryResult::Started(ref job) => {
+ // For parallel queries, we'll block and wait until the query running
+ // in another thread has completed. Record how long we wait in the
+ // self-profiler.
+ #[cfg(parallel_compiler)]
+ tcx.sess.profiler(|p| p.query_blocked_start(Q::NAME, Q::CATEGORY));
+
+ job.clone()
+ },
QueryResult::Poisoned => FatalError.raise(),
}
}
// thread
#[cfg(parallel_compiler)]
{
- if let Err(cycle) = job.r#await(tcx, span) {
+ let result = job.r#await(tcx, span);
+ tcx.sess.profiler(|p| p.query_blocked_end(Q::NAME, Q::CATEGORY));
+
+ if let Err(cycle) = result {
return TryGetJob::JobCompleted(Err(cycle));
}
}
job.signal_complete();
}
-
- /// Executes a job by changing the ImplicitCtxt to point to the
- /// new query job while it executes. It returns the diagnostics
- /// captured during execution and the actual result.
- #[inline(always)]
- pub(super) fn start<'lcx, F, R>(
- &self,
- tcx: TyCtxt<'_, 'tcx, 'lcx>,
- diagnostics: Option<&Lock<ThinVec<Diagnostic>>>,
- compute: F)
- -> R
- where
- F: for<'b> FnOnce(TyCtxt<'b, 'tcx, 'lcx>) -> R
- {
- // The TyCtxt stored in TLS has the same global interner lifetime
- // as `tcx`, so we use `with_related_context` to relate the 'gcx lifetimes
- // when accessing the ImplicitCtxt
- tls::with_related_context(tcx, move |current_icx| {
- // Update the ImplicitCtxt to point to our new query job
- let new_icx = tls::ImplicitCtxt {
- tcx: tcx.global_tcx(),
- query: Some(self.job.clone()),
- diagnostics,
- layout_depth: current_icx.layout_depth,
- task_deps: current_icx.task_deps,
- };
-
- // Use the ImplicitCtxt while we execute the query
- tls::enter_context(&new_icx, |_| {
- compute(tcx)
- })
- })
- }
-
}
#[inline(always)]
}
impl<'a, 'gcx, 'tcx> TyCtxt<'a, 'gcx, 'tcx> {
+ /// Executes a job by changing the ImplicitCtxt to point to the
+ /// new query job while it executes. It returns the diagnostics
+ /// captured during execution and the actual result.
+ #[inline(always)]
+ pub(super) fn start_query<F, R>(
+ self,
+ job: Lrc<QueryJob<'gcx>>,
+ diagnostics: Option<&Lock<ThinVec<Diagnostic>>>,
+ compute: F)
+ -> R
+ where
+ F: for<'b, 'lcx> FnOnce(TyCtxt<'b, 'gcx, 'lcx>) -> R
+ {
+ // The TyCtxt stored in TLS has the same global interner lifetime
+ // as `self`, so we use `with_related_context` to relate the 'gcx lifetimes
+ // when accessing the ImplicitCtxt
+ tls::with_related_context(self, move |current_icx| {
+ // Update the ImplicitCtxt to point to our new query job
+ let new_icx = tls::ImplicitCtxt {
+ tcx: self.global_tcx(),
+ query: Some(job),
+ diagnostics,
+ layout_depth: current_icx.layout_depth,
+ task_deps: current_icx.task_deps,
+ };
+
+ // Use the ImplicitCtxt while we execute the query
+ tls::enter_context(&new_icx, |_| {
+ compute(self.global_tcx())
+ })
+ })
+ }
+
#[inline(never)]
#[cold]
pub(super) fn report_cycle(
self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
- job.start(self, diagnostics, |tcx| {
+ self.start_query(job.job.clone(), diagnostics, |tcx| {
tcx.dep_graph.with_anon_task(dep_node.kind, || {
Q::compute(tcx.global_tcx(), key)
})
}
if !dep_node.kind.is_input() {
- if let Some((prev_dep_node_index,
- dep_node_index)) = self.dep_graph.try_mark_green_and_read(self,
- &dep_node) {
- return Ok(self.load_from_disk_and_cache_in_memory::<Q>(
- key,
- job,
- prev_dep_node_index,
- dep_node_index,
- &dep_node
- ))
+ // The diagnostics for this query will be
+ // promoted to the current session during
+ // try_mark_green(), so we can ignore them here.
+ let loaded = self.start_query(job.job.clone(), None, |tcx| {
+ let marked = tcx.dep_graph.try_mark_green_and_read(tcx, &dep_node);
+ marked.map(|(prev_dep_node_index, dep_node_index)| {
+ (tcx.load_from_disk_and_cache_in_memory::<Q>(
+ key.clone(),
+ prev_dep_node_index,
+ dep_node_index,
+ &dep_node
+ ), dep_node_index)
+ })
+ });
+ if let Some((result, dep_node_index)) = loaded {
+ job.complete(&result, dep_node_index);
+ return Ok(result);
}
}
fn load_from_disk_and_cache_in_memory<Q: QueryDescription<'gcx>>(
self,
key: Q::Key,
- job: JobOwner<'a, 'gcx, Q>,
prev_dep_node_index: SerializedDepNodeIndex,
dep_node_index: DepNodeIndex,
dep_node: &DepNode
// First we try to load the result from the on-disk cache
let result = if Q::cache_on_disk(self.global_tcx(), key.clone()) &&
self.sess.opts.debugging_opts.incremental_queries {
+ self.sess.profiler(|p| p.incremental_load_result_start(Q::NAME));
let result = Q::try_load_from_disk(self.global_tcx(), prev_dep_node_index);
+ self.sess.profiler(|p| p.incremental_load_result_end(Q::NAME));
// We always expect to find a cached result for things that
// can be forced from DepNode.
self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
- // The diagnostics for this query have already been
- // promoted to the current session during
- // try_mark_green(), so we can ignore them here.
- let result = job.start(self, None, |tcx| {
- // The dep-graph for this computation is already in
- // place
- tcx.dep_graph.with_ignore(|| {
- Q::compute(tcx, key)
- })
+ // The dep-graph for this computation is already in
+ // place
+ let result = self.dep_graph.with_ignore(|| {
+ Q::compute(self, key)
});
self.sess.profiler(|p| p.end_query(Q::NAME, Q::CATEGORY));
self.dep_graph.mark_loaded_from_cache(dep_node_index, true);
}
- job.complete(&result, dep_node_index);
-
result
}
self.sess.profiler(|p| p.start_query(Q::NAME, Q::CATEGORY));
let ((result, dep_node_index), diagnostics) = with_diagnostics(|diagnostics| {
- job.start(self, diagnostics, |tcx| {
+ self.start_query(job.job.clone(), diagnostics, |tcx| {
if dep_node.kind.is_eval_always() {
tcx.dep_graph.with_eval_always_task(dep_node,
tcx,
}
impl<'a, $tcx, 'lcx> TyCtxt<'a, $tcx, 'lcx> {
- /// Return a transparent wrapper for `TyCtxt` which ensures queries
- /// are executed instead of returing their result
+ /// Returns a transparent wrapper for `TyCtxt`, which ensures queries
+ /// are executed instead of just returning their results.
#[inline(always)]
pub fn ensure(self) -> TyCtxtEnsure<'a, $tcx, 'lcx> {
TyCtxtEnsure {
}
}
- /// Return a transparent wrapper for `TyCtxt` which uses
+ /// Returns a transparent wrapper for `TyCtxt` which uses
/// `span` as the location of queries performed through it.
#[inline(always)]
pub fn at(self, span: Span) -> TyCtxtAt<'a, $tcx, 'lcx> {
(tcx: $tcx:tt,
input: ($(([$($modifiers:tt)*] [$($attr:tt)*] [$name:ident]))*)) => {
pub struct Queries<$tcx> {
- /// This provides access to the incr. comp. on-disk cache for query results.
+ /// This provides access to the incremental compilation on-disk cache for query results.
/// Do not access this directly. It is only meant to be used by
/// `DepGraph::try_mark_green()` and the query infrastructure.
pub(crate) on_disk_cache: OnDiskCache<'tcx>,
///
/// Now, if force_from_dep_node() would always fail, it would be pretty useless.
/// Fortunately, we can use some contextual information that will allow us to
-/// reconstruct query-keys for certain kinds of DepNodes. In particular, we
-/// enforce by construction that the GUID/fingerprint of certain DepNodes is a
-/// valid DefPathHash. Since we also always build a huge table that maps every
-/// DefPathHash in the current codebase to the corresponding DefId, we have
+/// reconstruct query-keys for certain kinds of `DepNode`s. In particular, we
+/// enforce by construction that the GUID/fingerprint of certain `DepNode`s is a
+/// valid `DefPathHash`. Since we also always build a huge table that maps every
+/// `DefPathHash` in the current codebase to the corresponding `DefId`, we have
/// everything we need to re-run the query.
///
/// Take the `mir_validated` query as an example. Like many other queries, it
-/// just has a single parameter: the DefId of the item it will compute the
-/// validated MIR for. Now, when we call `force_from_dep_node()` on a dep-node
-/// with kind `MirValidated`, we know that the GUID/fingerprint of the dep-node
-/// is actually a DefPathHash, and can therefore just look up the corresponding
-/// DefId in `tcx.def_path_hash_to_def_id`.
+/// just has a single parameter: the `DefId` of the item it will compute the
+/// validated MIR for. Now, when we call `force_from_dep_node()` on a `DepNode`
+/// with kind `MirValidated`, we know that the GUID/fingerprint of the `DepNode`
+/// is actually a `DefPathHash`, and can therefore just look up the corresponding
+/// `DefId` in `tcx.def_path_hash_to_def_id`.
///
/// When you implement a new query, it will likely have a corresponding new
-/// DepKind, and you'll have to support it here in `force_from_dep_node()`. As
-/// a rule of thumb, if your query takes a DefId or DefIndex as sole parameter,
+/// `DepKind`, and you'll have to support it here in `force_from_dep_node()`. As
+/// a rule of thumb, if your query takes a `DefId` or `DefIndex` as sole parameter,
/// then `force_from_dep_node()` should not fail for it. Otherwise, you can just
/// add it to the "We don't have enough information to reconstruct..." group in
/// the match below.
/// Returns a static string we can use for printouts.
fn tag(&self) -> &'static str;
- /// Returns true if the value `a` is the "expected" type in the
+ /// Returns `true` if the value `a` is the "expected" type in the
/// relation. Just affects error messages.
fn a_is_expected(&self) -> bool;
/// Steal<Mir<'tcx>>` (to be very specific). Now we can read from this
/// as much as we want (using `borrow()`), but you can also
/// `steal()`. Once you steal, any further attempt to read will panic.
-/// Therefore we know that -- assuming no ICE -- nobody is observing
+/// Therefore, we know that -- assuming no ICE -- nobody is observing
/// the fact that the MIR was updated.
///
/// Obviously, whenever you have a query that yields a `Steal` value,
/// you must treat it with caution, and make sure that you know that
/// -- once the value is stolen -- it will never be read from again.
-///
-/// FIXME(#41710) -- what is the best way to model linear queries?
+//
+// FIXME(#41710): what is the best way to model linear queries?
pub struct Steal<T> {
value: RwLock<Option<T>>
}
/// Named region parameters for functions (a in &'a T)
///
- /// The def-id is needed to distinguish free regions in
+ /// The `DefId` is needed to distinguish free regions in
/// the event of shadowing.
BrNamed(DefId, InternedString),
Bool,
/// The primitive character type; holds a Unicode scalar value
- /// (a non-surrogate code point). Written as `char`.
+ /// (a non-surrogate code point). Written as `char`.
Char,
/// A primitive signed integer type. For example, `i32`.
/// An array with the given length. Written as `[T; n]`.
Array(Ty<'tcx>, &'tcx ty::LazyConst<'tcx>),
- /// The pointee of an array slice. Written as `[T]`.
+ /// The pointee of an array slice. Written as `[T]`.
Slice(Ty<'tcx>),
/// A raw pointer. Written as `*mut T` or `*const T`
/// ```
FnDef(DefId, &'tcx Substs<'tcx>),
- /// A pointer to a function. Written as `fn() -> i32`.
+ /// A pointer to a function. Written as `fn() -> i32`.
///
/// For example the type of `bar` here:
///
/// The never type `!`
Never,
- /// A tuple type. For example, `(i32, bool)`.
+ /// A tuple type. For example, `(i32, bool)`.
Tuple(&'tcx List<Ty<'tcx>>),
- /// The projection of an associated type. For example,
+ /// The projection of an associated type. For example,
/// `<T as Trait<..>>::N`.
Projection(ProjectionTy<'tcx>),
///
/// All right, you say, but why include the type parameters from the
/// original function then? The answer is that codegen may need them
-/// when monomorphizing, and they may not appear in the upvars. A
+/// when monomorphizing, and they may not appear in the upvars. A
/// closure could capture no variables but still make use of some
/// in-scope type parameter with a bound (e.g., if our example above
/// had an extra `U: Default`, and the closure called `U::default()`).
/// ## Generators
///
/// Perhaps surprisingly, `ClosureSubsts` are also used for
-/// generators. In that case, what is written above is only half-true
+/// generators. In that case, what is written above is only half-true
/// -- the set of type parameters is similar, but the role of CK and
-/// CS are different. CK represents the "yield type" and CS
+/// CS are different. CK represents the "yield type" and CS
/// represents the "return type" of the generator.
///
/// It'd be nice to split this struct into ClosureSubsts and
self.split(def_id, tcx).return_ty
}
- /// Return the "generator signature", which consists of its yield
+ /// Returns the "generator signature", which consists of its yield
/// and return types.
///
- /// NB. Some bits of the code prefers to see this wrapped in a
+ /// N.B., some bits of the code prefer to see this wrapped in a
/// binder, but it never contains bound regions. Probably this
/// function should be removed.
pub fn poly_sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> PolyGenSig<'tcx> {
ty::Binder::dummy(self.sig(def_id, tcx))
}
- /// Return the "generator signature", which consists of its yield
+ /// Returns the "generator signature", which consists of its yield
/// and return types.
pub fn sig(self, def_id: DefId, tcx: TyCtxt<'_, '_, '_>) -> GenSig<'tcx> {
ty::GenSig {
#[derive(Debug, Copy, Clone, PartialEq, PartialOrd, Ord, Eq, Hash, RustcEncodable, RustcDecodable)]
pub enum ExistentialPredicate<'tcx> {
- /// e.g., Iterator
+ /// E.g., `Iterator`.
Trait(ExistentialTraitRef<'tcx>),
- /// e.g., Iterator::Item = T
+ /// E.g., `Iterator::Item = T`.
Projection(ExistentialProjection<'tcx>),
- /// e.g., Send
+ /// E.g., `Send`.
AutoTrait(DefId),
}
}
/// A complete reference to a trait. These take numerous guises in syntax,
-/// but perhaps the most recognizable form is in a where clause:
+/// but perhaps the most recognizable form is in a where-clause:
///
/// T: Foo<U>
///
-/// This would be represented by a trait-reference where the def-id is the
-/// def-id for the trait `Foo` and the substs define `T` as parameter 0,
+/// This would be represented by a trait-reference where the `DefId` is the
+/// `DefId` for the trait `Foo` and the substs define `T` as parameter 0,
/// and `U` as parameter 1.
///
/// Trait references also appear in object types like `Foo<U>`, but in
}
}
- /// Object types don't have a self-type specified. Therefore, when
+ /// Object types don't have a self type specified. Therefore, when
/// we convert the principal trait-ref into a normal trait-ref,
- /// you must give *some* self-type. A common choice is `mk_err()`
+ /// you must give *some* self type. A common choice is `mk_err()`
/// or some placeholder type.
pub fn with_self_ty(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>, self_ty: Ty<'tcx>)
-> ty::TraitRef<'tcx> {
self.skip_binder().def_id
}
- /// Object types don't have a self-type specified. Therefore, when
+ /// Object types don't have a self type specified. Therefore, when
/// we convert the principal trait-ref into a normal trait-ref,
- /// you must give *some* self-type. A common choice is `mk_err()`
+ /// you must give *some* self type. A common choice is `mk_err()`
/// or some placeholder type.
pub fn with_self_ty(&self, tcx: TyCtxt<'_, '_, 'tcx>,
self_ty: Ty<'tcx>)
/// Skips the binder and returns the "bound" value. This is a
/// risky thing to do because it's easy to get confused about
- /// debruijn indices and the like. It is usually better to
+ /// De Bruijn indices and the like. It is usually better to
/// discharge the binder using `no_bound_vars` or
/// `replace_late_bound_regions` or something like
/// that. `skip_binder` is only valid when you are either
///
/// Some examples where `skip_binder` is reasonable:
///
- /// - extracting the def-id from a PolyTraitRef;
+ /// - extracting the `DefId` from a PolyTraitRef;
/// - comparing the self type of a PolyTraitRef to see if it is equal to
/// a type parameter `X`, since the type `X` does not reference any regions
pub fn skip_binder(&self) -> &T {
}
/// Given two things that have the same binder level,
- /// and an operation that wraps on their contents, execute the operation
- /// and then wrap its result.
+ /// and an operation that wraps on their contents, executes the operation
+ /// and then wraps its result.
///
/// `f` should consider bound regions at depth 1 to be free, and
/// anything it produces with bound regions at depth 1 will be
Binder(f(self.0, u.0))
}
- /// Split the contents into two things that share the same binder
+ /// Splits the contents into two things that share the same binder
/// level as the original, returning two distinct binders.
///
/// `f` should consider bound regions at depth 1 to be free, and
/// ## Bound Regions
///
/// These are regions that are stored behind a binder and must be substituted
-/// with some concrete region before being used. There are 2 kind of
-/// bound regions: early-bound, which are bound in an item's Generics,
-/// and are substituted by a Substs, and late-bound, which are part of
-/// higher-ranked types (e.g., `for<'a> fn(&'a ())`) and are substituted by
+/// with some concrete region before being used. There are two kinds of
+/// bound regions: early-bound, which are bound in an item's `Generics`,
+/// and are substituted by a `Substs`, and late-bound, which are part of
+/// higher-ranked types (e.g., `for<'a> fn(&'a ())`), and are substituted by
/// the likes of `liberate_late_bound_regions`. The distinction exists
/// because higher-ranked lifetimes aren't supported in all places. See [1][2].
///
-/// Unlike Param-s, bound regions are not supposed to exist "in the wild"
+/// Unlike `Param`s, bound regions are not supposed to exist "in the wild"
/// outside their binder, e.g., in types passed to type inference, and
/// should first be substituted (by placeholder regions, free regions,
/// or region variables).
/// To do this, we replace the bound regions with placeholder markers,
/// which don't satisfy any relation not explicitly provided.
///
-/// There are 2 kinds of placeholder regions in rustc: `ReFree` and
+/// There are two kinds of placeholder regions in rustc: `ReFree` and
/// `RePlaceholder`. When checking an item's body, `ReFree` is supposed
/// to be used. These also support explicit bounds: both the internally-stored
/// *scope*, which the region is assumed to outlive, as well as other
/// Static data that has an "infinite" lifetime. Top in the region lattice.
ReStatic,
- /// A region variable. Should not exist after typeck.
+ /// A region variable. Should not exist after typeck.
ReVar(RegionVid),
/// A placeholder region - basically the higher-ranked version of ReFree.
impl DebruijnIndex {
/// Returns the resulting index when this value is moved into
- /// `amount` number of new binders. So e.g., if you had
+ /// `amount` number of new binders. So, e.g., if you had
///
/// for<'a> fn(&'a x)
///
- /// and you wanted to change to
+ /// and you wanted to change it to
///
/// for<'a> fn(for<'b> fn(&'a x))
///
*self = self.shifted_out(amount);
}
- /// Adjusts any Debruijn Indices so as to make `to_binder` the
+ /// Adjusts any De Bruijn indices so as to make `to_binder` the
/// innermost binder. That is, if we have something bound at `to_binder`,
/// it will now be bound at INNERMOST. This is an appropriate thing to do
/// when moving a region out from inside binders:
/// // Binder: D3 D2 D1 ^^
/// ```
///
- /// Here, the region `'a` would have the debruijn index D3,
+ /// Here, the region `'a` would have the De Bruijn index D3,
/// because it is the bound 3 binders out. However, if we wanted
/// to refer to that region `'a` in the second argument (the `_`),
/// those two binders would not be in scope. In that case, we
/// might invoke `shift_out_to_binder(D3)`. This would adjust the
- /// debruijn index of `'a` to D1 (the innermost binder).
+ /// De Bruijn index of `'a` to D1 (the innermost binder).
///
/// If we invoke `shift_out_to_binder` and the region is in fact
/// bound by one of the binders we are shifting out of, that is an
}
}
- /// Adjusts any Debruijn Indices so as to make `to_binder` the
+ /// Adjusts any De Bruijn indices so as to make `to_binder` the
/// innermost binder. That is, if we have something bound at `to_binder`,
/// it will now be bound at INNERMOST. This is an appropriate thing to do
/// when moving a region out from inside binders:
/// // Binder: D3 D2 D1 ^^
/// ```
///
- /// Here, the region `'a` would have the debruijn index D3,
+ /// Here, the region `'a` would have the De Bruijn index D3,
/// because it is the bound 3 binders out. However, if we wanted
/// to refer to that region `'a` in the second argument (the `_`),
/// those two binders would not be in scope. In that case, we
/// might invoke `shift_out_to_binder(D3)`. This would adjust the
- /// debruijn index of `'a` to D1 (the innermost binder).
+ /// De Bruijn index of `'a` to D1 (the innermost binder).
///
/// If we invoke `shift_out_to_binder` and the region is in fact
/// bound by one of the binders we are shifting out of, that is an
flags
}
- /// Given an early-bound or free region, returns the def-id where it was bound.
+ /// Given an early-bound or free region, returns the `DefId` where it was bound.
/// For example, consider the regions in this snippet of code:
///
/// ```
/// }
/// ```
///
- /// Here, `free_region_binding_scope('a)` would return the def-id
+ /// Here, `free_region_binding_scope('a)` would return the `DefId`
/// of the impl, and for all the other highlighted regions, it
- /// would return the def-id of the function. In other cases (not shown), this
- /// function might return the def-id of a closure.
+ /// would return the `DefId` of the function. In other cases (not shown), this
+ /// function might return the `DefId` of a closure.
pub fn free_region_binding_scope(&self, tcx: TyCtxt<'_, '_, '_>) -> DefId {
match self {
ty::ReEarlyBound(br) => {
}
}
- /// Returns true if this type is a floating point type and false otherwise.
+ /// Returns `true` if this type is a floating point type.
pub fn is_floating_point(&self) -> bool {
match self.sty {
Float(_) |
/// Creates a `Substs` that maps each generic parameter to a higher-ranked
/// var bound at index `0`. For types, we use a `BoundVar` index equal to
/// the type parameter index. For regions, we use the `BoundRegion::BrNamed`
- /// variant (which has a def-id).
+ /// variant (which has a `DefId`).
pub fn bound_vars_for_item(
tcx: TyCtxt<'a, 'gcx, 'tcx>,
def_id: DefId
self.shift_vars_through_binders(ty)
}
- /// It is sometimes necessary to adjust the debruijn indices during substitution. This occurs
+ /// It is sometimes necessary to adjust the De Bruijn indices during substitution. This occurs
/// when we are substituting a type with escaping bound vars into a context where we have
/// passed through binders. That's quite a mouthful. Let's see an example:
///
///
/// Here the `'a` lifetime is bound in the outer function, but appears as an argument of the
/// inner one. Therefore, that appearance will have a DebruijnIndex of 2, because we must skip
- /// over the inner binder (remember that we count Debruijn indices from 1). However, in the
+ /// over the inner binder (remember that we count De Bruijn indices from 1). However, in the
/// definition of `MetaFunc`, the binder is not visible, so the type `&'a int` will have a
- /// debruijn index of 1. It's only during the substitution that we can see we must increase the
+ /// De Bruijn index of 1. It's only during the substitution that we can see we must increase the
/// depth by 1 to account for the binder that we passed through.
///
/// As a second example, consider this twist:
/// DebruijnIndex of 2
///
/// As indicated in the diagram, here the same type `&'a int` is substituted once, but in the
- /// first case we do not increase the Debruijn index and in the second case we do. The reason
+ /// first case we do not increase the De Bruijn index and in the second case we do. The reason
/// is that only in the second case have we passed through a fn binder.
fn shift_vars_through_binders(&self, ty: Ty<'tcx>) -> Ty<'tcx> {
debug!("shift_vars(ty={:?}, binders_passed={:?}, has_escaping_bound_vars={:?})",
/// The substitutions for the item as given by the user.
pub substs: &'tcx Substs<'tcx>,
- /// The self-type, in the case of a `<T>::Item` path (when applied
+ /// The self type, in the case of a `<T>::Item` path (when applied
/// to an inherent impl). See `UserSelfTy` below.
pub user_self_ty: Option<UserSelfTy<'tcx>>,
}
}
}
-/// Specifies the user-given self-type. In the case of a path that
-/// refers to a member in an inherent impl, this self-type is
+/// Specifies the user-given self type. In the case of a path that
+/// refers to a member in an inherent impl, this self type is
/// sometimes needed to constrain the type parameters on the impl. For
/// example, in this code:
///
/// ```
///
/// when you then have a path like `<Foo<&'static u32>>::method`,
-/// this struct would carry the def-id of the impl along with the
-/// self-type `Foo<u32>`. Then we can instantiate the parameters of
+/// this struct would carry the `DefId` of the impl along with the
+/// self type `Foo<u32>`. Then we can instantiate the parameters of
/// the impl (with the substs from `UserSubsts`) and apply those to
-/// the self-type, giving `Foo<?A>`. Finally, we unify that with
-/// the self-type here, which contains `?A` to be `&'static u32`
+/// the self type, giving `Foo<?A>`. Finally, we unify that with
+/// the self type here, which contains `?A` to be `&'static u32`
#[derive(Copy, Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable)]
pub struct UserSelfTy<'tcx> {
pub impl_def_id: DefId,
#[derive(Default)]
pub struct TraitImpls {
blanket_impls: Vec<DefId>,
- /// Impls indexed by their simplified self-type, for fast lookup.
+ /// Impls indexed by their simplified self type, for fast lookup.
non_blanket_impls: FxHashMap<fast_reject::SimplifiedType, Vec<DefId>>,
}
}
/// Iterate over every impl that could possibly match the
- /// self-type `self_ty`.
+ /// self type `self_ty`.
pub fn for_each_relevant_impl<F: FnMut(DefId)>(self,
def_id: DefId,
self_ty: Ty<'tcx>,
}
}
- /// Return a vector containing all impls
+ /// Returns a vector containing all impls
pub fn all_impls(self, def_id: DefId) -> Vec<DefId> {
let impls = self.trait_impls_of(def_id);
-//! misc. type-system utilities too small to deserve their own file
+//! Miscellaneous type-system utilities that are too small to deserve their own modules.
use crate::hir::def::Def;
use crate::hir::def_id::DefId;
#[derive(Copy, Clone, Debug)]
pub struct Discr<'tcx> {
- /// bit representation of the discriminant, so `-128i8` is `0xFF_u128`
+ /// Bit representation of the discriminant (e.g., `-128i8` is `0xFF_u128`).
pub val: u128,
pub ty: Ty<'tcx>
}
}
impl<'tcx> Discr<'tcx> {
- /// Adds 1 to the value and wraps around if the maximum for the type is reached
+ /// Adds `1` to the value and wraps around if the maximum for the type is reached.
pub fn wrap_incr<'a, 'gcx>(self, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Self {
self.checked_add(tcx, 1).0
}
///
/// Requires that trait definitions have been processed so that we can
/// elaborate predicates and walk supertraits.
- ///
- /// FIXME callers may only have a &[Predicate], not a Vec, so that's
- /// what this code should accept.
+ //
+ // FIXME: callers may only have a `&[Predicate]`, not a `Vec`, so that's
+ // what this code should accept.
pub fn required_region_bounds(self,
erased_self_ty: Ty<'tcx>,
predicates: Vec<ty::Predicate<'tcx>>)
Some(ty::Destructor { did: dtor_did? })
}
- /// Return the set of types that are required to be alive in
+ /// Returns the set of types that are required to be alive in
/// order to run the destructor of `def` (see RFCs 769 and
/// 1238).
///
result
}
- /// True if `def_id` refers to a closure (e.g., `|x| x * 2`). Note
- /// that closures have a def-id, but the closure *expression* also
+ /// Returns `true` if `def_id` refers to a closure (e.g., `|x| x * 2`). Note
+ /// that closures have a `DefId`, but the closure *expression* also
/// has a `HirId` that is located within the context where the
/// closure appears (and, sadly, a corresponding `NodeId`, since
/// those are not yet phased out). The parent of the closure's
- /// def-id will also be the context where it appears.
+ /// `DefId` will also be the context where it appears.
pub fn is_closure(self, def_id: DefId) -> bool {
self.def_key(def_id).disambiguated_data.data == DefPathData::ClosureExpr
}
- /// True if `def_id` refers to a trait (i.e., `trait Foo { ... }`).
+ /// Returns `true` if `def_id` refers to a trait (i.e., `trait Foo { ... }`).
pub fn is_trait(self, def_id: DefId) -> bool {
if let DefPathData::Trait(_) = self.def_key(def_id).disambiguated_data.data {
true
}
}
- /// True if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`).
+ /// Returns `true` if `def_id` refers to a trait alias (i.e., `trait Foo = ...;`),
+ /// and `false` otherwise.
pub fn is_trait_alias(self, def_id: DefId) -> bool {
if let DefPathData::TraitAlias(_) = self.def_key(def_id).disambiguated_data.data {
true
}
}
- /// True if this def-id refers to the implicit constructor for
- /// a tuple struct like `struct Foo(u32)`.
+ /// Returns `true` if this `DefId` refers to the implicit constructor for
+ /// a tuple struct like `struct Foo(u32)`, and `false` otherwise.
pub fn is_struct_constructor(self, def_id: DefId) -> bool {
self.def_key(def_id).disambiguated_data.data == DefPathData::StructCtor
}
/// Given the `DefId` of a fn or closure, returns the `DefId` of
/// the innermost fn item that the closure is contained within.
- /// This is a significant def-id because, when we do
+ /// This is a significant `DefId` because, when we do
/// type-checking, we type-check this fn item and all of its
- /// (transitive) closures together. Therefore, when we fetch the
+ /// (transitive) closures together. Therefore, when we fetch the
/// `typeck_tables_of` the closure, for example, we really wind up
/// fetching the `typeck_tables_of` the enclosing fn item.
pub fn closure_base_def_id(self, def_id: DefId) -> DefId {
def_id
}
- /// Given the def-id and substs a closure, creates the type of
+ /// Given the `DefId` and substs of a closure, creates the type of
/// `self` argument that the closure expects. For example, for a
/// `Fn` closure, this would return a reference type `&T` where
- /// `T=closure_ty`.
+ /// `T = closure_ty`.
///
/// Returns `None` if this closure's kind has not yet been inferred.
/// This should only be possible during type checking.
Some(ty::Binder::bind(env_ty))
}
- /// Given the def-id of some item that has no type parameters, make
+ /// Given the `DefId` of some item that has no type parameters, make
/// a suitable "empty substs" for it.
pub fn empty_substs_for_def_id(self, item_def_id: DefId) -> &'tcx Substs<'tcx> {
Substs::for_item(self, item_def_id, |param, _| {
})
}
- /// Return whether the node pointed to by def_id is a static item, and its mutability
+ /// Checks whether the node pointed to by `def_id` is a static item; if so, returns its mutability.
pub fn is_static(&self, def_id: DefId) -> Option<hir::Mutability> {
if let Some(node) = self.hir().get_if_local(def_id) {
match node {
/// Checks whether values of this type `T` implement the `Freeze`
/// trait -- frozen types are those that do not contain a
- /// `UnsafeCell` anywhere. This is a language concept used to
+ /// `UnsafeCell` anywhere. This is a language concept used to
/// distinguish "true immutability", which is relevant to
/// optimization as well as the rules around static values. Note
/// that the `Freeze` trait is not exposed to end users and is
+use crate::hir;
use crate::hir::def_id::DefId;
use crate::infer::InferCtxt;
use crate::ty::subst::Substs;
use crate::traits;
use crate::ty::{self, ToPredicate, Ty, TyCtxt, TypeFoldable};
use std::iter::once;
-use syntax::ast;
use syntax_pos::Span;
use crate::middle::lang_items;
/// say "$0 is WF if $0 is WF".
pub fn obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
ty: Ty<'tcx>,
span: Span)
-> Option<Vec<traits::PredicateObligation<'tcx>>>
/// if `Bar: Eq`.
pub fn trait_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
trait_ref: &ty::TraitRef<'tcx>,
span: Span)
-> Vec<traits::PredicateObligation<'tcx>>
pub fn predicate_obligations<'a, 'gcx, 'tcx>(infcx: &InferCtxt<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
predicate: &ty::Predicate<'tcx>,
span: Span)
-> Vec<traits::PredicateObligation<'tcx>>
struct WfPredicates<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
span: Span,
out: Vec<traits::PredicateObligation<'tcx>>,
}
}
}
- /// Push new obligations into `out`. Returns true if it was able
+ /// Pushes new obligations into `out`. Returns `true` if it was able
/// to generate all the predicates needed to validate that `ty0`
/// is WF. Returns false if `ty0` is an unresolved type variable,
/// in which case we are not able to simplify at all.
}
}
-/// Given an object type like `SomeTrait+Send`, computes the lifetime
+/// Given an object type like `SomeTrait + Send`, computes the lifetime
/// bounds that must hold on the elided self type. These are derived
/// from the declarations of `SomeTrait`, `Send`, and friends -- if
/// they declare `trait SomeTrait : 'static`, for example, then
/// Parameters to the `Dump` variant of type `ProfileQueriesMsg`.
#[derive(Clone,Debug)]
pub struct ProfQDumpParams {
- /// A base path for the files we will dump
+ /// A base path for the files we will dump.
pub path:String,
- /// To ensure that the compiler waits for us to finish our dumps
+ /// To ensure that the compiler waits for us to finish our dumps.
pub ack:Sender<()>,
- /// toggle dumping a log file with every `ProfileQueriesMsg`
+ /// Toggle dumping a log file with every `ProfileQueriesMsg`.
pub dump_profq_msg_log:bool,
}
TIME_DEPTH.with(|slot| slot.get())
}
-/// Set the current depth of `time()` calls. The idea is to call
+/// Sets the current depth of `time()` calls. The idea is to call
/// `set_time_depth()` with the result from `time_depth()` in the
/// parent thread.
pub fn set_time_depth(depth: usize) {
-//! An efficient hash map for node IDs
+//! An efficient hash map for `NodeId`s.
use crate::hir::def_id::DefId;
use crate::hir::{HirId, ItemLocalId};
/// The "region highlights" are used to control region printing during
/// specific error messages. When a "region highlight" is enabled, it
/// gives an alternate way to print specific regions. For now, we
-/// always print those regions using a number, so something like `'0`.
+/// always print those regions using a number, so something like "`'0`".
///
/// Regions not selected by the region highlight mode are presently
/// unaffected.
#[derive(Copy, Clone, Default)]
pub struct RegionHighlightMode {
- /// If enabled, when we see the selected region, use `"'N"`
+ /// If enabled, when we see the selected region, use "`'N`"
/// instead of the ordinary behavior.
highlight_regions: [Option<(ty::RegionKind, usize)>; 3],
/// If enabled, when printing a "free region" that originated from
- /// the given `ty::BoundRegion`, print it as `'1`. Free regions that would ordinarily
+ /// the given `ty::BoundRegion`, print it as "`'1`". Free regions that would ordinarily
/// have names print as normal.
///
/// This is used when you have a signature like `fn foo(x: &u32,
}
impl RegionHighlightMode {
- /// Read and return current region highlight settings (accesses thread-local state).a
+ /// Reads and returns the current region highlight settings (accesses thread-local state).
pub fn get() -> Self {
REGION_HIGHLIGHT_MODE.with(|c| c.get())
}
- /// Internal helper to update current settings during the execution of `op`.
+ // Internal helper to update current settings during the execution of `op`.
fn set<R>(
old_mode: Self,
new_mode: Self,
})
}
- /// If `region` and `number` are both `Some`, invoke
- /// `highlighting_region`. Otherwise, just invoke `op` directly.
+ /// If `region` and `number` are both `Some`, invokes
+ /// `highlighting_region`; otherwise, just invokes `op` directly.
pub fn maybe_highlighting_region<R>(
region: Option<ty::Region<'_>>,
number: Option<usize>,
op()
}
- /// During the execution of `op`, highlight the region inference
- /// vairable `vid` as `'N`. We can only highlight one region vid
+ /// During the execution of `op`, highlights the region inference
+ /// variable `vid` as `'N`. We can only highlight one region `vid`
/// at a time.
pub fn highlighting_region<R>(
region: ty::Region<'_>,
Self::set(old_mode, new_mode, op)
}
- /// Convenience wrapper for `highlighting_region`
+ /// Convenience wrapper for `highlighting_region`.
pub fn highlighting_region_vid<R>(
vid: ty::RegionVid,
number: usize,
Self::highlighting_region(&ty::ReVar(vid), number, op)
}
- /// Returns true if any placeholders are highlighted.
+ /// Returns `true` if any placeholders are highlighted, and `false` otherwise.
fn any_region_vids_highlighted(&self) -> bool {
Self::get()
.highlight_regions
})
}
- /// Returns `Some(n)` with the number to use for the given region,
- /// if any.
+ /// Returns `Some(n)` with the number to use for the given region, if any.
fn region_highlighted(&self, region: ty::Region<'_>) -> Option<usize> {
Self::get()
.highlight_regions
}
/// During the execution of `op`, highlight the given bound
- /// region. We can only highlight one bound region at a time. See
+ /// region. We can only highlight one bound region at a time. See
/// the field `highlight_bound_region` for more detailed notes.
pub fn highlighting_bound_region<R>(
br: ty::BoundRegion,
)
}
- /// Returns true if any placeholders are highlighted.
+ /// Returns `true` if any placeholders are highlighted, and `false` otherwise.
pub fn any_placeholders_highlighted(&self) -> bool {
Self::get()
.highlight_regions
})
}
- /// Returns `Some(N)` if the placeholder `p` is highlighted to print as `'N`.
+ /// Returns `Some(N)` if the placeholder `p` is highlighted to print as "`'N`".
pub fn placeholder_highlight(&self, p: ty::PlaceholderRegion) -> Option<usize> {
self.region_highlighted(&ty::RePlaceholder(p))
}
GenericActivityEnd { category: ProfileCategory, time: Instant },
QueryCacheHit { query_name: &'static str, category: ProfileCategory },
QueryCount { query_name: &'static str, category: ProfileCategory, count: usize },
+ IncrementalLoadResultStart { query_name: &'static str, time: Instant },
+ IncrementalLoadResultEnd { query_name: &'static str, time: Instant },
+ QueryBlockedStart { query_name: &'static str, category: ProfileCategory, time: Instant },
+ QueryBlockedEnd { query_name: &'static str, category: ProfileCategory, time: Instant },
}
impl ProfilerEvent {
use self::ProfilerEvent::*;
match self {
- QueryStart { .. } | GenericActivityStart { .. } => true,
- QueryEnd { .. } | GenericActivityEnd { .. } |
- QueryCacheHit { .. } | QueryCount { .. } => false,
+ QueryStart { .. } |
+ GenericActivityStart { .. } |
+ IncrementalLoadResultStart { .. } |
+ QueryBlockedStart { .. } => true,
+
+ QueryEnd { .. } |
+ GenericActivityEnd { .. } |
+ QueryCacheHit { .. } |
+ QueryCount { .. } |
+ IncrementalLoadResultEnd { .. } |
+ QueryBlockedEnd { .. } => false,
}
}
}
}
fn total_time(&self) -> u64 {
- let mut total = 0;
- for (_, time) in &self.query_times {
- total += time;
- }
-
- total
+ self.query_times.iter().map(|(_, time)| time).sum()
}
fn total_cache_data(&self) -> (u64, u64) {
}
fn total_time(&self) -> u64 {
- let mut total = 0;
-
- for (_, data) in &self.categories {
- total += data.total_time();
- }
-
- total
+ self.categories.iter().map(|(_, data)| data.total_time()).sum()
}
fn with_options(mut self, opts: &Options) -> CalculatedResults {
})
}
+ #[inline]
+ pub fn incremental_load_result_start(&mut self, query_name: &'static str) {
+ self.record(ProfilerEvent::IncrementalLoadResultStart {
+ query_name,
+ time: Instant::now(),
+ })
+ }
+
+ #[inline]
+ pub fn incremental_load_result_end(&mut self, query_name: &'static str) {
+ self.record(ProfilerEvent::IncrementalLoadResultEnd {
+ query_name,
+ time: Instant::now(),
+ })
+ }
+
+ #[inline]
+ pub fn query_blocked_start(&mut self, query_name: &'static str, category: ProfileCategory) {
+ self.record(ProfilerEvent::QueryBlockedStart {
+ query_name,
+ category,
+ time: Instant::now(),
+ })
+ }
+
+ #[inline]
+ pub fn query_blocked_end(&mut self, query_name: &'static str, category: ProfileCategory) {
+ self.record(ProfilerEvent::QueryBlockedEnd {
+ query_name,
+ category,
+ time: Instant::now(),
+ })
+ }
+
#[inline]
fn record(&mut self, event: ProfilerEvent) {
let thread_id = std::thread::current().id();
result_data.query_cache_stats.entry(query_name).or_insert((0, 0));
*totals += *count as u64;
},
+ // We don't summarize incremental load result events in the simple output mode.
+ IncrementalLoadResultStart { .. } | IncrementalLoadResultEnd { .. } => { },
+ // We don't summarize parallel query blocking in the simple output mode.
+ QueryBlockedStart { .. } | QueryBlockedEnd { .. } => { },
}
}
.unwrap();
let mut categories: Vec<_> = results.categories.iter().collect();
- categories.sort_by(|(_, data1), (_, data2)| data2.total_time().cmp(&data1.total_time()));
+ categories.sort_by_cached_key(|(_, d)| d.total_time());
- for (category, data) in categories {
+ for (category, data) in categories.iter().rev() {
let (category_hits, category_total) = data.total_cache_data();
let category_hit_percent = calculate_percent(category_hits, category_total);
/// exponent = all 1's, integer bit 0, significand 0 ("pseudoinfinity")
/// exponent = all 1's, integer bit 0, significand nonzero ("pseudoNaN")
/// exponent = 0, integer bit 1 ("pseudodenormal")
- /// exponent!=0 nor all 1's, integer bit 0 ("unnormal")
+ /// exponent != 0 nor all 1's, integer bit 0 ("unnormal")
/// At the moment, the first two are treated as NaNs, the second two as Normal.
fn from_bits(bits: u128) -> IeeeFloat<Self> {
let sign = bits & (1 << (Self::BITS - 1));
}
}
- /// Returns TRUE if, when truncating the current number, with BIT the
+ /// Returns `true` if, when truncating the current number, with `bit` the
/// new LSB, with the given lost fraction and rounding mode, the result
/// would need to be rounded away from zero (i.e., by increasing the
- /// signficand). This routine must work for Category::Zero of both signs, and
- /// Category::Normal numbers.
+ /// significand). This routine must work for `Category::Zero` of both signs, and
+ /// `Category::Normal` numbers.
fn round_away_from_zero(&self, round: Round, loss: Loss, bit: usize) -> bool {
// NaNs and infinities should not have lost fractions.
assert!(self.is_finite_non_zero() || self.is_zero());
more_significant
}
- /// Return the fraction lost were a bignum truncated losing the least
+ /// Returns the fraction lost were a bignum truncated losing the least
/// significant `bits` bits.
fn through_truncation(limbs: &[Limb], bits: usize) -> Loss {
if bits == 0 {
Ordering::Equal
}
- /// Extract the given bit.
+ /// Extracts the given bit.
pub(super) fn get_bit(limbs: &[Limb], bit: usize) -> bool {
limbs[bit / LIMB_BITS] & (1 << (bit % LIMB_BITS)) != 0
}
- /// Set the given bit.
+ /// Sets the given bit.
pub(super) fn set_bit(limbs: &mut [Limb], bit: usize) {
limbs[bit / LIMB_BITS] |= 1 << (bit % LIMB_BITS);
}
limbs[bit / LIMB_BITS] &= !(1 << (bit % LIMB_BITS));
}
- /// Shift `dst` left `bits` bits, subtract `bits` from its exponent.
+ /// Shifts `dst` left `bits` bits, subtracting `bits` from its exponent.
pub(super) fn shift_left(dst: &mut [Limb], exp: &mut ExpInt, bits: usize) {
if bits > 0 {
// Our exponent should not underflow.
}
}
- /// Shift `dst` right `bits` bits noting lost fraction.
+ /// Shifts `dst` right `bits` bits, noting the lost fraction.
pub(super) fn shift_right(dst: &mut [Limb], exp: &mut ExpInt, bits: usize) -> Loss {
let loss = Loss::through_truncation(dst, bits);
loss
}
- /// Copy the bit vector of width `src_bits` from `src`, starting at bit SRC_LSB,
+ /// Copies the bit vector of width `src_bits` from `src`, starting at bit SRC_LSB,
/// to `dst`, such that the bit SRC_LSB becomes the least significant bit of `dst`.
/// All high bits above `src_bits` in `dst` are zero-filled.
pub(super) fn extract(dst: &mut [Limb], src: &[Limb], src_bits: usize, src_lsb: usize) {
fn from_str_r(s: &str, round: Round) -> Result<StatusAnd<Self>, ParseError>;
fn to_bits(self) -> u128;
- /// Convert a floating point number to an integer according to the
+ /// Converts a floating point number to an integer according to the
/// rounding mode. In case of an invalid operation exception,
/// deterministic values are returned, namely zero for NaNs and the
/// minimal or maximal value respectively for underflow or overflow.
///
/// The *is_exact output tells whether the result is exact, in the sense
/// that converting it back to the original floating point type produces
- /// the original value. This is almost equivalent to result==Status::OK,
+ /// the original value. This is almost equivalent to `result == Status::OK`,
/// except for negative zeroes.
fn to_i128_r(self, width: usize, round: Round, is_exact: &mut bool) -> StatusAnd<i128> {
let status;
}
}
- /// IEEE-754R isSignMinus: Returns true if and only if the current value is
+ /// IEEE-754R isSignMinus: Returns whether the current value is
/// negative.
///
/// This applies to zeros and NaNs as well.
fn is_negative(self) -> bool;
- /// IEEE-754R isNormal: Returns true if and only if the current value is normal.
+ /// IEEE-754R isNormal: Returns whether the current value is normal.
///
/// This implies that the current value of the float is not zero, subnormal,
/// infinite, or NaN following the definition of normality from IEEE-754R.
!self.is_denormal() && self.is_finite_non_zero()
}
- /// Returns true if and only if the current value is zero, subnormal, or
+ /// Returns `true` if the current value is zero, subnormal, or
/// normal.
///
/// This means that the value is not infinite or NaN.
!self.is_nan() && !self.is_infinite()
}
- /// Returns true if and only if the float is plus or minus zero.
+ /// Returns `true` if the float is plus or minus zero.
fn is_zero(self) -> bool {
self.category() == Category::Zero
}
- /// IEEE-754R isSubnormal(): Returns true if and only if the float is a
+ /// IEEE-754R isSubnormal(): Returns whether the float is a
/// denormal.
fn is_denormal(self) -> bool;
- /// IEEE-754R isInfinite(): Returns true if and only if the float is infinity.
+ /// IEEE-754R isInfinite(): Returns whether the float is infinity.
fn is_infinite(self) -> bool {
self.category() == Category::Infinity
}
- /// Returns true if and only if the float is a quiet or signaling NaN.
+ /// Returns `true` if the float is a quiet or signaling NaN.
fn is_nan(self) -> bool {
self.category() == Category::NaN
}
- /// Returns true if and only if the float is a signaling NaN.
+ /// Returns `true` if the float is a signaling NaN.
fn is_signaling(self) -> bool;
// Simple Queries
self.is_zero() && self.is_negative()
}
- /// Returns true if and only if the number has the smallest possible non-zero
+ /// Returns `true` if the number has the smallest possible non-zero
/// magnitude in the current semantics.
fn is_smallest(self) -> bool {
Self::SMALLEST.copy_sign(self).bitwise_eq(self)
}
- /// Returns true if and only if the number has the largest possible finite
+ /// Returns `true` if the number has the largest possible finite
/// magnitude in the current semantics.
fn is_largest(self) -> bool {
Self::largest().copy_sign(self).bitwise_eq(self)
}
- /// Returns true if and only if the number is an exact integer.
+ /// Returns `true` if the number is an exact integer.
fn is_integer(self) -> bool {
// This could be made more efficient; I'm going for obviously correct.
if !self.is_finite() {
}
pub trait FloatConvert<T: Float>: Float {
- /// Convert a value of one floating point type to another.
+ /// Converts a value of one floating point type to another.
/// The return value corresponds to the IEEE754 exceptions. *loses_info
/// records whether the transformation lost information, i.e., whether
/// converting the result back to the original type will produce the
- /// original value (this is almost the same as return value==Status::OK,
+ /// original value (this is almost the same as the return value being `Status::OK`,
/// but there are edge cases where this is not so).
fn convert_r(self, round: Round, loses_info: &mut bool) -> StatusAnd<T>;
fn convert(self, loses_info: &mut bool) -> StatusAnd<T> {
{
//! Iterates over each loan that has been issued
//! on entrance to `node`, regardless of whether it is
- //! actually *in scope* at that point. Sometimes loans
+ //! actually *in scope* at that point. Sometimes loans
//! are issued for future scopes and thus they may have been
//! *issued* but not yet be in effect.
impl<'a, 'tcx> GuaranteeLifetimeContext<'a, 'tcx> {
fn check(&self, cmt: &mc::cmt_<'tcx>, discr_scope: Option<ast::NodeId>) -> R {
//! Main routine. Walks down `cmt` until we find the
- //! "guarantor". Reports an error if `self.loan_region` is
+ //! "guarantor". Reports an error if `self.loan_region` is
//! larger than scope of `cmt`.
debug!("guarantee_lifetime.check(cmt={:?}, loan_region={:?})",
cmt,
fn decl_without_init(&mut self, id: ast::NodeId, _span: Span) {
let ty = self.bccx
.tables
- .node_id_to_type(self.bccx.tcx.hir().node_to_hir_id(id));
+ .node_type(self.bccx.tcx.hir().node_to_hir_id(id));
gather_moves::gather_decl(self.bccx, &self.move_data, id, ty);
}
}
}
/// Guarantees that `addr_of(cmt)` will be valid for the duration of `static_scope_r`, or
- /// reports an error. This may entail taking out loans, which will be added to the
+ /// reports an error. This may entail taking out loans, which will be added to the
/// `req_loan_map`.
fn guarantee_valid(&mut self,
borrow_id: hir::ItemLocalId,
/// gen_scope indicates where loan is introduced. Typically the
/// loan is introduced at the point of the borrow, but in some
/// cases, notably method arguments, the loan may be introduced
- /// only later, once it comes into scope. See also
+ /// only later, once it comes into scope. See also
/// `GatherLoanCtxt::compute_gen_scope`.
gen_scope: region::Scope,
- /// kill_scope indicates when the loan goes out of scope. This is
+ /// kill_scope indicates when the loan goes out of scope. This is
/// either when the lifetime expires or when the local variable
/// which roots the loan-path goes out of scope, whichever happens
/// faster. See also `GatherLoanCtxt::compute_kill_scope`.
match tcx.hir().get(closure_id) {
Node::Expr(expr) => match expr.node {
hir::ExprKind::Closure(.., body_id, _, _) => {
- body_id.node_id
+ tcx.hir().hir_to_node_id(body_id.hir_id)
}
_ => {
bug!("encountered non-closure id: {}", closure_id)
/// Path being moved.
pub path: MovePathIndex,
- /// id of node that is doing the move.
+ /// ID of node that is doing the move.
pub id: hir::ItemLocalId,
/// Kind of move, for error messages.
/// Path being assigned.
pub path: MovePathIndex,
- /// id where assignment occurs
+ /// ID where assignment occurs
pub id: hir::ItemLocalId,
/// span of node where assignment occurs
}
impl<'a, 'tcx> MoveData<'tcx> {
- /// return true if there are no trackable assignments or moves
- /// in this move data - that means that there is nothing that
+ /// Returns `true` if there are no trackable assignments or moves
+ /// in this move data -- that means that there is nothing that
/// could cause a borrow error.
pub fn is_empty(&self) -> bool {
self.moves.borrow().is_empty() &&
return index;
- /// Add mappings from the ast nodes for the formal bindings to
+ /// Adds mappings from the AST nodes for the formal bindings to
/// the entry-node in the graph.
fn add_entries_from_fn_body(index: &mut FxHashMap<hir::ItemLocalId, Vec<CFGIndex>>,
body: &hir::Body,
}
impl ArgTypeExt<'ll, 'tcx> for ArgType<'tcx, Ty<'tcx>> {
- /// Get the LLVM type for a place of the original Rust type of
+ /// Gets the LLVM type for a place of the original Rust type of
/// this argument/return, i.e., the result of `type_of::type_of`.
fn memory_ty(&self, cx: &CodegenCx<'ll, 'tcx>) -> &'ll Type {
self.layout.llvm_type(cx)
}
- /// Store a direct/indirect value described by this ArgType into a
+ /// Stores a direct/indirect value described by this ArgType into a
/// place for the original Rust type of this argument/return.
/// Can be used for both storing formal arguments into Rust variables
/// or results of call/invoke instructions into their destinations.
}
impl<'a> ArchiveBuilder<'a> {
- /// Create a new static archive, ready for modifying the archive specified
+ /// Creates a new static archive, ready for modifying the archive specified
/// by `config`.
pub fn new(config: ArchiveConfig<'a>) -> ArchiveBuilder<'a> {
ArchiveBuilder {
out_filename, check_file_is_writeable};
-/// Perform the linkage portion of the compilation phase. This will generate all
+/// Performs the linkage portion of the compilation phase. This will generate all
/// of the requested outputs for this compilation session.
pub(crate) fn link_binary(sess: &Session,
codegen_results: &CodegenResults,
codegen_results: &CodegenResults) {
// Linker plugins should be specified early in the list of arguments
- cmd.cross_lang_lto();
+ cmd.linker_plugin_lto();
// The default library location, we need this to find the runtime.
// The location of crates will be determined as needed.
Lto::Thin => {
// If we defer LTO to the linker, we haven't run LTO ourselves, so
// any upstream object files have not been copied yet.
- !sess.opts.debugging_opts.cross_lang_lto.enabled()
+ !sess.opts.cg.linker_plugin_lto.enabled()
}
Lto::No |
Lto::ThinLocal => false,
use back::bytecode::{DecodedBytecode, RLIB_BYTECODE_EXTENSION};
use rustc_codegen_ssa::back::symbol_export;
-use rustc_codegen_ssa::back::write::{ModuleConfig, CodegenContext, pre_lto_bitcode_filename};
+use rustc_codegen_ssa::back::write::{ModuleConfig, CodegenContext, FatLTOInput};
use rustc_codegen_ssa::back::lto::{SerializedModule, LtoModuleCodegen, ThinShared, ThinModule};
use rustc_codegen_ssa::traits::*;
use back::write::{self, DiagnosticHandlers, with_llvm_pmb, save_temp_bitcode, to_llvm_opt_settings};
use libc;
use std::ffi::{CStr, CString};
-use std::fs;
use std::ptr;
use std::slice;
use std::sync::Arc;
/// Performs fat LTO by merging all modules into a single one and returning it
/// for further optimization.
pub(crate) fn run_fat(cgcx: &CodegenContext<LlvmCodegenBackend>,
- modules: Vec<ModuleCodegen<ModuleLlvm>>,
+ modules: Vec<FatLTOInput<LlvmCodegenBackend>>,
+ cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
timeline: &mut Timeline)
-> Result<LtoModuleCodegen<LlvmCodegenBackend>, FatalError>
{
let symbol_white_list = symbol_white_list.iter()
.map(|c| c.as_ptr())
.collect::<Vec<_>>();
- fat_lto(cgcx, &diag_handler, modules, upstream_modules, &symbol_white_list, timeline)
+ fat_lto(
+ cgcx,
+ &diag_handler,
+ modules,
+ cached_modules,
+ upstream_modules,
+ &symbol_white_list,
+ timeline,
+ )
}
/// Performs thin LTO by performing necessary global analysis and returning two
let symbol_white_list = symbol_white_list.iter()
.map(|c| c.as_ptr())
.collect::<Vec<_>>();
- if cgcx.opts.debugging_opts.cross_lang_lto.enabled() {
+ if cgcx.opts.cg.linker_plugin_lto.enabled() {
unreachable!("We should never reach this case if the LTO step \
is deferred to the linker");
}
}
pub(crate) fn prepare_thin(
- cgcx: &CodegenContext<LlvmCodegenBackend>,
module: ModuleCodegen<ModuleLlvm>
) -> (String, ThinBuffer) {
let name = module.name.clone();
let buffer = ThinBuffer::new(module.module_llvm.llmod());
-
- // We emit the module after having serialized it into a ThinBuffer
- // because only then it will contain the ThinLTO module summary.
- if let Some(ref incr_comp_session_dir) = cgcx.incr_comp_session_dir {
- if cgcx.config(module.kind).emit_pre_thin_lto_bc {
- let path = incr_comp_session_dir
- .join(pre_lto_bitcode_filename(&name));
-
- fs::write(&path, buffer.data()).unwrap_or_else(|e| {
- panic!("Error writing pre-lto-bitcode file `{}`: {}",
- path.display(),
- e);
- });
- }
- }
-
(name, buffer)
}
fn fat_lto(cgcx: &CodegenContext<LlvmCodegenBackend>,
diag_handler: &Handler,
- mut modules: Vec<ModuleCodegen<ModuleLlvm>>,
+ mut modules: Vec<FatLTOInput<LlvmCodegenBackend>>,
+ cached_modules: Vec<(SerializedModule<ModuleBuffer>, WorkProduct)>,
mut serialized_modules: Vec<(SerializedModule<ModuleBuffer>, CString)>,
symbol_white_list: &[*const libc::c_char],
timeline: &mut Timeline)
// file copy operations in the backend work correctly. The only other kind
// of module here should be an allocator one, and if your crate is smaller
// than the allocator module then the size doesn't really matter anyway.
- let (_, costliest_module) = modules.iter()
+ let costliest_module = modules.iter()
.enumerate()
+ .filter_map(|(i, module)| {
+ match module {
+ FatLTOInput::InMemory(m) => Some((i, m)),
+ FatLTOInput::Serialized { .. } => None,
+ }
+ })
.filter(|&(_, module)| module.kind == ModuleKind::Regular)
.map(|(i, module)| {
let cost = unsafe {
};
(cost, i)
})
- .max()
- .expect("must be codegen'ing at least one module");
- let module = modules.remove(costliest_module);
+ .max();
+
+ // If we found a costliest module, we're good to go. Otherwise all our
+ // inputs were serialized which could happen in the case, for example, that
+ // all our inputs were incrementally reread from the cache and we're just
+ // re-executing the LTO passes. If that's the case deserialize the first
+ // module and create a linker with it.
+ let module: ModuleCodegen<ModuleLlvm> = match costliest_module {
+ Some((_cost, i)) => {
+ match modules.remove(i) {
+ FatLTOInput::InMemory(m) => m,
+ FatLTOInput::Serialized { .. } => unreachable!(),
+ }
+ }
+ None => {
+ let pos = modules.iter().position(|m| {
+ match m {
+ FatLTOInput::InMemory(_) => false,
+ FatLTOInput::Serialized { .. } => true,
+ }
+ }).expect("must have at least one serialized module");
+ let (name, buffer) = match modules.remove(pos) {
+ FatLTOInput::Serialized { name, buffer } => (name, buffer),
+ FatLTOInput::InMemory(_) => unreachable!(),
+ };
+ ModuleCodegen {
+ module_llvm: ModuleLlvm::parse(cgcx, &name, &buffer, diag_handler)?,
+ name,
+ kind: ModuleKind::Regular,
+ }
+ }
+ };
let mut serialized_bitcode = Vec::new();
{
let (llcx, llmod) = {
// way we know of to do that is to serialize them to a string and them parse
// them later. Not great but hey, that's why it's "fat" LTO, right?
serialized_modules.extend(modules.into_iter().map(|module| {
- let buffer = ModuleBuffer::new(module.module_llvm.llmod());
- let llmod_id = CString::new(&module.name[..]).unwrap();
-
- (SerializedModule::Local(buffer), llmod_id)
+ match module {
+ FatLTOInput::InMemory(module) => {
+ let buffer = ModuleBuffer::new(module.module_llvm.llmod());
+ let llmod_id = CString::new(&module.name[..]).unwrap();
+ (SerializedModule::Local(buffer), llmod_id)
+ }
+ FatLTOInput::Serialized { name, buffer } => {
+ let llmod_id = CString::new(name).unwrap();
+ (SerializedModule::Local(buffer), llmod_id)
+ }
+ }
+ }));
+ serialized_modules.extend(cached_modules.into_iter().map(|(buffer, wp)| {
+ (buffer, CString::new(wp.cgu_name.clone()).unwrap())
}));
// For all serialized bitcode files we parse them and link them in as we did
llvm::LLVMRustModuleBufferCreate(m)
})
}
+
+ pub fn parse<'a>(
+ &self,
+ name: &str,
+ cx: &'a llvm::Context,
+ handler: &Handler,
+ ) -> Result<&'a llvm::Module, FatalError> {
+ let name = CString::new(name).unwrap();
+ parse_module(cx, &name, self.data(), handler)
+ }
}
impl ModuleBufferMethods for ModuleBuffer {
// crates but for locally codegened modules we may be able to reuse
// that LLVM Context and Module.
let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names);
- let llmod_raw = llvm::LLVMRustParseBitcodeForThinLTO(
+ let llmod_raw = parse_module(
llcx,
- thin_module.data().as_ptr(),
- thin_module.data().len(),
- thin_module.shared.module_names[thin_module.idx].as_ptr(),
- ).ok_or_else(|| {
- let msg = "failed to parse bitcode for thin LTO module";
- write::llvm_err(&diag_handler, msg)
- })? as *const _;
+ &thin_module.shared.module_names[thin_module.idx],
+ thin_module.data(),
+ &diag_handler,
+ )? as *const _;
let module = ModuleCodegen {
module_llvm: ModuleLlvm {
llmod_raw,
self.imports.get(llvm_module_name).map(|v| &v[..]).unwrap_or(&[])
}
- /// Load the ThinLTO import map from ThinLTOData.
+ /// Loads the ThinLTO import map from ThinLTOData.
unsafe fn from_thin_lto_data(data: *const llvm::ThinLTOData) -> ThinLTOImports {
unsafe extern "C" fn imported_module_callback(payload: *mut libc::c_void,
importing_module_name: *const libc::c_char,
c_str.to_str().unwrap_or_else(|e|
bug!("Encountered non-utf8 LLVM module name `{}`: {}", c_str.to_string_lossy(), e))
}
+
+fn parse_module<'a>(
+ cx: &'a llvm::Context,
+ name: &CStr,
+ data: &[u8],
+ diag_handler: &Handler,
+) -> Result<&'a llvm::Module, FatalError> {
+ unsafe {
+ llvm::LLVMRustParseBitcodeForLTO(
+ cx,
+ data.as_ptr(),
+ data.len(),
+ name.as_ptr(),
+ ).ok_or_else(|| {
+ let msg = "failed to parse bitcode for LTO module";
+ write::llvm_err(&diag_handler, msg)
+ })
+ }
+}
}
}
-/// Add or augment the existing `producers` section to encode information about
+/// Adds to or augments the existing `producers` section to encode information about
/// the Rust compiler used to produce the wasm file.
pub fn add_producer_section(
path: &Path,
let opt_level = config.opt_level.map(|x| to_llvm_opt_settings(x).0)
.unwrap_or(llvm::CodeGenOptLevel::None);
let prepare_for_thin_lto = cgcx.lto == Lto::Thin || cgcx.lto == Lto::ThinLocal ||
- (cgcx.lto != Lto::Fat && cgcx.opts.debugging_opts.cross_lang_lto.enabled());
+ (cgcx.lto != Lto::Fat && cgcx.opts.cg.linker_plugin_lto.enabled());
with_llvm_pmb(llmod, &config, opt_level, prepare_for_thin_lto, &mut |b| {
llvm::LLVMPassManagerBuilderPopulateFunctionPassManager(b, fpm);
llvm::LLVMPassManagerBuilderPopulateModulePassManager(b, mpm);
//!
//! Hopefully useful general knowledge about codegen:
//!
-//! * There's no way to find out the Ty type of a Value. Doing so
-//! would be "trying to get the eggs out of an omelette" (credit:
-//! pcwalton). You can, instead, find out its llvm::Type by calling val_ty,
-//! but one llvm::Type corresponds to many `Ty`s; for instance, tup(int, int,
-//! int) and rec(x=int, y=int, z=int) will have the same llvm::Type.
+//! * There's no way to find out the `Ty` type of a Value. Doing so
+//! would be "trying to get the eggs out of an omelette" (credit:
+//! pcwalton). You can, instead, find out its `llvm::Type` by calling `val_ty`,
+//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int,
+//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`.
use super::ModuleLlvm;
use rustc_codegen_ssa::{ModuleCodegen, ModuleKind};
//! Handles codegen of callees as well as other call-related
-//! things. Callees are a superset of normal rust values and sometimes
-//! have different representations. In particular, top-level fn items
+//! things. Callees are a superset of normal rust values and sometimes
+//! have different representations. In particular, top-level fn items
//! and methods are represented as just a fn ptr and not a full
//! closure.
self.use_dll_storage_attrs && !self.tcx.is_foreign_item(def_id) &&
// ThinLTO can't handle this workaround in all cases, so we don't
// emit the attrs. Instead we make them unnecessary by disallowing
- // dynamic linking when cross-language LTO is enabled.
- !self.tcx.sess.opts.debugging_opts.cross_lang_lto.enabled();
+ // dynamic linking when linker plugin based LTO is enabled.
+ !self.tcx.sess.opts.cg.linker_plugin_lto.enabled();
// If this assertion triggers, there's something wrong with commandline
// argument validation.
- debug_assert!(!(self.tcx.sess.opts.debugging_opts.cross_lang_lto.enabled() &&
+ debug_assert!(!(self.tcx.sess.opts.cg.linker_plugin_lto.enabled() &&
self.tcx.sess.target.target.options.is_like_msvc &&
self.tcx.sess.opts.cg.prefer_dynamic));
pub statics_to_rauw: RefCell<Vec<(&'ll Value, &'ll Value)>>,
/// Statics that will be placed in the llvm.used variable
- /// See http://llvm.org/docs/LangRef.html#the-llvm-used-global-variable for details
+ /// See <http://llvm.org/docs/LangRef.html#the-llvm-used-global-variable> for details
pub used_statics: RefCell<Vec<&'ll Value>>,
pub lltypes: RefCell<FxHashMap<(Ty<'tcx>, Option<VariantIdx>), &'ll Type>>,
self.declare_intrinsic(key).unwrap_or_else(|| bug!("unknown intrinsic '{}'", key))
}
+ fn insert_intrinsic(
+ &self, name: &'static str, args: Option<&[&'b llvm::Type]>, ret: &'b llvm::Type
+ ) -> &'b llvm::Value {
+ let fn_ty = if let Some(args) = args {
+ self.type_func(args, ret)
+ } else {
+ self.type_variadic_func(&[], ret)
+ };
+ let f = self.declare_cfn(name, fn_ty);
+ llvm::SetUnnamedAddr(f, false);
+ self.intrinsics.borrow_mut().insert(name, f.clone());
+ f
+ }
+
fn declare_intrinsic(
&self,
key: &str
macro_rules! ifn {
($name:expr, fn() -> $ret:expr) => (
if key == $name {
- let f = self.declare_cfn($name, self.type_func(&[], $ret));
- llvm::SetUnnamedAddr(f, false);
- self.intrinsics.borrow_mut().insert($name, f.clone());
- return Some(f);
+ return Some(self.insert_intrinsic($name, Some(&[]), $ret));
}
);
($name:expr, fn(...) -> $ret:expr) => (
if key == $name {
- let f = self.declare_cfn($name, self.type_variadic_func(&[], $ret));
- llvm::SetUnnamedAddr(f, false);
- self.intrinsics.borrow_mut().insert($name, f.clone());
- return Some(f);
+ return Some(self.insert_intrinsic($name, None, $ret));
}
);
($name:expr, fn($($arg:expr),*) -> $ret:expr) => (
if key == $name {
- let f = self.declare_cfn($name, self.type_func(&[$($arg),*], $ret));
- llvm::SetUnnamedAddr(f, false);
- self.intrinsics.borrow_mut().insert($name, f.clone());
- return Some(f);
+ return Some(self.insert_intrinsic($name, Some(&[$($arg),*]), $ret));
}
);
}
}
impl<'b, 'tcx> CodegenCx<'b, 'tcx> {
- /// Generate a new symbol name with the given prefix. This symbol name must
+ /// Generates a new symbol name with the given prefix. This symbol name must
/// only be used for definitions with `internal` or `private` linkage.
pub fn generate_local_symbol_name(&self, prefix: &str) -> String {
let idx = self.local_gen_sym_counter.get();
use syntax_pos::BytePos;
-/// Produce DIScope DIEs for each MIR Scope which has variables defined in it.
+/// Produces DIScope DIEs for each MIR Scope which has variables defined in it.
/// If debuginfo is disabled, the returned vector is empty.
pub fn create_mir_scopes(
cx: &CodegenCx<'ll, '_>,
//!
//! This algorithm also provides a stable ID for types that are defined in one
//! crate but instantiated from metadata within another crate. We just have to
-//! take care to always map crate and node IDs back to the original crate
+//! take care to always map crate and `NodeId`s back to the original crate
//! context.
//!
//! As a side-effect these unique type IDs also help to solve a problem arising
//! with different concrete substitutions for `'a`, and thus there will be N
//! `Ty` instances for the type `Struct<'a>` even though it is not generic
//! otherwise. Unfortunately this means that we cannot use `ty::type_id()` as
-//! cheap identifier for type metadata---we have done this in the past, but it
+//! cheap identifier for type metadata -- we have done this in the past, but it
//! led to unnecessary metadata duplication in the best case and LLVM
//! assertions in the worst. However, the unique type ID as described above
//! *can* be used as identifier. Since it is comparatively expensive to
}
}
-/// Create any deferred debug metadata nodes
+/// Creates any deferred debug metadata nodes
pub fn finalize(cx: &CodegenCx) {
if cx.dbg_cx.is_none() {
return;
};
}
-/// Return syntax_pos::Loc corresponding to the beginning of the span
+/// Returns the `syntax_pos::Loc` corresponding to the beginning of the span.
pub fn span_start(cx: &CodegenCx, span: Span) -> syntax_pos::Loc {
cx.sess().source_map().lookup_char_pos(span.lo())
}
extern crate memmap;
use rustc_codegen_ssa::traits::*;
-use rustc_codegen_ssa::back::write::{CodegenContext, ModuleConfig};
+use rustc_codegen_ssa::back::write::{CodegenContext, ModuleConfig, FatLTOInput};
use rustc_codegen_ssa::back::lto::{SerializedModule, LtoModuleCodegen, ThinModule};
use rustc_codegen_ssa::CompiledModule;
use errors::{FatalError, Handler};
}
fn run_fat_lto(
cgcx: &CodegenContext<Self>,
- modules: Vec<ModuleCodegen<Self::Module>>,
+ modules: Vec<FatLTOInput<Self>>,
+ cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
timeline: &mut Timeline
) -> Result<LtoModuleCodegen<Self>, FatalError> {
- back::lto::run_fat(cgcx, modules, timeline)
+ back::lto::run_fat(cgcx, modules, cached_modules, timeline)
}
fn run_thin_lto(
cgcx: &CodegenContext<Self>,
back::write::codegen(cgcx, diag_handler, module, config, timeline)
}
fn prepare_thin(
- cgcx: &CodegenContext<Self>,
module: ModuleCodegen<Self::Module>
) -> (String, Self::ThinBuffer) {
- back::lto::prepare_thin(cgcx, module)
+ back::lto::prepare_thin(module)
+ }
+ fn serialize_module(
+ module: ModuleCodegen<Self::Module>
+ ) -> (String, Self::ModuleBuffer) {
+ (module.name, back::lto::ModuleBuffer::new(module.module_llvm.llmod()))
}
fn run_lto_pass_manager(
cgcx: &CodegenContext<Self>,
}
}
+ fn parse(
+ cgcx: &CodegenContext<LlvmCodegenBackend>,
+ name: &str,
+ buffer: &back::lto::ModuleBuffer,
+ handler: &Handler,
+ ) -> Result<Self, FatalError> {
+ unsafe {
+ let llcx = llvm::LLVMRustContextCreate(cgcx.fewer_names);
+ let llmod_raw = buffer.parse(name, llcx, handler)?;
+ let tm = match (cgcx.tm_factory.0)() {
+ Ok(m) => m,
+ Err(e) => {
+ handler.struct_err(&e).emit();
+ return Err(FatalError)
+ }
+ };
+
+ Ok(ModuleLlvm {
+ llmod_raw,
+ llcx,
+ tm,
+ })
+ }
+ }
+
fn llmod(&self) -> &llvm::Module {
unsafe {
&*self.llmod_raw
pub fn LLVMGetSections(ObjFile: &'a ObjectFile) -> &'a mut SectionIterator<'a>;
/// Destroys a section iterator.
pub fn LLVMDisposeSectionIterator(SI: &'a mut SectionIterator<'a>);
- /// Returns true if the section iterator is at the end of the section
+ /// Returns `true` if the section iterator is at the end of the section
/// list:
pub fn LLVMIsSectionIteratorAtEnd(ObjFile: &'a ObjectFile, SI: &SectionIterator<'a>) -> Bool;
/// Moves the section iterator to point to the next section.
CallbackPayload: *mut c_void,
);
pub fn LLVMRustFreeThinLTOData(Data: &'static mut ThinLTOData);
- pub fn LLVMRustParseBitcodeForThinLTO(
+ pub fn LLVMRustParseBitcodeForLTO(
Context: &Context,
Data: *const u8,
len: usize,
}
}
- /// Get the LLVM type corresponding to a Rust type, i.e., `rustc::ty::Ty`.
+ /// Gets the LLVM type corresponding to a Rust type, i.e., `rustc::ty::Ty`.
/// The pointee type of the pointer in `PlaceRef` is always this type.
/// For sized types, it is also the right LLVM type for an `alloca`
/// containing a value of that type, and most immediates (except `bool`).
use rustc::middle::dependency_format::Linkage;
use rustc::session::Session;
use rustc::session::config::{self, CrateType, OptLevel, DebugInfo,
- CrossLangLto, Lto};
+ LinkerPluginLto, Lto};
use rustc::ty::TyCtxt;
use rustc_target::spec::{LinkerFlavor, LldFlavor};
use serialize::{json, Encoder};
}
}
-/// Linker abstraction used by back::link to build up the command to invoke a
+/// Linker abstraction used by `back::link` to build up the command to invoke a
/// linker.
///
/// This trait is the total list of requirements needed by `back::link` and
fn subsystem(&mut self, subsystem: &str);
fn group_start(&mut self);
fn group_end(&mut self);
- fn cross_lang_lto(&mut self);
+ fn linker_plugin_lto(&mut self);
// Should have been finalize(self), but we don't support self-by-value on trait objects (yet?).
fn finalize(&mut self) -> Command;
}
impl<'a> GccLinker<'a> {
/// Argument that must be passed *directly* to the linker
///
- /// These arguments need to be prepended with '-Wl,' when a gcc-style linker is used
+ /// These arguments need to be prepended with `-Wl,` when a GCC-style linker is used.
fn linker_arg<S>(&mut self, arg: S) -> &mut Self
where S: AsRef<OsStr>
{
}
}
- fn push_cross_lang_lto_args(&mut self, plugin_path: Option<&OsStr>) {
+ fn push_linker_plugin_lto_args(&mut self, plugin_path: Option<&OsStr>) {
if let Some(plugin_path) = plugin_path {
let mut arg = OsString::from("-plugin=");
arg.push(plugin_path);
}
}
- fn cross_lang_lto(&mut self) {
- match self.sess.opts.debugging_opts.cross_lang_lto {
- CrossLangLto::Disabled => {
+ fn linker_plugin_lto(&mut self) {
+ match self.sess.opts.cg.linker_plugin_lto {
+ LinkerPluginLto::Disabled => {
// Nothing to do
}
- CrossLangLto::LinkerPluginAuto => {
- self.push_cross_lang_lto_args(None);
+ LinkerPluginLto::LinkerPluginAuto => {
+ self.push_linker_plugin_lto_args(None);
}
- CrossLangLto::LinkerPlugin(ref path) => {
- self.push_cross_lang_lto_args(Some(path.as_os_str()));
+ LinkerPluginLto::LinkerPlugin(ref path) => {
+ self.push_linker_plugin_lto_args(Some(path.as_os_str()));
}
}
}
fn group_start(&mut self) {}
fn group_end(&mut self) {}
- fn cross_lang_lto(&mut self) {
+ fn linker_plugin_lto(&mut self) {
// Do nothing
}
}
fn group_start(&mut self) {}
fn group_end(&mut self) {}
- fn cross_lang_lto(&mut self) {
+ fn linker_plugin_lto(&mut self) {
// Do nothing
}
}
fn group_start(&mut self) {}
fn group_end(&mut self) {}
- fn cross_lang_lto(&mut self) {
+ fn linker_plugin_lto(&mut self) {
// Do nothing for now
}
}
fn group_end(&mut self) {
}
- fn cross_lang_lto(&mut self) {
+ fn linker_plugin_lto(&mut self) {
}
}
use std::time::Instant;
use std::thread;
-const PRE_THIN_LTO_BC_EXT: &str = "pre-thin-lto.bc";
+const PRE_LTO_BC_EXT: &str = "pre-lto.bc";
/// Module-specific configuration for `optimize_and_codegen`.
pub struct ModuleConfig {
pub pgo_use: String,
// Flags indicating which outputs to produce.
- pub emit_pre_thin_lto_bc: bool,
+ pub emit_pre_lto_bc: bool,
pub emit_no_opt_bc: bool,
pub emit_bc: bool,
pub emit_bc_compressed: bool,
pgo_use: String::new(),
emit_no_opt_bc: false,
- emit_pre_thin_lto_bc: false,
+ emit_pre_lto_bc: false,
emit_bc: false,
emit_bc_compressed: false,
emit_lto_bc: false,
self.time_passes = sess.time_passes();
self.inline_threshold = sess.opts.cg.inline_threshold;
self.obj_is_bitcode = sess.target.target.options.obj_is_bitcode ||
- sess.opts.debugging_opts.cross_lang_lto.enabled();
+ sess.opts.cg.linker_plugin_lto.enabled();
let embed_bitcode = sess.target.target.options.embed_bitcode ||
sess.opts.debugging_opts.embed_bitcode;
if embed_bitcode {
fn generate_lto_work<B: ExtraBackendMethods>(
cgcx: &CodegenContext<B>,
- needs_fat_lto: Vec<ModuleCodegen<B::Module>>,
+ needs_fat_lto: Vec<FatLTOInput<B>>,
needs_thin_lto: Vec<(String, B::ThinBuffer)>,
import_only_modules: Vec<(SerializedModule<B::ModuleBuffer>, WorkProduct)>
) -> Vec<(WorkItem<B>, u64)> {
let (lto_modules, copy_jobs) = if !needs_fat_lto.is_empty() {
assert!(needs_thin_lto.is_empty());
- assert!(import_only_modules.is_empty());
- let lto_module = B::run_fat_lto(cgcx, needs_fat_lto, &mut timeline)
- .unwrap_or_else(|e| e.raise());
+ let lto_module = B::run_fat_lto(
+ cgcx,
+ needs_fat_lto,
+ import_only_modules,
+ &mut timeline,
+ )
+ .unwrap_or_else(|e| e.raise());
(vec![lto_module], vec![])
} else {
assert!(needs_fat_lto.is_empty());
sess.opts.output_types.contains_key(&OutputType::Exe)
}
-fn need_pre_thin_lto_bitcode_for_incr_comp(sess: &Session) -> bool {
+fn need_pre_lto_bitcode_for_incr_comp(sess: &Session) -> bool {
if sess.opts.incremental.is_none() {
return false
}
match sess.lto() {
- Lto::Fat |
Lto::No => false,
+ Lto::Fat |
Lto::Thin |
Lto::ThinLocal => true,
}
// Save all versions of the bytecode if we're saving our temporaries.
if sess.opts.cg.save_temps {
modules_config.emit_no_opt_bc = true;
- modules_config.emit_pre_thin_lto_bc = true;
+ modules_config.emit_pre_lto_bc = true;
modules_config.emit_bc = true;
modules_config.emit_lto_bc = true;
metadata_config.emit_bc = true;
allocator_config.emit_bc_compressed = true;
}
- modules_config.emit_pre_thin_lto_bc =
- need_pre_thin_lto_bitcode_for_incr_comp(sess);
+ modules_config.emit_pre_lto_bc =
+ need_pre_lto_bitcode_for_incr_comp(sess);
modules_config.no_integrated_as = tcx.sess.opts.cg.no_integrated_as ||
tcx.sess.target.target.options.no_integrated_as;
/// Copy the post-LTO artifacts from the incremental cache to the output
/// directory.
CopyPostLtoArtifacts(CachedModuleCodegen),
- /// Perform (Thin)LTO on the given module.
+ /// Performs (Thin)LTO on the given module.
LTO(lto::LtoModuleCodegen<B>),
}
enum WorkItemResult<B: WriteBackendMethods> {
Compiled(CompiledModule),
- NeedsFatLTO(ModuleCodegen<B::Module>),
+ NeedsFatLTO(FatLTOInput<B>),
NeedsThinLTO(String, B::ThinBuffer),
}
+pub enum FatLTOInput<B: WriteBackendMethods> {
+ Serialized {
+ name: String,
+ buffer: B::ModuleBuffer,
+ },
+ InMemory(ModuleCodegen<B::Module>),
+}
+
fn execute_work_item<B: ExtraBackendMethods>(
cgcx: &CodegenContext<B>,
work_item: WorkItem<B>,
// If the linker does LTO, we don't have to do it. Note that we
// keep doing full LTO, if it is requested, as not to break the
// assumption that the output will be a single module.
- let linker_does_lto = cgcx.opts.debugging_opts.cross_lang_lto.enabled();
+ let linker_does_lto = cgcx.opts.cg.linker_plugin_lto.enabled();
// When we're automatically doing ThinLTO for multi-codegen-unit
// builds we don't actually want to LTO the allocator modules if
}
};
+ // If we're doing some form of incremental LTO then we need to be sure to
+ // save our module to disk first.
+ let bitcode = if cgcx.config(module.kind).emit_pre_lto_bc {
+ let filename = pre_lto_bitcode_filename(&module.name);
+ cgcx.incr_comp_session_dir.as_ref().map(|path| path.join(&filename))
+ } else {
+ None
+ };
+
Ok(match lto_type {
ComputedLtoType::No => {
let module = unsafe {
WorkItemResult::Compiled(module)
}
ComputedLtoType::Thin => {
- let (name, thin_buffer) = B::prepare_thin(cgcx, module);
+ let (name, thin_buffer) = B::prepare_thin(module);
+ if let Some(path) = bitcode {
+ fs::write(&path, thin_buffer.data()).unwrap_or_else(|e| {
+ panic!("Error writing pre-lto-bitcode file `{}`: {}",
+ path.display(),
+ e);
+ });
+ }
WorkItemResult::NeedsThinLTO(name, thin_buffer)
}
- ComputedLtoType::Fat => WorkItemResult::NeedsFatLTO(module),
+ ComputedLtoType::Fat => {
+ match bitcode {
+ Some(path) => {
+ let (name, buffer) = B::serialize_module(module);
+ fs::write(&path, buffer.data()).unwrap_or_else(|e| {
+ panic!("Error writing pre-lto-bitcode file `{}`: {}",
+ path.display(),
+ e);
+ });
+ WorkItemResult::NeedsFatLTO(FatLTOInput::Serialized { name, buffer })
+ }
+ None => WorkItemResult::NeedsFatLTO(FatLTOInput::InMemory(module)),
+ }
+ }
})
}
pub enum Message<B: WriteBackendMethods> {
Token(io::Result<Acquired>),
NeedsFatLTO {
- result: ModuleCodegen<B::Module>,
+ result: FatLTOInput<B>,
worker_id: usize,
},
NeedsThinLTO {
drop(self.coordinator_send.send(Box::new(Message::CodegenComplete::<B>)));
}
- /// Consume this context indicating that codegen was entirely aborted, and
+ /// Consumes this context indicating that codegen was entirely aborted, and
/// we need to exit as quickly as possible.
///
/// This method blocks the current thread until all worker threads have
}
pub fn pre_lto_bitcode_filename(module_name: &str) -> String {
- format!("{}.{}", module_name, PRE_THIN_LTO_BC_EXT)
+ format!("{}.{}", module_name, PRE_LTO_BC_EXT)
}
fn msvc_imps_needed(tcx: TyCtxt) -> bool {
// This should never be true (because it's not supported). If it is true,
// something is wrong with commandline arg validation.
- assert!(!(tcx.sess.opts.debugging_opts.cross_lang_lto.enabled() &&
+ assert!(!(tcx.sess.opts.cg.linker_plugin_lto.enabled() &&
tcx.sess.target.target.options.is_like_msvc &&
tcx.sess.opts.cg.prefer_dynamic));
tcx.sess.crate_types.borrow().iter().any(|ct| *ct == config::CrateType::Rlib) &&
// ThinLTO can't handle this workaround in all cases, so we don't
// emit the `__imp_` symbols. Instead we make them unnecessary by disallowing
- // dynamic linking when cross-language LTO is enabled.
- !tcx.sess.opts.debugging_opts.cross_lang_lto.enabled()
+ // dynamic linking when linker plugin LTO is enabled.
+ !tcx.sess.opts.cg.linker_plugin_lto.enabled()
}
//!
//! Hopefully useful general knowledge about codegen:
//!
-//! * There's no way to find out the Ty type of a Value. Doing so
-//! would be "trying to get the eggs out of an omelette" (credit:
-//! pcwalton). You can, instead, find out its llvm::Type by calling val_ty,
-//! but one llvm::Type corresponds to many `Ty`s; for instance, tup(int, int,
-//! int) and rec(x=int, y=int, z=int) will have the same llvm::Type.
+//! * There's no way to find out the `Ty` type of a Value. Doing so
+//! would be "trying to get the eggs out of an omelette" (credit:
+//! pcwalton). You can, instead, find out its `llvm::Type` by calling `val_ty`,
+//! but one `llvm::Type` corresponds to many `Ty`s; for instance, `tup(int, int,
+//! int)` and `rec(x=int, y=int, z=int)` will have the same `llvm::Type`.
use crate::{ModuleCodegen, ModuleKind, CachedModuleCodegen};
bx.sext(cmp, ret_ty)
}
-/// Retrieve the information we are losing (making dynamic) in an unsizing
+/// Retrieves the information we are losing (making dynamic) in an unsizing
/// adjustment.
///
/// The `old_info` argument is a bit funny. It is intended for use
}
}
-/// Returns whether this session's target will use SEH-based unwinding.
+/// Returns `true` if this session's target will use SEH-based unwinding.
///
/// This is only true for MSVC targets, and even then the 64-bit MSVC target
/// currently uses SEH-ish unwinding with DWARF info tables to the side (same as
mir::codegen_mir::<Bx>(cx, lldecl, &mir, instance, sig);
}
-/// Create the `main` function which will initialize the rust runtime and call
+/// Creates the `main` function which will initialize the rust runtime and call
/// users main function.
pub fn maybe_create_entry_wrapper<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
cx: &'a Bx::CodegenCx
pub struct ModuleCodegen<M> {
/// The name of the module. When the crate may be saved between
/// compilations, incremental compilation requires that name be
- /// unique amongst **all** crates. Therefore, it should contain
+ /// unique amongst **all** crates. Therefore, it should contain
/// something unique to this crate (e.g., a module path) as well
/// as the crate name and disambiguator.
/// We currently generate these names via CodegenUnit::build_cgu_name().
}
}
-/// Misc info we load from metadata to persist beyond the tcx
+/// Misc info we load from metadata to persist beyond the tcx.
pub struct CrateInfo {
pub panic_runtime: Option<CrateNum>,
pub compiler_builtins: Option<CrateNum>,
}
}
- /// Return the landingpad wrapper around the given basic block
+ /// Returns the landing-pad wrapper around the given basic block.
///
/// No-op in MSVC SEH scheme.
fn landing_pad_to(
}).unzip()
}
-/// Produce, for each argument, a `Value` pointing at the
+/// Produces, for each argument, a `Value` pointing at the
/// argument's value. As arguments are places, these are always
/// indirect.
fn arg_local_refs<'a, 'tcx: 'a, Bx: BuilderMethods<'a, 'tcx>>(
#[derive(Copy, Clone, Debug)]
pub struct PlaceRef<'tcx, V> {
- /// Pointer to the contents of the place
+ /// Pointer to the contents of the place.
pub llval: V,
- /// This place's extra data if it is unsized, or null
+ /// This place's extra data if it is unsized, or null.
pub llextra: Option<V>,
- /// Monomorphized type of this place, including variant information
+ /// Monomorphized type of this place, including variant information.
pub layout: TyLayout<'tcx>,
- /// What alignment we know for this place
+ /// What alignment we know for this place.
pub align: Align,
}
}
}
- /// Set the discriminant for a new value of the given case of the given
+ /// Sets the discriminant for a new value of the given case of the given
/// representation.
pub fn codegen_set_discr<Bx: BuilderMethods<'a, 'tcx, Value = V>>(
&self,
/// Declare a global with an intention to define it.
///
/// Use this function when you intend to define a global. This function will
- /// return None if the name already has a definition associated with it. In that
+ /// return `None` if the name already has a definition associated with it. In that
/// case an error should be reported to the user, because it usually happens due
/// to user’s fault (e.g., misuse of #[no_mangle] or #[export_name] attributes).
fn define_global(&self, name: &str, ty: Self::Type) -> Option<Self::Value>;
/// can happen with #[no_mangle] or #[export_name], for example.
fn define_internal_fn(&self, name: &str, fn_sig: ty::PolyFnSig<'tcx>) -> Self::Value;
- /// Get declared value by name.
+ /// Gets declared value by name.
fn get_declared_value(&self, name: &str) -> Option<Self::Value>;
- /// Get defined or externally defined (AvailableExternally linkage) value by
+ /// Gets defined or externally defined (AvailableExternally linkage) value by
/// name.
fn get_defined_value(&self, name: &str) -> Option<Self::Value>;
}
fn type_ptr_to(&self, ty: Self::Type) -> Self::Type;
fn element_type(&self, ty: Self::Type) -> Self::Type;
- /// Return the number of elements in `self` if it is a LLVM vector type.
+ /// Returns the number of elements in `self` if it is a LLVM vector type.
fn vector_length(&self, ty: Self::Type) -> usize;
fn func_params_types(&self, ty: Self::Type) -> Vec<Self::Type>;
fn float_width(&self, ty: Self::Type) -> usize;
- /// Retrieve the bit width of the integer type `self`.
+ /// Retrieves the bit width of the integer type `self`.
fn int_width(&self, ty: Self::Type) -> u64;
fn val_ty(&self, v: Self::Value) -> Self::Type;
use crate::back::lto::{LtoModuleCodegen, SerializedModule, ThinModule};
-use crate::back::write::{CodegenContext, ModuleConfig};
+use crate::back::write::{CodegenContext, ModuleConfig, FatLTOInput};
use crate::{CompiledModule, ModuleCodegen};
use rustc::dep_graph::WorkProduct;
/// for further optimization.
fn run_fat_lto(
cgcx: &CodegenContext<Self>,
- modules: Vec<ModuleCodegen<Self::Module>>,
+ modules: Vec<FatLTOInput<Self>>,
+ cached_modules: Vec<(SerializedModule<Self::ModuleBuffer>, WorkProduct)>,
timeline: &mut Timeline,
) -> Result<LtoModuleCodegen<Self>, FatalError>;
/// Performs thin LTO by performing necessary global analysis and returning two
timeline: &mut Timeline,
) -> Result<CompiledModule, FatalError>;
fn prepare_thin(
- cgcx: &CodegenContext<Self>,
module: ModuleCodegen<Self::Module>
) -> (String, Self::ThinBuffer);
+ fn serialize_module(
+ module: ModuleCodegen<Self::Module>
+ ) -> (String, Self::ModuleBuffer);
fn run_lto_pass_manager(
cgcx: &CodegenContext<Self>,
llmod: &ModuleCodegen<Self::Module>,
-/// Convert unsigned integers into a string representation with some base.
+/// Converts unsigned integers into a string representation with some base.
/// Bases up to and including 36 can be used for case-insensitive things.
use std::str;
}
impl<T: Idx> BitSet<T> {
- /// Create a new, empty bitset with a given `domain_size`.
+ /// Creates a new, empty bitset with a given `domain_size`.
#[inline]
pub fn new_empty(domain_size: usize) -> BitSet<T> {
let num_words = num_words(domain_size);
}
}
- /// Create a new, filled bitset with a given `domain_size`.
+ /// Creates a new, filled bitset with a given `domain_size`.
#[inline]
pub fn new_filled(domain_size: usize) -> BitSet<T> {
let num_words = num_words(domain_size);
result
}
- /// Get the domain size.
+ /// Gets the domain size.
pub fn domain_size(&self) -> usize {
self.domain_size
}
self.words.iter().map(|e| e.count_ones() as usize).sum()
}
- /// True if `self` contains `elem`.
+ /// Returns `true` if `self` contains `elem`.
#[inline]
pub fn contains(&self, elem: T) -> bool {
assert!(elem.index() < self.domain_size);
self.words.iter().all(|a| *a == 0)
}
- /// Insert `elem`. Returns true if the set has changed.
+ /// Insert `elem`. Returns whether the set has changed.
#[inline]
pub fn insert(&mut self, elem: T) -> bool {
assert!(elem.index() < self.domain_size);
self.clear_excess_bits();
}
- /// Returns true if the set has changed.
+ /// Returns `true` if the set has changed.
#[inline]
pub fn remove(&mut self, elem: T) -> bool {
assert!(elem.index() < self.domain_size);
new_word != word
}
- /// Set `self = self | other` and return true if `self` changed
+ /// Sets `self = self | other` and returns `true` if `self` changed
/// (i.e., if new bits were added).
pub fn union(&mut self, other: &impl UnionIntoBitSet<T>) -> bool {
other.union_into(self)
}
- /// Set `self = self - other` and return true if `self` changed.
+ /// Sets `self = self - other` and returns `true` if `self` changed.
/// (i.e., if any bits were removed).
pub fn subtract(&mut self, other: &impl SubtractFromBitSet<T>) -> bool {
other.subtract_from(self)
}
- /// Set `self = self & other` and return true if `self` changed.
+ /// Sets `self = self & other` and returns `true` if `self` changed.
/// (i.e., if any bits were removed).
pub fn intersect(&mut self, other: &BitSet<T>) -> bool {
assert_eq!(self.domain_size, other.domain_size);
bitwise(&mut self.words, &other.words, |a, b| { a & b })
}
- /// Get a slice of the underlying words.
+ /// Gets a slice of the underlying words.
pub fn words(&self) -> &[Word] {
&self.words
}
GrowableBitSet { bit_set: BitSet::new_empty(bits) }
}
- /// Returns true if the set has changed.
+ /// Returns `true` if the set has changed.
#[inline]
pub fn insert(&mut self, elem: T) -> bool {
self.ensure(elem.index() + 1);
}
impl<R: Idx, C: Idx> BitMatrix<R, C> {
- /// Create a new `rows x columns` matrix, initially empty.
+ /// Creates a new `rows x columns` matrix, initially empty.
pub fn new(num_rows: usize, num_columns: usize) -> BitMatrix<R, C> {
// For every element, we need one bit for every other
// element. Round up to an even number of words.
/// Sets the cell at `(row, column)` to true. Put another way, insert
/// `column` to the bitset for `row`.
///
- /// Returns true if this changed the matrix, and false otherwise.
+ /// Returns `true` if this changed the matrix.
pub fn insert(&mut self, row: R, column: C) -> bool {
assert!(row.index() < self.num_rows && column.index() < self.num_columns);
let (start, _) = self.range(row);
(self.words[start + word_index] & mask) != 0
}
- /// Returns those indices that are true in rows `a` and `b`. This
+ /// Returns those indices that are true in rows `a` and `b`. This
/// is an O(n) operation where `n` is the number of elements
/// (somewhat independent from the actual size of the
/// intersection, in particular).
result
}
- /// Add the bits from row `read` to the bits from row `write`,
- /// return true if anything changed.
+ /// Adds the bits from row `read` to the bits from row `write`, and
+ /// returns `true` if anything changed.
///
/// This is used when computing transitive reachability because if
/// you have an edge `write -> read`, because in that case
}
impl<R: Idx, C: Idx> SparseBitMatrix<R, C> {
- /// Create a new empty sparse bit matrix with no rows or columns.
+ /// Creates a new empty sparse bit matrix with no rows or columns.
pub fn new(num_columns: usize) -> Self {
Self {
num_columns,
/// Sets the cell at `(row, column)` to true. Put another way, insert
/// `column` to the bitset for `row`.
///
- /// Returns true if this changed the matrix, and false otherwise.
+ /// Returns `true` if this changed the matrix.
pub fn insert(&mut self, row: R, column: C) -> bool {
self.ensure_row(row).insert(column)
}
self.row(row).map_or(false, |r| r.contains(column))
}
- /// Add the bits from row `read` to the bits from row `write`,
- /// return true if anything changed.
+ /// Adds the bits from row `read` to the bits from row `write`, and
+ /// returns `true` if anything changed.
///
/// This is used when computing transitive reachability because if
/// you have an edge `write -> read`, because in that case
//! stored. The edges are stored in a central array, but they are also
//! threaded onto two linked lists for each node, one for incoming edges
//! and one for outgoing edges. Note that every edge is a member of some
-//! incoming list and some outgoing list. Basically you can load the
+//! incoming list and some outgoing list. Basically you can load the
//! first index of the linked list from the node data structures (the
//! field `first_edge`) and then, for each edge, load the next index from
//! the field `next_edge`). Each of those fields is an array that should
pub const INCOMING: Direction = Direction { repr: 1 };
impl NodeIndex {
- /// Returns unique id (unique with respect to the graph holding associated node).
+ /// Returns unique ID (unique with respect to the graph holding associated node).
pub fn node_id(self) -> usize {
self.0
}
}
}
- /// Visit a node during the DFS. We first examine its current
+ /// Visits a node during the DFS. We first examine its current
/// state -- if it is not yet visited (`NotVisited`), we can push
/// it onto the stack and start walking its successors.
///
/// Represents some newtyped `usize` wrapper.
///
-/// (purpose: avoid mixing indexes for different bitvector domains.)
+/// Purpose: avoid mixing indexes for different bitvector domains.
pub trait Idx: Copy + 'static + Ord + Debug + Hash {
fn new(idx: usize) -> Self;
unsafe { $type { private: value } }
}
- /// Extract value of this index as an integer.
+ /// Extracts the value of this index as an integer.
#[inline]
$v fn index(self) -> usize {
self.as_usize()
}
- /// Extract value of this index as a usize.
+ /// Extracts the value of this index as a `u32`.
#[inline]
$v fn as_u32(self) -> u32 {
self.private
}
- /// Extract value of this index as a u32.
+ /// Extracts the value of this index as a `usize`.
#[inline]
$v fn as_usize(self) -> usize {
self.as_u32() as usize
self.raw.get_mut(index.index())
}
- /// Return mutable references to two distinct elements, a and b. Panics if a == b.
+ /// Returns mutable references to two distinct elements, a and b. Panics if a == b.
#[inline]
pub fn pick2_mut(&mut self, a: I, b: I) -> (&mut T, &mut T) {
let (ai, bi) = (a.index(), b.index());
use std::sync::atomic::Ordering;
impl<O: ForestObligation> ObligationForest<O> {
- /// Create a graphviz representation of the obligation forest. Given a directory this will
- /// create files with name of the format `<counter>_<description>.gv`. The counter is
+ /// Creates a graphviz representation of the obligation forest. Given a directory this will
+ /// create files with name of the format `<counter>_<description>.gv`. The counter is
/// global and is maintained internally.
///
/// Calling this will do nothing unless the environment variable
//! #### Snapshots
//!
//! The `ObligationForest` supports a limited form of snapshots; see
-//! `start_snapshot`; `commit_snapshot`; and `rollback_snapshot`. In
+//! `start_snapshot`, `commit_snapshot`, and `rollback_snapshot`. In
//! particular, you can use a snapshot to roll back new root
//! obligations. However, it is an error to attempt to
//! `process_obligations` during a snapshot.
//! ### Implementation details
//!
//! For the most part, comments specific to the implementation are in the
-//! code. This file only contains a very high-level overview. Basically,
+//! code. This file only contains a very high-level overview. Basically,
//! the forest is stored in a vector. Each element of the vector is a node
//! in some tree. Each node in the vector has the index of an (optional)
//! parent and (for convenience) its root (which may be itself). It also
obligation_tree_id_generator: ObligationTreeIdGenerator,
- /// Per tree error cache. This is used to deduplicate errors,
+ /// Per tree error cache. This is used to deduplicate errors,
/// which is necessary to avoid trait resolution overflow in
/// some cases.
///
}
}
- /// Return the total number of nodes in the forest that have not
+ /// Returns the total number of nodes in the forest that have not
/// yet been fully resolved.
pub fn len(&self) -> usize {
self.nodes.len()
}
- /// Registers an obligation
+ /// Registers an obligation.
///
/// This CAN be done in a snapshot
pub fn register_obligation(&mut self, obligation: O) {
}
}
- /// Convert all remaining obligations to the given error.
+ /// Converts all remaining obligations to the given error.
///
/// This cannot be done during a snapshot.
pub fn to_errors<E: Clone>(&mut self, error: E) -> Vec<Error<O, E>> {
.insert(node.obligation.as_predicate().clone());
}
- /// Perform a pass through the obligation list. This must
+ /// Performs a pass through the obligation list. This must
/// be called in a loop until `outcome.stalled` is false.
///
- /// This CANNOT be unrolled (presently, at least).
+ /// This _cannot_ be unrolled (presently, at least).
pub fn process_obligations<P>(&mut self, processor: &mut P, do_completed: DoCompleted)
-> Outcome<O, P::Error>
where P: ObligationProcessor<Obligation=O>
}
}
- /// Mark all NodeState::Success nodes as NodeState::Done and
+ /// Mark all `NodeState::Success` nodes as `NodeState::Done` and
/// report all cycles between them. This should be called
/// after `mark_as_waiting` marks all nodes with pending
/// subobligations as NodeState::Waiting.
}
}
- /// Marks all nodes that depend on a pending node as NodeState::Waiting.
+ /// Marks all nodes that depend on a pending node as `NodeState::Waiting`.
fn mark_as_waiting(&self) {
for node in &self.nodes {
if node.state.get() == NodeState::Waiting {
pub unsafe trait IntoErased<'a> {
/// Owner with the dereference type substituted to `Erased`.
type Erased;
- /// Perform the type erasure.
+ /// Performs the type erasure.
fn into_erased(self) -> Self::Erased;
}
pub unsafe trait IntoErasedSend<'a> {
/// Owner with the dereference type substituted to `Erased + Send`.
type Erased: Send;
- /// Perform the type erasure.
+ /// Performs the type erasure.
fn into_erased_send(self) -> Self::Erased;
}
pub unsafe trait IntoErasedSendSync<'a> {
/// Owner with the dereference type substituted to `Erased + Send + Sync`.
type Erased: Send + Sync;
- /// Perform the type erasure.
+ /// Performs the type erasure.
fn into_erased_send_sync(self) -> Self::Erased;
}
impl<O, H> OwningHandle<O, H>
where O: StableAddress, O::Target: ToHandle<Handle = H>, H: Deref,
{
- /// Create a new `OwningHandle` for a type that implements `ToHandle`. For types
+ /// Creates a new `OwningHandle` for a type that implements `ToHandle`. For types
/// that don't implement `ToHandle`, callers may invoke `new_with_fn`, which accepts
/// a callback to perform the conversion.
pub fn new(o: O) -> Self {
impl<O, H> OwningHandle<O, H>
where O: StableAddress, O::Target: ToHandleMut<HandleMut = H>, H: DerefMut,
{
- /// Create a new mutable `OwningHandle` for a type that implements `ToHandleMut`.
+ /// Creates a new mutable `OwningHandle` for a type that implements `ToHandleMut`.
pub fn new_mut(o: O) -> Self {
OwningHandle::new_with_fn(o, |x| unsafe { O::Target::to_handle_mut(x) })
}
impl<O, H> OwningHandle<O, H>
where O: StableAddress, H: Deref,
{
- /// Create a new OwningHandle. The provided callback will be invoked with
+ /// Creates a new OwningHandle. The provided callback will be invoked with
/// a pointer to the object owned by `o`, and the returned value is stored
/// as the object to which this `OwningHandle` will forward `Deref` and
/// `DerefMut`.
}
}
- /// Create a new OwningHandle. The provided callback will be invoked with
+ /// Creates a new OwningHandle. The provided callback will be invoked with
/// a pointer to the object owned by `o`, and the returned value is stored
/// as the object to which this `OwningHandle` will forward `Deref` and
/// `DerefMut`.
});
}
-/// Load an integer of the desired type from a byte stream, in LE order. Uses
+/// Loads an integer of the desired type from a byte stream, in LE order. Uses
/// `copy_nonoverlapping` to let the compiler generate the most efficient way
/// to load it from a possibly unaligned address.
///
});
}
-/// Load an u64 using up to 7 bytes of a byte slice.
+/// Loads a u64 using up to 7 bytes of a byte slice.
///
/// Unsafe because: unchecked indexing at start..start+len
#[inline]
}
impl Svh {
- /// Create a new `Svh` given the hash. If you actually want to
+ /// Creates a new `Svh` given the hash. If you actually want to
/// compute the SVH from some HIR, you want the `calculate_svh`
/// function found in `librustc_incremental`.
pub fn new(hash: u64) -> Svh {
}
/// Applies the (partial) function to each edge and returns a new
- /// relation. If `f` returns `None` for any end-point, returns
+ /// relation. If `f` returns `None` for any end-point, returns
/// `None`.
pub fn maybe_map<F, U>(&self, mut f: F) -> Option<TransitiveRelation<U>>
where F: FnMut(&T) -> Option<U>,
}
}
- /// Check whether `a < target` (transitively)
+ /// Checks whether `a < target` (transitively)
pub fn contains(&self, a: &T, b: &T) -> bool {
match (self.index(a), self.index(b)) {
(Some(a), Some(b)) => self.with_closure(|closure| closure.contains(a.0, b.0)),
/// Thinking of `x R y` as an edge `x -> y` in a graph, this
/// returns all things reachable from `a`.
///
- /// Really this probably ought to be `impl Iterator<Item=&T>`, but
+ /// Really this probably ought to be `impl Iterator<Item = &T>`, but
/// I'm too lazy to make that work, and -- given the caching
/// strategy -- it'd be a touch tricky anyhow.
pub fn reachable_from(&self, a: &T) -> Vec<&T> {
/// the query is `postdom_upper_bound(a, b)`:
///
/// ```text
- /// // returns Some(x), which is also LUB
+ /// // Returns Some(x), which is also LUB.
/// a -> a1 -> x
/// ^
/// |
/// b -> b1 ---+
///
- /// // returns Some(x), which is not LUB (there is none)
- /// // diagonal edges run left-to-right
+ /// // Returns `Some(x)`, which is not LUB (there is none)
+ /// // diagonal edges run left-to-right.
/// a -> a1 -> x
/// \/ ^
/// /\ |
/// b -> b1 ---+
///
- /// // returns None
+ /// // Returns `None`.
/// a -> a1
/// b -> b1
/// ```
}
impl<T: Idx> WorkQueue<T> {
- /// Create a new work queue with all the elements from (0..len).
+ /// Creates a new work queue with all the elements from (0..len).
#[inline]
pub fn with_all(len: usize) -> Self {
WorkQueue {
}
}
- /// Create a new work queue that starts empty, where elements range from (0..len).
+ /// Creates a new work queue that starts empty, where elements range from (0..len).
#[inline]
pub fn with_none(len: usize) -> Self {
WorkQueue {
}
}
- /// True if nothing is enqueued.
+ /// Returns `true` if nothing is enqueued.
#[inline]
pub fn is_empty(&self) -> bool {
self.deque.is_empty()
(control.after_analysis.callback)(&mut state);
});
+ // Plugins like clippy and rust-semverver stop the analysis early,
+ // but want to still return an error if errors during the analysis
+ // happened:
+ tcx.sess.compile_status()?;
+
if control.after_analysis.stop == Compilation::Stop {
return result.and_then(|_| Err(CompileIncomplete::Stopped));
}
pub hir_forest: hir_map::Forest,
}
-/// Run the "early phases" of the compiler: initial `cfg` processing,
+/// Runs the "early phases" of the compiler: initial `cfg` processing,
/// loading compiler plugins (including those from `addl_plugins`),
/// syntax expansion, secondary `cfg` expansion, synthesis of a test
/// harness if one is to be provided, injection of a dependency on the
cstore::provide_extern(providers);
}
-/// Run the resolution, typechecking, region checking and other
+/// Runs the resolution, type-checking, region checking and other
/// miscellaneous analysis passes on the crate. Return various
/// structures carrying the results of the analysis.
pub fn phase_3_run_analysis_passes<'tcx, F, R>(
)
}
-/// Run the codegen backend, after which the AST and analysis can
+/// Runs the codegen backend, after which the AST and analysis can
/// be discarded.
pub fn phase_4_codegen<'a, 'tcx>(
codegen_backend: &dyn CodegenBackend,
use rustc::session::Session;
use rustc_codegen_utils::codegen_backend::CodegenBackend;
- /// Add `target_feature = "..."` cfgs for a variety of platform
+ /// Adds `target_feature = "..."` cfgs for a variety of platform
/// specific features (SSE, NEON etc.).
///
/// This is performed by checking whether a whitelisted set of
/// Process command line options. Emits messages as appropriate. If compilation
/// should continue, returns a getopts::Matches object parsed from args,
-/// otherwise returns None.
+/// otherwise returns `None`.
///
/// The compiler's handling of options is a little complicated as it ties into
/// our stability story, and it's even *more* complicated by historical
in_named_rustc_thread("rustc".to_string(), f)
}
-/// Get a list of extra command-line flags provided by the user, as strings.
+/// Gets a list of extra command-line flags provided by the user, as strings.
///
/// This function is used during ICEs to show more information useful for
/// debugging, since some ICEs only happens with non-default compiler flags
}
}
-/// Run a procedure which will detect panics in the compiler and print nicer
+/// Runs a procedure which will detect panics in the compiler and print nicer
/// error messages rather than just failing the test.
///
/// The diagnostic emitter yielded to the procedure should be used for reporting
-//! # Standalone Tests for the Inference Module
+//! Standalone tests for the inference module.
use driver;
use errors;
})
}
-/// Test substituting a bound region into a function, which introduces another level of binding.
-/// This requires adjusting the Debruijn index.
+/// Tests substituting a bound region into a function, which introduces another level of binding.
+/// This requires adjusting the De Bruijn index.
#[test]
fn subst_ty_renumber_some_bounds() {
test_env(EMPTY_SOURCE_STR, errors(&[]), |env| {
})
}
-/// Test that we correctly compute whether a type has escaping regions or not.
+/// Tests that we correctly compute whether a type has escaping regions or not.
#[test]
fn escaping() {
test_env(EMPTY_SOURCE_STR, errors(&[]), |mut env| {
})
}
-/// Test applying a substitution where the value being substituted for an early-bound region is a
+/// Tests applying a substitution where the value being substituted for an early-bound region is a
/// late-bound region.
#[test]
fn subst_region_renumber_region() {
use crate::CodeSuggestion;
+use crate::SuggestionStyle;
use crate::SubstitutionPart;
use crate::Substitution;
use crate::Applicability;
self.level == Level::Cancelled
}
- /// Add a span/label to be included in the resulting snippet.
+ /// Adds a span/label to be included in the resulting snippet.
/// This is pushed onto the `MultiSpan` that was created when the
/// diagnostic was first built. If you don't call this function at
/// all, and you just supplied a `Span` to create the diagnostic,
.collect(),
}],
msg: msg.to_owned(),
- show_code_when_inline: true,
+ style: SuggestionStyle::ShowCode,
+ applicability,
+ });
+ self
+ }
+
+ /// Prints out a message for a multipart suggestion without showing the suggested code.
+ ///
+ /// This is intended to be used for suggestions that are obvious in what the changes need to
+ /// be from the message, but showing the span label inline would be visually unpleasant
+ /// (marginally overlapping spans or multiline spans) and showing the snippet window wouldn't
+ /// improve understandability.
+ pub fn tool_only_multipart_suggestion(
+ &mut self,
+ msg: &str,
+ suggestion: Vec<(Span, String)>,
+ applicability: Applicability,
+ ) -> &mut Self {
+ self.suggestions.push(CodeSuggestion {
+ substitutions: vec![Substitution {
+ parts: suggestion
+ .into_iter()
+ .map(|(span, snippet)| SubstitutionPart { snippet, span })
+ .collect(),
+ }],
+ msg: msg.to_owned(),
+ style: SuggestionStyle::CompletelyHidden,
applicability,
});
self
}],
}],
msg: msg.to_owned(),
- show_code_when_inline: true,
+ style: SuggestionStyle::ShowCode,
applicability,
});
self
}],
}).collect(),
msg: msg.to_owned(),
- show_code_when_inline: true,
+ style: SuggestionStyle::ShowCode,
applicability,
});
self
}],
}],
msg: msg.to_owned(),
- show_code_when_inline: false,
+ style: SuggestionStyle::HideCodeInline,
+ applicability,
+ });
+ self
+ }
+
+ /// Prints out a message for a suggestion without showing the suggested code.
+ ///
+ /// This is intended to be used for suggestions that are obvious in what the changes need to
+ /// be from the message, but showing the span label inline would be visually unpleasant
+ /// (marginally overlapping spans or multiline spans) and showing the snippet window wouldn't
+ /// improve understandability.
+ pub fn span_suggestion_hidden(
+ &mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability
+ ) -> &mut Self {
+ self.suggestions.push(CodeSuggestion {
+ substitutions: vec![Substitution {
+ parts: vec![SubstitutionPart {
+ snippet: suggestion,
+ span: sp,
+ }],
+ }],
+ msg: msg.to_owned(),
+ style: SuggestionStyle::HideCodeInline,
+ applicability,
+ });
+ self
+ }
+
+ /// Adds a suggestion to the JSON output, but otherwise remains silent/undisplayed in the CLI.
+ ///
+ /// This is intended to be used for suggestions that are *very* obvious in what the changes
+ /// need to be from the message, but we still want other tools to be able to apply them.
+ pub fn tool_only_span_suggestion(
+ &mut self, sp: Span, msg: &str, suggestion: String, applicability: Applicability
+ ) -> &mut Self {
+ self.suggestions.push(CodeSuggestion {
+ substitutions: vec![Substitution {
+ parts: vec![SubstitutionPart {
+ snippet: suggestion,
+ span: sp,
+ }],
+ }],
+ msg: msg.to_owned(),
+ style: SuggestionStyle::CompletelyHidden,
applicability: applicability,
});
self
/// In general, the `DiagnosticBuilder` uses deref to allow access to
/// the fields and methods of the embedded `diagnostic` in a
-/// transparent way. *However,* many of the methods are intended to
+/// transparent way. *However,* many of the methods are intended to
/// be used in a chained way, and hence ought to return `self`. In
/// that case, we can't just naively forward to the method on the
/// `diagnostic`, because the return type would be a `&Diagnostic`
self.cancel();
}
- /// Add a span/label to be included in the resulting snippet.
+ /// Adds a span/label to be included in the resulting snippet.
/// This is pushed onto the `MultiSpan` that was created when the
/// diagnostic was first built. If you don't call this function at
/// all, and you just supplied a `Span` to create the diagnostic,
self
}
+ pub fn tool_only_multipart_suggestion(
+ &mut self,
+ msg: &str,
+ suggestion: Vec<(Span, String)>,
+ applicability: Applicability,
+ ) -> &mut Self {
+ if !self.allow_suggestions {
+ return self
+ }
+ self.diagnostic.tool_only_multipart_suggestion(
+ msg,
+ suggestion,
+ applicability,
+ );
+ self
+ }
+
+
pub fn span_suggestion(
&mut self,
sp: Span,
);
self
}
+
+ pub fn span_suggestion_hidden(
+ &mut self,
+ sp: Span,
+ msg: &str,
+ suggestion: String,
+ applicability: Applicability,
+ ) -> &mut Self {
+ if !self.allow_suggestions {
+ return self
+ }
+ self.diagnostic.span_suggestion_hidden(
+ sp,
+ msg,
+ suggestion,
+ applicability,
+ );
+ self
+ }
+
+ pub fn tool_only_span_suggestion(
+ &mut self,
+ sp: Span,
+ msg: &str,
+ suggestion: String,
+ applicability: Applicability,
+ ) -> &mut Self {
+ if !self.allow_suggestions {
+ return self
+ }
+ self.diagnostic.tool_only_span_suggestion(
+ sp,
+ msg,
+ suggestion,
+ applicability,
+ );
+ self
+ }
+
forward!(pub fn set_span<S: Into<MultiSpan>>(&mut self, sp: S) -> &mut Self);
forward!(pub fn code(&mut self, s: DiagnosticId) -> &mut Self);
use syntax_pos::{SourceFile, Span, MultiSpan};
-use crate::{Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic, SourceMapperDyn, DiagnosticId};
+use crate::{
+ Level, CodeSuggestion, DiagnosticBuilder, SubDiagnostic,
+ SuggestionStyle, SourceMapperDyn, DiagnosticId,
+};
use crate::snippet::{Annotation, AnnotationType, Line, MultilineAnnotation, StyledString, Style};
use crate::styled_buffer::StyledBuffer;
/// Emit a structured diagnostic.
fn emit(&mut self, db: &DiagnosticBuilder<'_>);
- /// Check if should show explanations about "rustc --explain"
+ /// Checks if should show explanations about "rustc --explain"
fn should_show_explain(&self) -> bool {
true
}
// don't display long messages as labels
sugg.msg.split_whitespace().count() < 10 &&
// don't display multiline suggestions as labels
- !sugg.substitutions[0].parts[0].snippet.contains('\n') {
+ !sugg.substitutions[0].parts[0].snippet.contains('\n') &&
+ // when this style is set we want the suggestion to be a message, not inline
+ sugg.style != SuggestionStyle::HideCodeAlways &&
+ // trivial suggestion for tooling's sake, never shown
+ sugg.style != SuggestionStyle::CompletelyHidden
+ {
let substitution = &sugg.substitutions[0].parts[0].snippet.trim();
- let msg = if substitution.len() == 0 || !sugg.show_code_when_inline {
+ let msg = if substitution.len() == 0 || sugg.style.hide_inline() {
// This substitution is only removal or we explicitly don't want to show the
// code inline, don't show it
format!("help: {}", sugg.msg)
}
}
- /// Add a left margin to every line but the first, given a padding length and the label being
+ /// Adds a left margin to every line but the first, given a padding length and the label being
/// displayed, keeping the provided highlighting.
fn msg_to_buffer(&self,
buffer: &mut StyledBuffer,
// `max_line_num_len`
let padding = " ".repeat(padding + label.len() + 5);
- /// Return whether `style`, or the override if present and the style is `NoStyle`.
+ /// Returns `style`, or the override if present and the style is `NoStyle`.
fn style_or_override(style: Style, override_style: Option<Style>) -> Style {
if let Some(o) = override_style {
if style == Style::NoStyle {
}
}
- fn emit_message_default(&mut self,
- msp: &MultiSpan,
- msg: &[(String, Style)],
- code: &Option<DiagnosticId>,
- level: &Level,
- max_line_num_len: usize,
- is_secondary: bool)
- -> io::Result<()> {
+ fn emit_message_default(
+ &mut self,
+ msp: &MultiSpan,
+ msg: &[(String, Style)],
+ code: &Option<DiagnosticId>,
+ level: &Level,
+ max_line_num_len: usize,
+ is_secondary: bool,
+ ) -> io::Result<()> {
let mut buffer = StyledBuffer::new();
let header_style = if is_secondary {
Style::HeaderMsg
}
- fn emit_suggestion_default(&mut self,
- suggestion: &CodeSuggestion,
- level: &Level,
- max_line_num_len: usize)
- -> io::Result<()> {
+ fn emit_suggestion_default(
+ &mut self,
+ suggestion: &CodeSuggestion,
+ level: &Level,
+ max_line_num_len: usize,
+ ) -> io::Result<()> {
if let Some(ref sm) = self.sm {
let mut buffer = StyledBuffer::new();
buffer.append(0, &level_str, Style::Level(level.clone()));
buffer.append(0, ": ", Style::HeaderMsg);
}
- self.msg_to_buffer(&mut buffer,
- &[(suggestion.msg.to_owned(), Style::NoStyle)],
- max_line_num_len,
- "suggestion",
- Some(Style::HeaderMsg));
+ self.msg_to_buffer(
+ &mut buffer,
+ &[(suggestion.msg.to_owned(), Style::NoStyle)],
+ max_line_num_len,
+ "suggestion",
+ Some(Style::HeaderMsg),
+ );
// Render the replacements for each suggestion
let suggestions = suggestion.splice_lines(&**sm);
if !self.short_message {
for child in children {
let span = child.render_span.as_ref().unwrap_or(&child.span);
- match self.emit_message_default(&span,
- &child.styled_message(),
- &None,
- &child.level,
- max_line_num_len,
- true) {
+ match self.emit_message_default(
+ &span,
+ &child.styled_message(),
+ &None,
+ &child.level,
+ max_line_num_len,
+ true,
+ ) {
Err(e) => panic!("failed to emit error: {}", e),
_ => ()
}
}
for sugg in suggestions {
- match self.emit_suggestion_default(sugg,
- &Level::Help,
- max_line_num_len) {
- Err(e) => panic!("failed to emit error: {}", e),
- _ => ()
+ if sugg.style == SuggestionStyle::CompletelyHidden {
+ // do not display this suggestion, it is meant only for tools
+ } else if sugg.style == SuggestionStyle::HideCodeAlways {
+ match self.emit_message_default(
+ &MultiSpan::new(),
+ &[(sugg.msg.to_owned(), Style::HeaderMsg)],
+ &None,
+ &Level::Help,
+ max_line_num_len,
+ true,
+ ) {
+ Err(e) => panic!("failed to emit error: {}", e),
+ _ => ()
+ }
+ } else {
+ match self.emit_suggestion_default(
+ sugg,
+ &Level::Help,
+ max_line_num_len,
+ ) {
+ Err(e) => panic!("failed to emit error: {}", e),
+ _ => ()
+ }
}
}
}
Unspecified,
}
+#[derive(Debug, PartialEq, Eq, Clone, Copy, Hash, RustcEncodable, RustcDecodable)]
+pub enum SuggestionStyle {
+ /// Hide the suggested code when displaying this suggestion inline.
+ HideCodeInline,
+ /// Always hide the suggested code but display the message.
+ HideCodeAlways,
+ /// Do not display this suggestion in the CLI output; it is only meant for tools.
+ CompletelyHidden,
+ /// Always show the suggested code.
+ /// This will *not* show the code if the suggestion is inline *and* the suggested code is
+ /// empty.
+ ShowCode,
+}
+
+impl SuggestionStyle {
+ fn hide_inline(&self) -> bool {
+ match *self {
+ SuggestionStyle::ShowCode => false,
+ _ => true,
+ }
+ }
+}
+
#[derive(Clone, Debug, PartialEq, Hash, RustcEncodable, RustcDecodable)]
pub struct CodeSuggestion {
/// Each substitute can have multiple variants due to multiple
/// ```
pub substitutions: Vec<Substitution>,
pub msg: String,
- pub show_code_when_inline: bool,
+ /// Visual representation of this suggestion.
+ pub style: SuggestionStyle,
/// Whether or not the suggestion is approximate
///
/// Sometimes we may show suggestions with placeholders,
/// Resets the diagnostic error count as well as the cached emitted diagnostics.
///
- /// NOTE: DO NOT call this function from rustc. It is only meant to be called from external
+ /// NOTE: *do not* call this function from rustc. It is only meant to be called from external
/// tools that want to reuse a `Parser` cleaning the previously emitted diagnostics as well as
/// the overall count of emitted error diagnostics.
pub fn reset_err_count(&self) {
Copy,
}
-/// Copy `p` into `q`, preferring to use hard-linking if possible. If
+/// Copies `p` into `q`, preferring to use hard-linking if possible. If
/// `q` already exists, it is removed first.
/// The result indicates which of the two operations has been performed.
pub fn link_or_copy<P: AsRef<Path>, Q: AsRef<Path>>(p: P, q: Q) -> io::Result<LinkOrCopy> {
//! In this code, we report errors on each `rustc_if_this_changed`
//! annotation. If a path exists in all cases, then we would report
//! "all path(s) exist". Otherwise, we report: "no path to `foo`" for
-//! each case where no path exists. `compile-fail` tests can then be
+//! each case where no path exists. `compile-fail` tests can then be
//! used to check when paths exist or do not.
//!
//! The full form of the `rustc_if_this_changed` annotation is
//!
//! - `#[rustc_clean(cfg="rev2", except="TypeckTables")]` if we are
//! in `#[cfg(rev2)]`, then the fingerprints associated with
-//! `DepNode::TypeckTables(X)` must be DIFFERENT (`X` is the def-id of the
+//! `DepNode::TypeckTables(X)` must be DIFFERENT (`X` is the `DefId` of the
//! current node).
//! - `#[rustc_clean(cfg="rev2")]` same as above, except that the
//! fingerprints must be the SAME (along with all other fingerprints).
//!
//! Errors are reported if we are in the suitable configuration but
//! the required condition is not met.
-//!
use std::iter::FromIterator;
use std::vec::Vec;
label_strs::TypeOfItem,
];
-/// Trait Definition DepNodes
+/// Trait definition `DepNode`s.
const BASE_TRAIT_DEF: &[&str] = &[
label_strs::AssociatedItemDefIds,
label_strs::GenericsOfItem,
label_strs::TraitImpls,
];
-/// extra DepNodes for methods (+fn)
+/// Extra `DepNode`s for functions and methods.
const EXTRA_ASSOCIATED: &[&str] = &[
label_strs::AssociatedItems,
];
EXTRA_TRAIT,
];
-/// Function DepNode
+/// Function `DepNode`s.
const LABELS_FN: &[&[&str]] = &[
BASE_HIR,
BASE_MIR,
BASE_FN,
];
-/// Method DepNodes
+/// Method `DepNode`s.
const LABELS_FN_IN_IMPL: &[&[&str]] = &[
BASE_HIR,
BASE_MIR,
EXTRA_ASSOCIATED,
];
-/// Trait-Method DepNodes
+/// Trait method `DepNode`s.
const LABELS_FN_IN_TRAIT: &[&[&str]] = &[
BASE_HIR,
BASE_MIR,
EXTRA_TRAIT,
];
-/// For generic cases like inline-assembly/mod/etc
+/// For generic cases like inline-assembly, modules, etc.
const LABELS_HIR_ONLY: &[&[&str]] = &[
BASE_HIR,
];
-/// Impl DepNodes
+/// Impl `DepNode`s.
const LABELS_IMPL: &[&[&str]] = &[
BASE_HIR,
BASE_IMPL,
];
-/// Abstract Data Type (Struct, Enum, Unions) DepNodes
+/// Abstract data type (struct, enum, union) `DepNode`s.
const LABELS_ADT: &[&[&str]] = &[
BASE_HIR,
BASE_STRUCT,
];
-/// Trait Definition DepNodes
+/// Trait definition `DepNode`s.
#[allow(dead_code)]
const LABELS_TRAIT: &[&[&str]] = &[
BASE_HIR,
Some(assertion)
}
- /// Get the "auto" assertion on pre-validated attr, along with the `except` labels
+ /// Gets the "auto" assertion on pre-validated attr, along with the `except` labels.
fn assertion_auto(&mut self, item_id: ast::NodeId, attr: &Attribute, is_clean: bool)
-> Assertion
{
use rustc::session::config::nightly_options;
use rustc_serialize::opaque::Encoder;
-/// The first few bytes of files generated by incremental compilation
+/// The first few bytes of files generated by incremental compilation.
const FILE_MAGIC: &[u8] = b"RSIC";
-/// Change this if the header format changes
+/// Change this if the header format changes.
const HEADER_FORMAT_VERSION: u16 = 0;
/// A version string that hopefully is always different for compiler versions
/// with different encodings of incremental compilation artifacts. Contains
-/// the git commit hash.
+/// the Git commit hash.
const RUSTC_VERSION: Option<&str> = option_env!("CFG_VERSION");
pub fn write_file_header(stream: &mut Encoder) {
Ok(files_linked > 0 || files_copied == 0)
}
-/// Generate unique directory path of the form:
+/// Generates a unique directory path of the form:
/// {crate_dir}/s-{timestamp}-{random-number}-working
fn generate_session_dir_path(crate_dir: &Path) -> PathBuf {
let timestamp = timestamp_to_string(SystemTime::now());
}
}
-/// Find the most recent published session directory that is not in the
+/// Finds the most recent published session directory that is not in the
/// ignore-list.
fn find_source_directory(crate_dir: &Path,
source_directories_already_tried: &FxHashSet<PathBuf>)
name = "rustc_lint"
path = "lib.rs"
crate-type = ["dylib"]
-test = false
[dependencies]
log = "0.4"
//! `LintPass` (also, note that such lints will need to be defined in
//! `rustc::lint::builtin`, not here).
//!
-//! If you define a new `LintPass`, you will also need to add it to the
-//! `add_builtin!` or `add_builtin_with_new!` invocation in `lib.rs`.
-//! Use the former for unit-like structs and the latter for structs with
-//! a `pub fn new()`.
+//! If you define a new `EarlyLintPass`, you will also need to add it to the
+//! `add_early_builtin!` or `add_early_builtin_with_new!` invocation in
+//! `lib.rs`. Use the former for unit-like structs and the latter for structs
+//! with a `pub fn new()`.
+//!
+//! If you define a new `LateLintPass`, you will also need to add it to the
+//! `late_lint_methods!` invocation in `lib.rs`.
use rustc::hir::def::Def;
use rustc::hir::def_id::{DefId, LOCAL_CRATE};
}
fn check_expr(&mut self, cx: &LateContext<'_, '_>, e: &hir::Expr) {
- let ty = cx.tables.node_id_to_type(e.hir_id);
+ let ty = cx.tables.node_type(e.hir_id);
self.check_heap_type(cx, e.span, ty);
}
}
}
pub struct MissingDoc {
- /// Stack of whether #[doc(hidden)] is set
- /// at each level which has lint attributes.
+ /// Stack of whether `#[doc(hidden)]` is set at each level which has lint attributes.
doc_hidden_stack: Vec<bool>,
/// Private traits or trait items that leaked through. Don't check their methods.
"detects anonymous parameters"
}
-/// Checks for use of anonymous parameters (RFC 1685)
-#[derive(Clone)]
+/// Checks for use of anonymous parameters (RFC 1685).
+#[derive(Copy, Clone)]
pub struct AnonymousParameters;
impl LintPass for AnonymousParameters {
}
}
-/// Checks for use of attributes which have been deprecated.
+/// Checks for use of attributes which have been deprecated.
#[derive(Clone)]
pub struct DeprecatedAttr {
// This is not free to compute, so we want to keep it around, rather than
if !def_id_is_transmute(cx, did) {
return None;
}
- let sig = cx.tables.node_id_to_type(expr.hir_id).fn_sig(cx.tcx);
+ let sig = cx.tables.node_type(expr.hir_id).fn_sig(cx.tcx);
let from = sig.inputs().skip_binder()[0];
let to = *sig.output().skip_binder();
return Some((&from.sty, &to.sty));
}
}
-/// Lint for items marked `pub` that aren't reachable from other crates
+/// Lint for items marked `pub` that aren't reachable from other crates.
+#[derive(Copy, Clone)]
pub struct UnreachablePub;
declare_lint! {
}
}
-/// Lint for trait and lifetime bounds in type aliases being mostly ignored:
+/// Lint for trait and lifetime bounds in type aliases being mostly ignored.
/// They are relevant when using associated types, but otherwise neither checked
/// at definition site nor enforced at use site.
"detects edition keywords being used as an identifier"
}
-/// Checks for uses of edition keywords used as an identifier
-#[derive(Clone)]
+/// Checks for uses of edition keywords used as an identifier.
+#[derive(Copy, Clone)]
pub struct KeywordIdents;
impl LintPass for KeywordIdents {
"types, variants, traits and type parameters should have camel case names"
}
-#[derive(Copy, Clone)]
-pub struct NonCamelCaseTypes;
+fn char_has_case(c: char) -> bool {
+ c.is_lowercase() || c.is_uppercase()
+}
-impl NonCamelCaseTypes {
- fn check_case(&self, cx: &EarlyContext<'_>, sort: &str, ident: &Ident) {
- fn char_has_case(c: char) -> bool {
- c.is_lowercase() || c.is_uppercase()
- }
+fn is_camel_case(name: &str) -> bool {
+ let name = name.trim_matches('_');
+ if name.is_empty() {
+ return true;
+ }
- fn is_camel_case(name: &str) -> bool {
- let name = name.trim_matches('_');
- if name.is_empty() {
- return true;
+ // start with a non-lowercase letter rather than non-uppercase
+ // ones (some scripts don't have a concept of upper/lowercase)
+ !name.chars().next().unwrap().is_lowercase()
+ && !name.contains("__")
+ && !name.chars().collect::<Vec<_>>().windows(2).any(|pair| {
+ // contains a capitalisable character followed by, or preceded by, an underscore
+ char_has_case(pair[0]) && pair[1] == '_' || char_has_case(pair[1]) && pair[0] == '_'
+ })
+}
+
+fn to_camel_case(s: &str) -> String {
+ s.trim_matches('_')
+ .split('_')
+ .filter(|component| !component.is_empty())
+ .map(|component| {
+ let mut camel_cased_component = String::new();
+
+ let mut new_word = true;
+ let mut prev_is_lower_case = true;
+
+ for c in component.chars() {
+ // Preserve the case if an uppercase letter follows a lowercase letter, so that
+ // `camelCase` is converted to `CamelCase`.
+ if prev_is_lower_case && c.is_uppercase() {
+ new_word = true;
+ }
+
+ if new_word {
+ camel_cased_component.push_str(&c.to_uppercase().to_string());
+ } else {
+ camel_cased_component.push_str(&c.to_lowercase().to_string());
+ }
+
+ prev_is_lower_case = c.is_lowercase();
+ new_word = false;
}
- // start with a non-lowercase letter rather than non-uppercase
- // ones (some scripts don't have a concept of upper/lowercase)
- !name.is_empty() && !name.chars().next().unwrap().is_lowercase() &&
- !name.contains("__") && !name.chars().collect::<Vec<_>>().windows(2).any(|pair| {
- // contains a capitalisable character followed by, or preceded by, an underscore
- char_has_case(pair[0]) && pair[1] == '_' ||
- char_has_case(pair[1]) && pair[0] == '_'
- })
- }
+ camel_cased_component
+ })
+ .fold(
+ (String::new(), None),
+ |(acc, prev): (String, Option<String>), next| {
+ // separate two components with an underscore if their boundary cannot
+ // be distinguished using a uppercase/lowercase case distinction
+ let join = if let Some(prev) = prev {
+ let l = prev.chars().last().unwrap();
+ let f = next.chars().next().unwrap();
+ !char_has_case(l) && !char_has_case(f)
+ } else {
+ false
+ };
+ (acc + if join { "_" } else { "" } + &next, Some(next))
+ },
+ )
+ .0
+}
- fn to_camel_case(s: &str) -> String {
- s.trim_matches('_')
- .split('_')
- .map(|word| {
- word.chars().enumerate().map(|(i, c)| if i == 0 {
- c.to_uppercase().collect::<String>()
- } else {
- c.to_lowercase().collect()
- })
- .collect::<String>()
- })
- .filter(|x| !x.is_empty())
- .fold((String::new(), None), |(acc, prev): (String, Option<String>), next| {
- // separate two components with an underscore if their boundary cannot
- // be distinguished using a uppercase/lowercase case distinction
- let join = if let Some(prev) = prev {
- let l = prev.chars().last().unwrap();
- let f = next.chars().next().unwrap();
- !char_has_case(l) && !char_has_case(f)
- } else { false };
- (acc + if join { "_" } else { "" } + &next, Some(next))
- }).0
- }
+#[derive(Copy, Clone)]
+pub struct NonCamelCaseTypes;
+impl NonCamelCaseTypes {
+ fn check_case(&self, cx: &EarlyContext<'_>, sort: &str, ident: &Ident) {
let name = &ident.name.as_str();
if !is_camel_case(name) {
- let c = to_camel_case(name);
-
- let msg = format!("{} `{}` should have a camel case name", sort, name);
+ let msg = format!("{} `{}` should have an upper camel case name", sort, name);
cx.struct_span_lint(NON_CAMEL_CASE_TYPES, ident.span, &msg)
.span_suggestion(
ident.span,
- "convert the identifier to camel case",
- c,
+ "convert the identifier to upper camel case",
+ to_camel_case(name),
Applicability::MaybeIncorrect,
)
.emit();
fn check_item(&mut self, cx: &EarlyContext<'_>, it: &ast::Item) {
let has_repr_c = it.attrs
.iter()
- .any(|attr| {
- attr::find_repr_attrs(&cx.sess.parse_sess, attr)
- .iter()
- .any(|r| r == &attr::ReprC)
- });
+ .any(|attr| attr::find_repr_attrs(&cx.sess.parse_sess, attr).contains(&attr::ReprC));
if has_repr_c {
return;
}
}
}
+
+#[cfg(test)]
+mod tests {
+ use super::{is_camel_case, to_camel_case};
+
+ #[test]
+ fn camel_case() {
+ assert!(!is_camel_case("userData"));
+ assert_eq!(to_camel_case("userData"), "UserData");
+
+ assert!(is_camel_case("X86_64"));
+
+ assert!(!is_camel_case("X86__64"));
+ assert_eq!(to_camel_case("X86__64"), "X86_64");
+
+ assert!(!is_camel_case("Abc_123"));
+ assert_eq!(to_camel_case("Abc_123"), "Abc123");
+
+ assert!(!is_camel_case("A1_b2_c3"));
+ assert_eq!(to_camel_case("A1_b2_c3"), "A1B2C3");
+
+ assert!(!is_camel_case("ONE_TWO_THREE"));
+ assert_eq!(to_camel_case("ONE_TWO_THREE"), "OneTwoThree");
+ }
+}
}
}
hir::ExprKind::Lit(ref lit) => {
- match cx.tables.node_id_to_type(e.hir_id).sty {
+ match cx.tables.node_type(e.hir_id).sty {
ty::Int(t) => {
match lit.node {
ast::LitKind::Int(v, ast::LitIntType::Signed(_)) |
// Normalize the binop so that the literal is always on the RHS in
// the comparison
let norm_binop = if swap { rev_binop(binop) } else { binop };
- match cx.tables.node_id_to_type(expr.hir_id).sty {
+ match cx.tables.node_type(expr.hir_id).sty {
ty::Int(int_ty) => {
let (min, max) = int_ty_range(int_ty);
let lit_val: i128 = match lit.node {
repr_str, val, t, actually, t
));
if let Some(sugg_ty) =
- get_type_suggestion(&cx.tables.node_id_to_type(expr.hir_id).sty, val, negative)
+ get_type_suggestion(&cx.tables.node_type(expr.hir_id).sty, val, negative)
{
if let Some(pos) = repr_str.chars().position(|c| c == 'i' || c == 'u') {
let (sans_suffix, _) = repr_str.split_at(pos);
}
impl<'a, 'tcx> ImproperCTypesVisitor<'a, 'tcx> {
- /// Check if the given type is "ffi-safe" (has a stable, well-defined
+ /// Checks if the given type is "ffi-safe" (has a stable, well-defined
/// representation which can be exported to C code).
fn check_type_for_ffi(&self,
cache: &mut FxHashSet<Ty<'tcx>>,
}
}
- /// Load custom derive macros.
+ /// Loads custom derive macros.
///
/// Note that this is intentionally similar to how we load plugins today,
/// but also intentionally separate. Plugins are likely always going to be
/// Original name of the crate.
pub name: Symbol,
- /// Name of the crate as imported. I.e., if imported with
+ /// Name of the crate as imported. I.e., if imported with
/// `extern crate foo as bar;` this will be `bar`.
pub imported_name: Symbol,
pub root: schema::CrateRoot,
- /// For each public item in this crate, we encode a key. When the
+ /// For each public item in this crate, we encode a key. When the
/// crate is loaded, we read all the keys and put them in this
- /// hashmap, which gives the reverse mapping. This allows us to
+ /// hashmap, which gives the reverse mapping. This allows us to
/// quickly retrace a `DefPath`, which is needed for incremental
/// compilation support.
pub def_path_table: Lrc<DefPathTable>,
}
}
-/// Create the "fake" DefPathTable for a given proc macro crate.
+/// Creates the "fake" DefPathTable for a given proc macro crate.
///
/// The DefPathTable is as follows:
///
```
See more:
-https://doc.rust-lang.org/book/first-edition/conditional-compilation.html
+https://doc.rust-lang.org/reference/attributes.html#conditional-compilation
"##,
E0458: r##"
}
}
- /// Load a dynamic library into the global namespace (RTLD_GLOBAL on Unix)
+ /// Loads a dynamic library into the global namespace (RTLD_GLOBAL on Unix)
/// and do it now (don't use RTLD_LAZY on Unix).
pub fn open_global_now(filename: &Path) -> Result<DynamicLibrary, String> {
let maybe_library = dl::open_global_now(filename.as_os_str());
let tables = self.tcx.typeck_tables_of(def_id);
let node_id = self.tcx.hir().as_local_node_id(def_id).unwrap();
let hir_id = self.tcx.hir().node_to_hir_id(node_id);
- let kind = match tables.node_id_to_type(hir_id).sty {
+ let kind = match tables.node_type(hir_id).sty {
ty::Generator(def_id, ..) => {
let layout = self.tcx.generator_layout(def_id);
let data = GeneratorData {
//!
//! ```
//! <common::data> // big list of item-like things...
-//! <common::data_item> // ...for most def-ids, there is an entry.
+//! <common::data_item> // ...for most `DefId`s, there is an entry.
//! </common::data_item>
//! </common::data>
//! ```
}
}
- /// Emit the data for a def-id to the metadata. The function to
+ /// Emit the data for a `DefId` to the metadata. The function to
/// emit the data is `op`, and it will be given `data` as
/// arguments. This `record` function will call `op` to generate
/// the `Entry` (which may point to other encoded information)
}
/// Trait used for data that can be passed from outside a dep-graph
-/// task. The data must either be of some safe type, such as a
+/// task. The data must either be of some safe type, such as a
/// `DefId` index, or implement the `read` method so that it can add
/// a read of whatever dep-graph nodes are appropriate.
pub trait DepGraphRead {
}
/// Newtype that can be used to package up misc data extracted from a
-/// HIR node that doesn't carry its own id. This will allow an
+/// HIR node that doesn't carry its own ID. This will allow an
/// arbitrary `T` to be passed in, but register a read on the given
-/// node-id.
+/// `NodeId`.
pub struct FromId<T>(pub ast::NodeId, pub T);
impl<T> DepGraphRead for FromId<T> {
//!
//! The reason for this is that any of B's types could be composed of C's types,
//! any function in B could return a type from C, etc. To be able to guarantee
-//! that we can always typecheck/translate any function, we have to have
+//! that we can always type-check/translate any function, we have to have
//! complete knowledge of the whole ecosystem, not just our immediate
//! dependencies.
//!
}
}
-// A diagnostic function for dumping crate metadata to an output stream
+/// A diagnostic function for dumping crate metadata to an output stream.
pub fn list_file_metadata(target: &Target,
path: &Path,
loader: &dyn MetadataLoader,
[dependencies]
arena = { path = "../libarena" }
-bitflags = "1.0"
either = "1.5.0"
dot = { path = "../libgraphviz", package = "graphviz" }
log = "0.4"
crate location_map: FxHashMap<Location, BorrowIndex>,
/// Locations which activate borrows.
- /// NOTE: A given location may activate more than one borrow in the future
+ /// NOTE: a given location may activate more than one borrow in the future
/// when more general two-phase borrow support is introduced, but for now we
- /// only need to store one borrow index
+ /// only need to store one borrow index.
crate activation_map: FxHashMap<Location, Vec<BorrowIndex>>,
- /// Map from local to all the borrows on that local
+ /// Map from local to all the borrows on that local.
crate local_map: FxHashMap<mir::Local, FxHashSet<BorrowIndex>>,
crate locals_state_at_exit: LocalsStateAtExit,
}
}
-/// Location where a two phase borrow is activated, if a borrow
-/// is in fact a two phase borrow.
+/// Location where a two-phase borrow is activated, if a borrow
+/// is in fact a two-phase borrow.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
crate enum TwoPhaseActivation {
NotTwoPhase,
}
impl<'a, 'gcx, 'tcx> GatherBorrows<'a, 'gcx, 'tcx> {
- /// Returns true if the borrow represented by `kind` is
+ /// Returns `true` if the borrow represented by `kind` is
/// allowed to be split into separate Reservation and
/// Activation phases.
fn allow_two_phase_borrow(&self, kind: mir::BorrowKind) -> bool {
/// then tell us where the move occurred.
moi: MoveOutIndex,
- /// True if we traversed a back edge while walking from the point
+ /// `true` if we traversed a back edge while walking from the point
/// of error to the move site.
traversed_back_edge: bool
}
let escapes_from = if tcx.is_closure(self.mir_def_id) {
let tables = tcx.typeck_tables_of(self.mir_def_id);
let mir_hir_id = tcx.hir().def_index_to_hir_id(self.mir_def_id.index);
- match tables.node_id_to_type(mir_hir_id).sty {
+ match tables.node_type(mir_hir_id).sty {
ty::Closure(..) => "closure",
ty::Generator(..) => "generator",
_ => bug!("Closure body doesn't have a closure or generator type"),
}
}
- /// Check if a place is a thread-local static.
+ /// Checks if a place is a thread-local static.
pub fn is_place_thread_local(&self, place: &Place<'tcx>) -> bool {
if let Place::Static(statik) = place {
let attrs = self.infcx.tcx.get_attrs(statik.def_id);
}
}
- /// Return the name of the provided `Ty` (that must be a reference)'s region with a
+ /// Returns the name of the provided `Ty` (that must be a reference)'s region with a
/// synthesized lifetime name where required.
fn get_region_name_for_ty(&self, ty: ty::Ty<'tcx>, counter: usize) -> String {
match ty.sty {
}
}
- /// Return `false` if this place is not used in a closure.
+ /// Returns `false` if this place is not used in a closure.
fn for_closure(&self) -> bool {
match *self {
UseSpans::ClosureUse { is_generator, .. } => !is_generator,
}
}
- /// Return `false` if this place is not used in a generator.
+ /// Returns `false` if this place is not used in a generator.
fn for_generator(&self) -> bool {
match *self {
UseSpans::ClosureUse { is_generator, .. } => is_generator,
access_place_error_reported: FxHashSet<(Place<'tcx>, Span)>,
/// This field keeps track of when borrow conflict errors are reported
/// for reservations, so that we don't report seemingly duplicate
- /// errors for corresponding activations
- ///
- /// FIXME: Ideally this would be a set of BorrowIndex, not Places,
- /// but it is currently inconvenient to track down the BorrowIndex
- /// at the time we detect and report a reservation error.
+ /// errors for corresponding activations.
+ //
+ // FIXME: ideally this would be a set of `BorrowIndex`, not `Place`s,
+ // but it is currently inconvenient to track down the `BorrowIndex`
+ // at the time we detect and report a reservation error.
reservation_error_reported: FxHashSet<Place<'tcx>>,
/// This field keeps track of move errors that are to be reported for given move indicies.
///
/// If the function we're checking is a closure, then we'll need to report back the list of
/// mutable upvars that have been used. This field keeps track of them.
used_mut_upvars: SmallVec<[Field; 8]>,
- /// Non-lexical region inference context, if NLL is enabled. This
+ /// Non-lexical region inference context, if NLL is enabled. This
/// contains the results from region inference and lets us e.g.
/// find out which CFG points are contained in each borrow region.
nonlexical_regioncx: Rc<RegionInferenceContext<'tcx>>,
/// When checking permissions for a place access, this flag is used to indicate that an immutable
/// local place can be mutated.
-///
-/// FIXME: @nikomatsakis suggested that this flag could be removed with the following modifications:
-/// - Merge `check_access_permissions()` and `check_if_reassignment_to_immutable_state()`
-/// - Split `is_mutable()` into `is_assignable()` (can be directly assigned) and
-/// `is_declared_mutable()`
-/// - Take flow state into consideration in `is_assignable()` for local variables
+//
+// FIXME: @nikomatsakis suggested that this flag could be removed with the following modifications:
+// - Merge `check_access_permissions()` and `check_if_reassignment_to_immutable_state()`.
+// - Split `is_mutable()` into `is_assignable()` (can be directly assigned) and
+// `is_declared_mutable()`.
+// - Take flow state into consideration in `is_assignable()` for local variables.
#[derive(Copy, Clone, PartialEq, Eq, Debug)]
enum LocalMutationIsAllowed {
Yes,
/// place is initialized and (b) it is not borrowed in some way that would prevent this
/// access.
///
- /// Returns true if an error is reported, false otherwise.
+ /// Returns `true` if an error is reported.
fn access_place(
&mut self,
context: Context,
}
}
- /// Check the permissions for the given place and read or write kind
+ /// Checks the permissions for the given place and read or write kind
///
- /// Returns true if an error is reported, false otherwise.
+ /// Returns `true` if an error is reported.
fn check_access_permissions(
&mut self,
(place, span): (&Place<'tcx>, Span),
ty.is_closure() || ty.is_generator()
}
-/// Add a suggestion to a struct definition given a field access to a local.
+/// Adds a suggestion to a struct definition given a field access to a local.
/// This function expects the local to be a reference to a struct in order to produce a suggestion.
///
/// ```text
}
impl<D: ConstraintGraphDirecton> ConstraintGraph<D> {
- /// Create a "dependency graph" where each region constraint `R1:
+ /// Creates a "dependency graph" where each region constraint `R1:
/// R2` is treated as an edge `R1 -> R2`. We use this graph to
/// construct SCCs for region inference but also for error
/// reporting.
}
impl<'s, D: ConstraintGraphDirecton> RegionGraph<'s, D> {
- /// Create a "dependency graph" where each region constraint `R1:
+ /// Creates a "dependency graph" where each region constraint `R1:
/// R2` is treated as an edge `R1 -> R2`. We use this graph to
/// construct SCCs for region inference but also for error
/// reporting.
/// easy to find the constraints affecting a particular region.
///
/// N.B., this graph contains a "frozen" view of the current
- /// constraints. any new constraints added to the `ConstraintSet`
+ /// constraints. Any new constraints added to the `ConstraintSet`
/// after the graph is built will not be present in the graph.
crate fn graph(&self, num_region_vars: usize) -> graph::NormalConstraintGraph {
graph::ConstraintGraph::new(graph::Normal, self, num_region_vars)
graph::ConstraintGraph::new(graph::Reverse, self, num_region_vars)
}
- /// Compute cycles (SCCs) in the graph of regions. In particular,
+ /// Computes cycles (SCCs) in the graph of regions. In particular,
/// find all regions R1, R2 such that R1: R2 and R2: R1 and group
/// them into an SCC, and find the relationships between SCCs.
crate fn compute_sccs(
}
}
- /// Check if a borrow location is within a loop.
+ /// Checks if a borrow location is within a loop.
fn is_borrow_location_in_loop(
&self,
borrow_location: Location,
}
}
- /// Check if a borrowed value was captured by a trait object. We do this by
+ /// Checks if a borrowed value was captured by a trait object. We do this by
/// looking forward in the MIR from the reserve location and checking if we see
/// a unsized cast to a trait object on our data.
fn was_captured_by_trait_object(&self, borrow: &BorrowData<'tcx>) -> bool {
crate type AllFacts = PoloniusAllFacts<RegionVid, BorrowIndex, LocationIndex>;
crate trait AllFactsExt {
- /// Returns true if there is a need to gather `AllFacts` given the
+ /// Returns `true` if there is a need to gather `AllFacts` given the
/// current `-Z` flags.
fn enabled(tcx: TyCtxt<'_, '_, '_>) -> bool;
borrow_set: &'cx BorrowSet<'tcx>,
}
-/// Visits the whole MIR and generates invalidates() facts
-/// Most of the code implementing this was stolen from borrow_check/mod.rs
+/// Visits the whole MIR and generates `invalidates()` facts.
+/// Most of the code implementing this was stolen from `borrow_check/mod.rs`.
impl<'cx, 'tcx, 'gcx> Visitor<'tcx> for InvalidationGenerator<'cx, 'tcx, 'gcx> {
fn visit_statement(
&mut self,
}
impl<'cg, 'cx, 'tcx, 'gcx> InvalidationGenerator<'cx, 'tcx, 'gcx> {
- /// Simulates mutation of a place
+ /// Simulates mutation of a place.
fn mutate_place(
&mut self,
context: Context,
);
}
- /// Simulates consumption of an operand
+ /// Simulates consumption of an operand.
fn consume_operand(
&mut self,
context: Context,
}
}
- /// Simulates an access to a place
+ /// Simulates an access to a place.
fn access_place(
&mut self,
context: Context,
}
- /// Generate a new invalidates(L, B) fact
+ /// Generates a new `invalidates(L, B)` fact.
fn generate_invalidates(&mut self, b: BorrowIndex, l: Location) {
let lidx = self.location_table.start_index(l);
self.all_facts.invalidates.push((lidx, b));
}
/// Debugging aid: Invokes the `with_msg` callback repeatedly with
- /// our internal region constraints. These are dumped into the
+ /// our internal region constraints. These are dumped into the
/// -Zdump-mir file so that we can figure out why the region
/// inference resulted in the values that it did when debugging.
fn for_each_constraint(
if let (Some(f), Some(o)) = (self.to_error_region(fr), self.to_error_region(outlived_fr)) {
let tables = infcx.tcx.typeck_tables_of(mir_def_id);
let nice = NiceRegionError::new_from_span(infcx, span, o, f, Some(tables));
- if let Some(_error_reported) = nice.try_report_from_nll() {
+ if let Some(diag) = nice.try_report_from_nll() {
+ diag.buffer(errors_buffer);
return;
}
}
}
/// If `r2` represents a placeholder region, then this returns
- /// true if `r1` cannot name that placeholder in its
- /// value. Otherwise, returns false.
+ /// `true` if `r1` cannot name that placeholder in its
+ /// value; otherwise, returns `false`.
fn cannot_name_placeholder(&self, r1: RegionVid, r2: RegionVid) -> bool {
debug!("cannot_name_value_of(r1={:?}, r2={:?})", r1, r2);
value
}
- /// Check for the case where `fr` maps to something that the
+ /// Checks for the case where `fr` maps to something that the
/// *user* has a name for. In that case, we'll be able to map
/// `fr` to a `Region<'tcx>`, and that region will be one of
/// named variants.
}
}
- /// Get a span of a named region to provide context for error messages that
+ /// Gets a span of a named region to provide context for error messages that
/// mention that span, for example:
///
/// ```
}
}
- /// Find an argument that contains `fr` and label it with a fully
+ /// Finds an argument that contains `fr` and label it with a fully
/// elaborated type, returning something like `'1`. Result looks
/// like:
///
/// to. For example, we might produce an annotation like this:
///
/// ```
- /// | fn a<T>(items: &[T]) -> Box<dyn Iterator<Item=&T>> {
+ /// | fn a<T>(items: &[T]) -> Box<dyn Iterator<Item = &T>> {
/// | - let's call the lifetime of this reference `'1`
/// ```
///
/// `argument_hir_ty`, a `hir::Ty` (the syntax of the type
/// annotation). We are descending through the types stepwise,
/// looking in to find the region `needle_fr` in the internal
- /// type. Once we find that, we can use the span of the `hir::Ty`
+ /// type. Once we find that, we can use the span of the `hir::Ty`
/// to add the highlight.
///
/// This is a somewhat imperfect process, so long the way we also
None
}
- /// Find a closure upvar that contains `fr` and label it with a
+ /// Finds a closure upvar that contains `fr` and label it with a
/// fully elaborated type, returning something like `'1`. Result
/// looks like:
///
})
}
- /// Check for arguments appearing in the (closure) return type. It
+ /// Checks for arguments appearing in the (closure) return type. It
/// must be a closure since, in a free fn, such an argument would
/// have to either also appear in an argument (if using elision)
/// or be early bound (named, not in argument).
})
}
- /// Create a synthetic region named `'1`, incrementing the
+ /// Creates a synthetic region named `'1`, incrementing the
/// counter.
fn synthesize_region_name(&self, counter: &mut usize) -> InternedString {
let c = *counter;
ConstraintCategory, Local, Location, Mir,
};
use rustc::ty::{self, RegionVid, Ty, TyCtxt, TypeFoldable};
-use rustc::util::common;
+use rustc::util::common::{self, ErrorReported};
use rustc_data_structures::bit_set::BitSet;
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
use rustc_data_structures::graph::scc::Sccs;
use super::ToRegionVid;
pub struct RegionInferenceContext<'tcx> {
- /// Contains the definition for every region variable. Region
+ /// Contains the definition for every region variable. Region
/// variables are identified by their index (`RegionVid`). The
/// definition contains information about where the region came
/// from as well as its final inferred value.
}
/// A "type test" corresponds to an outlives constraint between a type
-/// and a lifetime, like `T: 'x` or `<T as Foo>::Bar: 'x`. They are
+/// and a lifetime, like `T: 'x` or `<T as Foo>::Bar: 'x`. They are
/// translated from the `Verify` region constraints in the ordinary
/// inference context.
///
///
/// In some cases, however, there are outlives relationships that are
/// not converted into a region constraint, but rather into one of
-/// these "type tests". The distinction is that a type test does not
+/// these "type tests". The distinction is that a type test does not
/// influence the inference result, but instead just examines the
/// values that we ultimately inferred for each region variable and
-/// checks that they meet certain extra criteria. If not, an error
+/// checks that they meet certain extra criteria. If not, an error
/// can be issued.
///
/// One reason for this is that these type tests typically boil down
/// Initializes the region variables for each universally
/// quantified region (lifetime parameter). The first N variables
/// always correspond to the regions appearing in the function
- /// signature (both named and anonymous) and where clauses. This
+ /// signature (both named and anonymous) and where-clauses. This
/// function iterates over those regions and initializes them with
/// minimum values.
///
self.universal_regions.to_region_vid(r)
}
- /// Add annotations for `#[rustc_regions]`; see `UniversalRegions::annotate`.
+ /// Adds annotations for `#[rustc_regions]`; see `UniversalRegions::annotate`.
crate fn annotate(&self, tcx: TyCtxt<'_, '_, 'tcx>, err: &mut DiagnosticBuilder<'_>) {
self.universal_regions.annotate(tcx, err)
}
- /// Returns true if the region `r` contains the point `p`.
+ /// Returns `true` if the region `r` contains the point `p`.
///
/// Panics if called before `solve()` executes,
crate fn region_contains(&self, r: impl ToRegionVid, p: impl ToElementIndex) -> bool {
self.scc_universes[scc]
}
- /// Perform region inference and report errors if we see any
+ /// Performs region inference and report errors if we see any
/// unsatisfiable constraints. If this is a closure, returns the
/// region requirements to propagate to our creator, if any.
pub(super) fn solve<'gcx>(
);
}
- /// True if all the elements in the value of `scc_b` are nameable
+ /// Returns `true` if all the elements in the value of `scc_b` are nameable
/// in `scc_a`. Used during constraint propagation, and only once
/// the value of `scc_b` has been computed.
fn universe_compatible(&self, scc_b: ConstraintSccIndex, scc_a: ConstraintSccIndex) -> bool {
debug!("try_promote_type_test: ur={:?}", ur);
- let non_local_ub = self.universal_region_relations.non_local_upper_bound(ur);
+ let non_local_ub = self.universal_region_relations.non_local_upper_bounds(&ur);
debug!("try_promote_type_test: non_local_ub={:?}", non_local_ub);
- assert!(self.universal_regions.is_universal_region(non_local_ub));
- assert!(!self.universal_regions.is_local_free_region(non_local_ub));
-
- let requirement = ClosureOutlivesRequirement {
- subject,
- outlived_free_region: non_local_ub,
- blame_span: locations.span(mir),
- category: ConstraintCategory::Boring,
- };
- debug!("try_promote_type_test: pushing {:#?}", requirement);
- propagated_outlives_requirements.push(requirement);
+ // This is slightly too conservative. To show T: '1, given `'2: '1`
+ // and `'3: '1` we only need to prove that T: '2 *or* T: '3, but to
+ // avoid potential non-determinism we approximate this by requiring
+ // both T: '2 and T: '3.
+ for &upper_bound in non_local_ub {
+ debug_assert!(self.universal_regions.is_universal_region(upper_bound));
+ debug_assert!(!self.universal_regions.is_local_free_region(upper_bound));
+
+ let requirement = ClosureOutlivesRequirement {
+ subject,
+ outlived_free_region: upper_bound,
+ blame_span: locations.span(mir),
+ category: ConstraintCategory::Boring,
+ };
+ debug!("try_promote_type_test: pushing {:#?}", requirement);
+ propagated_outlives_requirements.push(requirement);
+ }
}
true
}
lub
}
- /// Test if `test` is true when applied to `lower_bound` at
- /// `point`, and returns true or false.
+ /// Tests if `test` is true when applied to `lower_bound` at
+ /// `point`.
fn eval_verify_bound(
&self,
tcx: TyCtxt<'_, '_, 'tcx>,
/// different results. (For example, there might be two regions
/// with the same value that are not in the same SCC).
///
- /// NB. This is not an ideal approach and I would like to revisit
+ /// N.B., this is not an ideal approach and I would like to revisit
/// it. However, it works pretty well in practice. In particular,
/// this is needed to deal with projection outlives bounds like
///
///
/// In particular, this routine winds up being important when
/// there are bounds like `where <T as Foo<'a>>::Item: 'b` in the
- /// environment. In this case, if we can show that `'0 == 'a`,
+ /// environment. In this case, if we can show that `'0 == 'a`,
/// and that `'b: '1`, then we know that the clause is
/// satisfied. In such cases, particularly due to limitations of
/// the trait solver =), we usually wind up with a where-clause like
/// Once regions have been propagated, this method is used to see
/// whether any of the constraints were too strong. In particular,
/// we want to check for a case where a universally quantified
- /// region exceeded its bounds. Consider:
+ /// region exceeded its bounds. Consider:
///
/// fn foo<'a, 'b>(x: &'a u32) -> &'b u32 { x }
///
}
}
- /// Check the final value for the free region `fr` to see if it
+ /// Checks the final value for the free region `fr` to see if it
/// grew too large. In particular, examine what `end(X)` points
/// wound up in `fr`'s final value; for each `end(X)` where `X !=
/// fr`, we want to check that `fr: X`. If not, that's either an
.is_none()
);
+ // Only check all of the relations for the main representative of each
+ // SCC, otherwise just check that we outlive said representative. This
+ // reduces the number of redundant relations propagated out of
+ // closures.
+ // Note that the representative will be a universal region if there is
+ // one in this SCC, so we will always check the representative here.
+ let representative = self.scc_representatives[longer_fr_scc];
+ if representative != longer_fr {
+ self.check_universal_region_relation(
+ longer_fr,
+ representative,
+ infcx,
+ mir,
+ mir_def_id,
+ propagated_outlives_requirements,
+ errors_buffer,
+ );
+ return;
+ }
+
// Find every region `o` such that `fr: o`
// (because `fr` includes `end(o)`).
for shorter_fr in self.scc_values.universal_regions_outlived_by(longer_fr_scc) {
- // If it is known that `fr: o`, carry on.
- if self.universal_region_relations
- .outlives(longer_fr, shorter_fr)
- {
- continue;
+ if let Some(ErrorReported) = self.check_universal_region_relation(
+ longer_fr,
+ shorter_fr,
+ infcx,
+ mir,
+ mir_def_id,
+ propagated_outlives_requirements,
+ errors_buffer,
+ ) {
+ // continuing to iterate just reports more errors than necessary
+ return;
}
+ }
+ }
- debug!(
- "check_universal_region: fr={:?} does not outlive shorter_fr={:?}",
- longer_fr, shorter_fr,
- );
+ fn check_universal_region_relation(
+ &self,
+ longer_fr: RegionVid,
+ shorter_fr: RegionVid,
+ infcx: &InferCtxt<'_, 'gcx, 'tcx>,
+ mir: &Mir<'tcx>,
+ mir_def_id: DefId,
+ propagated_outlives_requirements: &mut Option<&mut Vec<ClosureOutlivesRequirement<'gcx>>>,
+ errors_buffer: &mut Vec<Diagnostic>,
+ ) -> Option<ErrorReported> {
+ // If it is known that `fr: o`, carry on.
+ if self.universal_region_relations
+ .outlives(longer_fr, shorter_fr)
+ {
+ return None;
+ }
- let blame_span_category = self.find_outlives_blame_span(mir, longer_fr, shorter_fr);
-
- if let Some(propagated_outlives_requirements) = propagated_outlives_requirements {
- // Shrink `fr` until we find a non-local region (if we do).
- // We'll call that `fr-` -- it's ever so slightly smaller than `fr`.
- if let Some(fr_minus) = self.universal_region_relations
- .non_local_lower_bound(longer_fr)
- {
- debug!("check_universal_region: fr_minus={:?}", fr_minus);
-
- // Grow `shorter_fr` until we find a non-local
- // region. (We always will.) We'll call that
- // `shorter_fr+` -- it's ever so slightly larger than
- // `fr`.
- let shorter_fr_plus = self.universal_region_relations
- .non_local_upper_bound(shorter_fr);
- debug!(
- "check_universal_region: shorter_fr_plus={:?}",
- shorter_fr_plus
- );
+ debug!(
+ "check_universal_region_relation: fr={:?} does not outlive shorter_fr={:?}",
+ longer_fr, shorter_fr,
+ );
+ if let Some(propagated_outlives_requirements) = propagated_outlives_requirements {
+ // Shrink `longer_fr` until we find a non-local region (if we do).
+ // We'll call it `fr-` -- it's ever so slightly smaller than
+ // `longer_fr`.
+
+ if let Some(fr_minus) = self
+ .universal_region_relations
+ .non_local_lower_bound(longer_fr)
+ {
+ debug!("check_universal_region: fr_minus={:?}", fr_minus);
+
+ let blame_span_category = self.find_outlives_blame_span(mir, longer_fr, shorter_fr);
+
+ // Grow `shorter_fr` until we find some non-local regions. (We
+ // always will.) We'll call them `shorter_fr+` -- they're ever
+ // so slightly larger than `shorter_fr`.
+ let shorter_fr_plus = self.universal_region_relations
+ .non_local_upper_bounds(&shorter_fr);
+ debug!(
+ "check_universal_region: shorter_fr_plus={:?}",
+ shorter_fr_plus
+ );
+ for &&fr in &shorter_fr_plus {
// Push the constraint `fr-: shorter_fr+`
propagated_outlives_requirements.push(ClosureOutlivesRequirement {
subject: ClosureOutlivesSubject::Region(fr_minus),
- outlived_free_region: shorter_fr_plus,
+ outlived_free_region: fr,
blame_span: blame_span_category.1,
category: blame_span_category.0,
});
- continue;
}
+ return None;
}
-
- // If we are not in a context where we can propagate
- // errors, or we could not shrink `fr` to something
- // smaller, then just report an error.
- //
- // Note: in this case, we use the unapproximated regions
- // to report the error. This gives better error messages
- // in some cases.
- self.report_error(mir, infcx, mir_def_id, longer_fr, shorter_fr, errors_buffer);
- return; // continuing to iterate just reports more errors than necessary
}
+
+ // If we are not in a context where we can propagate errors, or we
+ // could not shrink `fr` to something smaller, then just report an
+ // error.
+ //
+ // Note: in this case, we use the unapproximated regions to report the
+ // error. This gives better error messages in some cases.
+ self.report_error(mir, infcx, mir_def_id, longer_fr, shorter_fr, errors_buffer);
+ Some(ErrorReported)
}
fn check_bound_universal_region<'gcx>(
self.points.rows()
}
- /// Adds the given element to the value for the given region. Returns true if
+ /// Adds the given element to the value for the given region. Returns whether
/// the element is newly added (i.e., was not already present).
crate fn add_element(&mut self, row: N, location: Location) -> bool {
debug!("LivenessValues::add(r={:?}, location={:?})", row, location);
}
/// Adds all the elements in the given bit array into the given
- /// region. Returns true if any of them are newly added.
+ /// region. Returns whether any of them are newly added.
crate fn add_elements(&mut self, row: N, locations: &HybridBitSet<PointIndex>) -> bool {
debug!(
"LivenessValues::add_elements(row={:?}, locations={:?})",
self.points.insert_all_into_row(row);
}
- /// True if the region `r` contains the given element.
+ /// Returns `true` if the region `r` contains the given element.
crate fn contains(&self, row: N, location: Location) -> bool {
let index = self.elements.point_from_location(location);
self.points.contains(row, index)
}
}
- /// Adds the given element to the value for the given region. Returns true if
+ /// Adds the given element to the value for the given region. Returns whether
/// the element is newly added (i.e., was not already present).
crate fn add_element(&mut self, r: N, elem: impl ToElementIndex) -> bool {
debug!("add(r={:?}, elem={:?})", r, elem);
self.points.insert_all_into_row(r);
}
- /// Add all elements in `r_from` to `r_to` (because e.g., `r_to:
+ /// Adds all elements in `r_from` to `r_to` (because e.g., `r_to:
/// r_from`).
crate fn add_region(&mut self, r_to: N, r_from: N) -> bool {
self.points.union_rows(r_from, r_to)
| self.placeholders.union_rows(r_from, r_to)
}
- /// True if the region `r` contains the given element.
+ /// Returns `true` if the region `r` contains the given element.
crate fn contains(&self, r: N, elem: impl ToElementIndex) -> bool {
elem.contained_in_row(self, r)
}
}
}
- /// True if `sup_region` contains all the CFG points that
+ /// Returns `true` if `sup_region` contains all the CFG points that
/// `sub_region` contains. Ignores universal regions.
crate fn contains_points(&self, sup_region: N, sub_region: N) -> bool {
if let Some(sub_row) = self.points.row(sub_region) {
universal_regions: Rc<UniversalRegions<'tcx>>,
/// Stores the outlives relations that are known to hold from the
- /// implied bounds, in-scope where clauses, and that sort of
+ /// implied bounds, in-scope where-clauses, and that sort of
/// thing.
outlives: TransitiveRelation<RegionVid>,
/// added via implicit bounds.
///
/// Each region here is guaranteed to be a key in the `indices`
-/// map. We use the "original" regions (i.e., the keys from the
+/// map. We use the "original" regions (i.e., the keys from the
/// map, and not the values) because the code in
/// `process_registered_region_obligations` has some special-cased
/// logic expecting to see (e.g.) `ReStatic`, and if we supplied
/// As part of computing the free region relations, we also have to
/// normalize the input-output types, which we then need later. So we
-/// return those. This vector consists of first the input types and
+/// return those. This vector consists of first the input types and
/// then the output type as the last element.
type NormalizedInputsAndOutput<'tcx> = Vec<Ty<'tcx>>;
/// Finds an "upper bound" for `fr` that is not local. In other
/// words, returns the smallest (*) known region `fr1` that (a)
- /// outlives `fr` and (b) is not local. This cannot fail, because
- /// we will always find `'static` at worst.
+ /// outlives `fr` and (b) is not local.
///
- /// (*) If there are multiple competing choices, we pick the "postdominating"
- /// one. See `TransitiveRelation::postdom_upper_bound` for details.
- crate fn non_local_upper_bound(&self, fr: RegionVid) -> RegionVid {
+ /// (*) If there are multiple competing choices, we return all of them.
+ crate fn non_local_upper_bounds(&'a self, fr: &'a RegionVid) -> Vec<&'a RegionVid> {
debug!("non_local_upper_bound(fr={:?})", fr);
- self.non_local_bound(&self.inverse_outlives, fr)
+ let res = self.non_local_bounds(&self.inverse_outlives, fr);
+ assert!(!res.is_empty(), "can't find an upper bound!?");
+ res
+ }
+
+ /// Returns the "postdominating" bound of the set of
+ /// `non_local_upper_bounds` for the given region.
+ crate fn non_local_upper_bound(&self, fr: RegionVid) -> RegionVid {
+ let upper_bounds = self.non_local_upper_bounds(&fr);
+
+ // In case we find more than one, reduce to one for
+ // convenience. This is to prevent us from generating more
+ // complex constraints, but it will cause spurious errors.
+ let post_dom = self
+ .inverse_outlives
+ .mutual_immediate_postdominator(upper_bounds);
+
+ debug!("non_local_bound: post_dom={:?}", post_dom);
+
+ post_dom
+ .and_then(|&post_dom| {
+ // If the mutual immediate postdom is not local, then
+ // there is no non-local result we can return.
+ if !self.universal_regions.is_local_free_region(post_dom) {
+ Some(post_dom)
+ } else {
+ None
+ }
+ })
.unwrap_or(self.universal_regions.fr_static)
}
+
/// Finds a "lower bound" for `fr` that is not local. In other
/// words, returns the largest (*) known region `fr1` that (a) is
- /// outlived by `fr` and (b) is not local. This cannot fail,
- /// because we will always find `'static` at worst.
+ /// outlived by `fr` and (b) is not local.
///
/// (*) If there are multiple competing choices, we pick the "postdominating"
/// one. See `TransitiveRelation::postdom_upper_bound` for details.
crate fn non_local_lower_bound(&self, fr: RegionVid) -> Option<RegionVid> {
debug!("non_local_lower_bound(fr={:?})", fr);
- self.non_local_bound(&self.outlives, fr)
+ let lower_bounds = self.non_local_bounds(&self.outlives, &fr);
+
+ // In case we find more than one, reduce to one for
+ // convenience. This is to prevent us from generating more
+ // complex constraints, but it will cause spurious errors.
+ let post_dom = self
+ .outlives
+ .mutual_immediate_postdominator(lower_bounds);
+
+ debug!("non_local_bound: post_dom={:?}", post_dom);
+
+ post_dom
+ .and_then(|&post_dom| {
+ // If the mutual immediate postdom is not local, then
+ // there is no non-local result we can return.
+ if !self.universal_regions.is_local_free_region(post_dom) {
+ Some(post_dom)
+ } else {
+ None
+ }
+ })
}
- /// Helper for `non_local_upper_bound` and
- /// `non_local_lower_bound`. Repeatedly invokes `postdom_parent`
- /// until we find something that is not local. Returns None if we
- /// never do so.
- fn non_local_bound(
+ /// Helper for `non_local_upper_bounds` and `non_local_lower_bound`.
+ /// Repeatedly invokes `postdom_parent` until we find something that is not
+ /// local. Returns an empty vector if we never do so.
+ fn non_local_bounds<'a>(
&self,
- relation: &TransitiveRelation<RegionVid>,
- fr0: RegionVid,
- ) -> Option<RegionVid> {
+ relation: &'a TransitiveRelation<RegionVid>,
+ fr0: &'a RegionVid,
+ ) -> Vec<&'a RegionVid> {
// This method assumes that `fr0` is one of the universally
// quantified region variables.
- assert!(self.universal_regions.is_universal_region(fr0));
+ assert!(self.universal_regions.is_universal_region(*fr0));
let mut external_parents = vec![];
- let mut queue = vec![&fr0];
+ let mut queue = vec![fr0];
// Keep expanding `fr` into its parents until we reach
// non-local regions.
debug!("non_local_bound: external_parents={:?}", external_parents);
- // In case we find more than one, reduce to one for
- // convenience. This is to prevent us from generating more
- // complex constraints, but it will cause spurious errors.
- let post_dom = relation
- .mutual_immediate_postdominator(external_parents)
- .cloned();
-
- debug!("non_local_bound: post_dom={:?}", post_dom);
-
- post_dom.and_then(|post_dom| {
- // If the mutual immediate postdom is not local, then
- // there is no non-local result we can return.
- if !self.universal_regions.is_local_free_region(post_dom) {
- Some(post_dom)
- } else {
- None
- }
- })
+ external_parents
}
- /// True if fr1 is known to outlive fr2.
+ /// Returns `true` if fr1 is known to outlive fr2.
///
/// This will only ever be true for universally quantified regions.
crate fn outlives(&self, fr1: RegionVid, fr2: RegionVid) -> bool {
}
}
- /// True if there are no local variables that need liveness computation.
+ /// Returns `true` if there are no local variables that need liveness computation.
crate fn is_empty(&self) -> bool {
self.to_local.is_empty()
}
/// that indicate which types must be live at which point in the CFG.
/// This vector is consumed by `constraint_generation`.
///
-/// NB. This computation requires normalization; therefore, it must be
+/// N.B., this computation requires normalization; therefore, it must be
/// performed before
pub(super) fn generate<'gcx, 'tcx>(
typeck: &mut TypeChecker<'_, 'gcx, 'tcx>,
trace::trace(typeck, mir, elements, flow_inits, move_data, &liveness_map, location_table);
}
-/// Compute all regions that are (currently) known to outlive free
+/// Computes all regions that are (currently) known to outlive free
/// regions. For these regions, we do not need to compute
/// liveness, since the outlives constraints will ensure that they
/// are live over the whole fn body anyhow.
}
}
- /// Compute all points where local is "use live" -- meaning its
+ /// Computes all points where local is "use live" -- meaning its
/// current value may be used later (except by a drop). This is
/// done by walking backwards from each use of `live_local` until we
/// find a `def` of local.
}
}
- /// Compute all points where local is "drop live" -- meaning its
+ /// Computes all points where local is "drop live" -- meaning its
/// current value may be dropped later (but not used). This is
/// done by iterating over the drops of `local` where `local` (or
/// some subpart of `local`) is initialized. For each such drop,
}
impl LivenessContext<'_, '_, '_, '_, 'tcx> {
- /// True if the local variable (or some part of it) is initialized in
+ /// Returns `true` if the local variable (or some part of it) is initialized in
/// the terminator of `block`. We need to check this to determine if a
/// DROP of some local variable will have an effect -- note that
/// drops, as they may unwind, are always terminators.
self.flow_inits.has_any_child_of(mpi).is_some()
}
- /// True if the path `mpi` (or some part of it) is initialized at
+ /// Returns `true` if the path `mpi` (or some part of it) is initialized at
/// the exit of `block`.
///
/// **Warning:** Does not account for the result of `Call`
self.flow_inits.has_any_child_of(mpi).is_some()
}
- /// Store the result that all regions in `value` are live for the
+ /// Stores the result that all regions in `value` are live for the
/// points `live_at`.
fn add_use_live_facts_for(
&mut self,
/// older NLL analysis, we required this only at the entry point
/// to the function. By the nature of the constraints, this wound
/// up propagating to all points reachable from start (because
- /// `'1` -- as a universal region -- is live everywhere). In the
+ /// `'1` -- as a universal region -- is live everywhere). In the
/// newer analysis, though, this doesn't work: `_0` is considered
/// dead at the start (it has no usable value) and hence this type
/// equality is basically a no-op. Then, later on, when we do `_0
}
}
- /// Add the constraints that arise from a borrow expression `&'a P` at the location `L`.
+ /// Adds the constraints that arise from a borrow expression `&'a P` at the location `L`.
///
/// # Parameters
///
/// - "Invariant" `a == b`
/// - "Contravariant" `a :> b`
///
-/// NB. The type `a` is permitted to have unresolved inference
+/// N.B., the type `a` is permitted to have unresolved inference
/// variables, but not the type `b`.
pub(super) fn relate_types<'tcx>(
infcx: &InferCtxt<'_, '_, 'tcx>,
pub fr_static: RegionVid,
/// A special region vid created to represent the current MIR fn
- /// body. It will outlive the entire CFG but it will not outlive
+ /// body. It will outlive the entire CFG but it will not outlive
/// any other universal regions.
pub fr_fn_body: RegionVid,
/// We create region variables such that they are ordered by their
/// `RegionClassification`. The first block are globals, then
- /// externals, then locals. So things from:
- /// - `FIRST_GLOBAL_INDEX..first_extern_index` are global;
- /// - `first_extern_index..first_local_index` are external; and
+ /// externals, then locals. So, things from:
+ /// - `FIRST_GLOBAL_INDEX..first_extern_index` are global,
+ /// - `first_extern_index..first_local_index` are external,
/// - `first_local_index..num_universals` are local.
first_extern_index: usize,
num_universals: usize,
/// The "defining" type for this function, with all universal
- /// regions instantiated. For a closure or generator, this is the
+ /// regions instantiated. For a closure or generator, this is the
/// closure type, but for a top-level function it's the `FnDef`.
pub defining_ty: DefiningTy<'tcx>,
/// The return type of this function, with all regions replaced by
/// their universal `RegionVid` equivalents.
///
- /// NB. Associated types in this type have not been normalized,
+ /// N.B., associated types in this type have not been normalized,
/// as the name suggests. =)
pub unnormalized_output_ty: Ty<'tcx>,
/// The fully liberated input types of this function, with all
/// regions replaced by their universal `RegionVid` equivalents.
///
- /// NB. Associated types in these types have not been normalized,
+ /// N.B., associated types in these types have not been normalized,
/// as the name suggests. =)
pub unnormalized_input_tys: &'tcx [Ty<'tcx>],
/// `ClosureSubsts::generator_return_ty`.
Generator(DefId, ty::GeneratorSubsts<'tcx>, hir::GeneratorMovability),
- /// The MIR is a fn item with the given def-id and substs. The signature
+ /// The MIR is a fn item with the given `DefId` and substs. The signature
/// of the function can be bound then with the `fn_sig` query.
FnDef(DefId, &'tcx Substs<'tcx>),
/// A **local** lifetime is one about which we know the full set
/// of relevant constraints (that is, relationships to other named
- /// regions). For a closure, this includes any region bound in
- /// the closure's signature. For a fn item, this includes all
+ /// regions). For a closure, this includes any region bound in
+ /// the closure's signature. For a fn item, this includes all
/// regions other than global ones.
///
/// Continuing with the example from `External`, if we were
/// analyzing the closure, then `'x` would be local (and `'a` and
- /// `'b` are external). If we are analyzing the function item
+ /// `'b` are external). If we are analyzing the function item
/// `foo`, then `'a` and `'b` are local (and `'x` is not in
/// scope).
Local,
region_mapping
}
- /// True if `r` is a member of this set of universal regions.
+ /// Returns `true` if `r` is a member of this set of universal regions.
pub fn is_universal_region(&self, r: RegionVid) -> bool {
(FIRST_GLOBAL_INDEX..self.num_universals).contains(&r.index())
}
(FIRST_GLOBAL_INDEX..self.num_universals).map(RegionVid::new)
}
- /// True if `r` is classified as an local region.
+ /// Returns `true` if `r` is classified as an local region.
pub fn is_local_free_region(&self, r: RegionVid) -> bool {
self.region_classification(r) == Some(RegionClassification::Local)
}
self.first_local_index
}
- /// Get an iterator over all the early-bound regions that have names.
+ /// Gets an iterator over all the early-bound regions that have names.
pub fn named_universal_regions<'s>(
&'s self,
) -> impl Iterator<Item = (ty::Region<'tcx>, ty::RegionVid)> + 's {
tcx.type_of(closure_base_def_id)
} else {
let tables = tcx.typeck_tables_of(self.mir_def_id);
- tables.node_id_to_type(self.mir_hir_id)
+ tables.node_type(self.mir_hir_id)
};
debug!("defining_ty (pre-replacement): {:?}", defining_ty);
/// indices vector. Typically, we identify late-bound regions as we process the inputs and
/// outputs of the closure/function. However, sometimes there are late-bound regions which do
/// not appear in the fn parameters but which are nonetheless in scope. The simplest case of
- /// this are unused functions, like fn foo<'a>() { } (see eg., #51351). Despite not being used,
+ /// this are unused functions, like fn foo<'a>() { } (see e.g., #51351). Despite not being used,
/// users can still reference these regions (e.g., let x: &'a u32 = &22;), so we need to create
/// entries for them and store them in the indices map. This code iterates over the complete
/// set of late-bound regions and checks for any that we have not yet seen, adding them to the
}
}
- /// Replace all free regions in `value` with region vids, as
+ /// Replaces all free regions in `value` with region vids, as
/// returned by `to_region_vid`.
pub fn fold_to_region_vids<T>(&self, tcx: TyCtxt<'_, '_, 'tcx>, value: &T) -> T
where
use rustc::ty::TyCtxt;
use rustc_data_structures::graph::dominators::Dominators;
-/// Returns true if the borrow represented by `kind` is
+/// Returns `true` if the borrow represented by `kind` is
/// allowed to be split into separate Reservation and
/// Activation phases.
pub(super) fn allow_two_phase_borrow<'a, 'tcx, 'gcx: 'tcx>(
/// Extension methods for the `Place` type.
crate trait PlaceExt<'tcx> {
- /// Returns true if we can safely ignore borrows of this place.
+ /// Returns `true` if we can safely ignore borrows of this place.
/// This is true whenever there is no action that the user can do
/// to the place `self` that would invalidate the borrow. This is true
/// for borrows of raw pointer dereferents as well as shared references.
/// A linked list of places running up the stack; begins with the
/// innermost place and extends to projections (e.g., `a.b` would have
-/// the place `a` with a "next" pointer to `a.b`). Created by
+/// the place `a` with a "next" pointer to `a.b`). Created by
/// `unroll_place`.
///
-/// N.B., this particular impl strategy is not the most obvious. It was
+/// N.B., this particular impl strategy is not the most obvious. It was
/// chosen because it makes a measurable difference to NLL
/// performance, as this code (`borrow_conflicts_with_place`) is somewhat hot.
struct PlaceComponents<'p, 'tcx: 'p> {
use crate::build::ForGuard::{self, OutsideGuard, RefWithinGuard, ValWithinGuard};
use crate::build::{BlockAnd, BlockAndExtension, Builder};
use crate::build::{GuardFrame, GuardFrameLocal, LocalsForNode};
-use crate::hair::*;
+use crate::hair::{self, *};
use rustc::mir::*;
use rustc::ty::{self, CanonicalUserTypeAnnotation, Ty};
use rustc::ty::layout::VariantIdx;
},
..
},
- user_ty: pat_ascription_ty,
- variance: _,
- user_ty_span,
+ ascription: hair::pattern::Ascription {
+ user_ty: pat_ascription_ty,
+ variance: _,
+ user_ty_span,
+ },
} => {
let place =
self.storage_live_binding(block, var, irrefutable_pat.span, OutsideGuard);
}
PatternKind::AscribeUserType {
ref subpattern,
- ref user_ty,
- user_ty_span,
- variance: _,
+ ascription: hair::pattern::Ascription {
+ ref user_ty,
+ user_ty_span,
+ variance: _,
+ },
} => {
// This corresponds to something like
//
///
/// The return value is a list of "otherwise" blocks. These are
/// points in execution where we found that *NONE* of the
- /// candidates apply. In principle, this means that the input
+ /// candidates apply. In principle, this means that the input
/// list was not exhaustive, though at present we sometimes are
/// not smart enough to recognize all exhaustive inputs.
///
}
}
- /// This is the most subtle part of the matching algorithm. At
+ /// This is the most subtle part of the matching algorithm. At
/// this point, the input candidates have been fully simplified,
/// and so we know that all remaining match-pairs require some
/// sort of test. To decide what test to do, we take the highest
/// 4. etc.
///
/// Once we know what sort of test we are going to perform, this
- /// test may also help us with other candidates. So we walk over
+ /// test may also help us with other candidates. So we walk over
/// the candidates (from high to low priority) and check. This
/// gives us, for each outcome of the test, a transformed list of
- /// candidates. For example, if we are testing the current
+ /// candidates. For example, if we are testing the current
/// variant of `x.0`, and we have a candidate `{x.0 @ Some(v), x.1
/// @ 22}`, then we would have a resulting candidate of `{(x.0 as
/// Some).0 @ v, x.1 @ 22}`. Note that the first match-pair is now
/// for the case where the guard fails.
///
/// Note: we check earlier that if there is a guard, there cannot
- /// be move bindings. This isn't really important for the
+ /// be move bindings. This isn't really important for the
/// self-consistency of this fn, but the reason for it should be
/// clear: after we've done the assignments, if there were move
/// bindings, further tests would be a use-after-move (which would
use crate::build::Builder;
use crate::build::matches::{Ascription, Binding, MatchPair, Candidate};
-use crate::hair::*;
+use crate::hair::{self, *};
use rustc::ty;
use rustc::ty::layout::{Integer, IntegerExt, Size};
use syntax::attr::{SignedInt, UnsignedInt};
match *match_pair.pattern.kind {
PatternKind::AscribeUserType {
ref subpattern,
- variance,
- ref user_ty,
- user_ty_span
+ ascription: hair::pattern::Ascription {
+ variance,
+ ref user_ty,
+ user_ty_span,
+ },
} => {
// Apply the type ascription to the value at `match_pair.place`, which is the
// value being matched, taking the variance field into account.
/// appropriate.
///
/// So, for example, if this candidate is `x @ Some(P0)` and the
- /// test is a variant test, then we would add `(x as Option).0 @
+ /// test is a variant test, then we would add `(x as Option).0 @
/// P0` to the `resulting_candidates` entry corresponding to the
/// variant `Some`.
///
use syntax_pos::{Span, DUMMY_SP};
impl<'a, 'gcx, 'tcx> Builder<'a, 'gcx, 'tcx> {
- /// Add a new temporary value of type `ty` storing the result of
+ /// Adds a new temporary value of type `ty` storing the result of
/// evaluating `expr`.
///
/// N.B., **No cleanup is scheduled for this temporary.** You should
use super::lints;
-/// Construct the MIR for a given def-id.
+/// Construct the MIR for a given `DefId`.
pub fn mir_build<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, def_id: DefId) -> Mir<'tcx> {
let id = tcx.hir().as_local_node_id(def_id).unwrap();
Some(ArgInfo(liberated_closure_env_ty(tcx, id, body_id), None, None, None))
}
ty::Generator(..) => {
- let gen_ty = tcx.body_tables(body_id).node_id_to_type(fn_hir_id);
+ let gen_ty = tcx.body_tables(body_id).node_type(fn_hir_id);
Some(ArgInfo(gen_ty, None, None, None))
}
_ => None,
})
}
-/// A pass to lift all the types and substitutions in a Mir
+/// A pass to lift all the types and substitutions in a MIR
/// to the global tcx. Sadly, we don't have a "folder" that
-/// can change 'tcx so we have to transmute afterwards.
+/// can change `'tcx` so we have to transmute afterwards.
struct GlobalizeMir<'a, 'gcx: 'a> {
tcx: TyCtxt<'a, 'gcx, 'gcx>,
span: Span
body_id: hir::BodyId)
-> Ty<'tcx> {
let closure_expr_hir_id = tcx.hir().node_to_hir_id(closure_expr_id);
- let closure_ty = tcx.body_tables(body_id).node_id_to_type(closure_expr_hir_id);
+ let closure_ty = tcx.body_tables(body_id).node_type(closure_expr_hir_id);
let (closure_def_id, closure_substs) = match closure_ty.sty {
ty::Closure(closure_def_id, closure_substs) => (closure_def_id, closure_substs),
fn_span: Span,
arg_count: usize,
- /// the current set of scopes, updated as we traverse;
- /// see the `scope` module for more details
+ /// The current set of scopes, updated as we traverse;
+ /// see the `scope` module for more details.
scopes: Vec<scope::Scope<'tcx>>,
- /// the block-context: each time we build the code within an hair::Block,
+ /// The block-context: each time we build the code within an hair::Block,
/// we push a frame here tracking whether we are building a statement or
/// if we are pushing the tail expression of the block. This is used to
/// embed information in generated temps about whether they were created
/// for a block tail expression or not.
///
/// It would be great if we could fold this into `self.scopes`
- /// somehow; but right now I think that is very tightly tied to
+ /// somehow, but right now I think that is very tightly tied to
/// the code generation in ways that we cannot (or should not)
/// start just throwing new entries onto that vector in order to
/// distinguish the context of EXPR1 from the context of EXPR2 in
- /// `{ STMTS; EXPR1 } + EXPR2`
+ /// `{ STMTS; EXPR1 } + EXPR2`.
block_context: BlockContext,
/// The current unsafe block in scope, even if it is hidden by
- /// a PushUnsafeBlock
+ /// a `PushUnsafeBlock`.
unpushed_unsafe: Safety,
- /// The number of `push_unsafe_block` levels in scope
+ /// The number of `push_unsafe_block` levels in scope.
push_unsafe_count: usize,
- /// the current set of breakables; see the `scope` module for more
- /// details
+ /// The current set of breakables; see the `scope` module for more
+ /// details.
breakable_scopes: Vec<scope::BreakableScope<'tcx>>,
- /// the vector of all scopes that we have created thus far;
- /// we track this for debuginfo later
+ /// The vector of all scopes that we have created thus far;
+ /// we track this for debuginfo later.
source_scopes: IndexVec<SourceScope, SourceScopeData>,
source_scope_local_data: IndexVec<SourceScope, SourceScopeLocalData>,
source_scope: SourceScope,
- /// the guard-context: each time we build the guard expression for
+ /// The guard-context: each time we build the guard expression for
/// a match arm, we push onto this stack, and then pop when we
/// finish building it.
guard_context: Vec<GuardFrame>,
- /// Maps node ids of variable bindings to the `Local`s created for them.
+ /// Maps `NodeId`s of variable bindings to the `Local`s created for them.
/// (A match binding can have two locals; the 2nd is for the arm's guard.)
var_indices: NodeMap<LocalsForNode>,
local_decls: IndexVec<Local, LocalDecl<'tcx>>,
upvar_decls: Vec<UpvarDecl>,
unit_temp: Option<Place<'tcx>>,
- /// cached block with the RESUME terminator; this is created
+ /// Cached block with the `RESUME` terminator; this is created
/// when first set of cleanups are built.
cached_resume_block: Option<BasicBlock>,
- /// cached block with the RETURN terminator
+ /// Cached block with the `RETURN` terminator.
cached_return_block: Option<BasicBlock>,
- /// cached block with the UNREACHABLE terminator
+ /// Cached block with the `UNREACHABLE` terminator.
cached_unreachable_block: Option<BasicBlock>,
}
fn push(&mut self, bf: BlockFrame) { self.0.push(bf); }
fn pop(&mut self) -> Option<BlockFrame> { self.0.pop() }
- /// Traverses the frames on the BlockContext, searching for either
+ /// Traverses the frames on the `BlockContext`, searching for either
/// the first block-tail expression frame with no intervening
/// statement frame.
///
#[derive(Debug)]
enum LocalsForNode {
- /// In the usual case, a node-id for an identifier maps to at most
- /// one Local declaration.
+ /// In the usual case, a `NodeId` for an identifier maps to at most
+ /// one `Local` declaration.
One(Local),
/// The exceptional case is identifiers in a match arm's pattern
/// that are referenced in a guard of that match arm. For these,
- /// we can have `2+k` Locals, where `k` is the number of candidate
+ /// we can have `2 + k` Locals, where `k` is the number of candidate
/// patterns (separated by `|`) in the arm.
///
/// * `for_arm_body` is the Local used in the arm body (which is
/// P1(id1) if (... (match E2 { P2(id2) if ... => B2 })) => B1,
/// }
///
- /// here, when building for FIXME
+ /// here, when building for FIXME.
locals: Vec<GuardFrameLocal>,
}
-/// ForGuard indicates whether we are talking about:
+/// `ForGuard` indicates whether we are talking about:
/// 1. the temp for a local binding used solely within guard expressions,
/// 2. the temp that holds reference to (1.), which is actually what the
/// guard expressions see, or
}
impl<'tcx> Scope<'tcx> {
- /// Invalidate all the cached blocks in the scope.
+ /// Invalidates all the cached blocks in the scope.
///
/// Should always be run for all inner scopes when a drop is pushed into some scope enclosing a
/// larger extent of code.
/// Branch out of `block` to `target`, exiting all scopes up to
- /// and including `region_scope`. This will insert whatever drops are
+ /// and including `region_scope`. This will insert whatever drops are
/// needed. See module comment for details.
pub fn exit_scope(&mut self,
span: Span,
next_target.unit()
}
- /// Create an Assert terminator and return the success block.
+ /// Creates an Assert terminator and return the success block.
/// If the boolean condition operand is not the expected value,
/// a runtime panic will be caused with the given message.
pub fn assert(&mut self, block: BasicBlock,
use rustc::mir::interpret::{ConstEvalErr, ErrorHandled};
use rustc::mir;
use rustc::ty::{self, TyCtxt, query::TyCtxtAt};
-use rustc::ty::layout::{self, LayoutOf, TyLayout, VariantIdx};
+use rustc::ty::layout::{self, LayoutOf, VariantIdx};
use rustc::ty::subst::Subst;
use rustc::traits::Reveal;
use rustc_data_structures::fx::FxHashMap;
use syntax::source_map::{Span, DUMMY_SP};
use crate::interpret::{self,
- PlaceTy, MPlaceTy, MemPlace, OpTy, Operand, Immediate, Scalar, RawConst, ConstValue, Pointer,
+ PlaceTy, MPlaceTy, MemPlace, OpTy, ImmTy, Operand, Immediate, Scalar, Pointer,
+ RawConst, ConstValue,
EvalResult, EvalError, EvalErrorKind, GlobalId, EvalContext, StackPopCleanup,
Allocation, AllocId, MemoryKind,
snapshot, RefTracking,
/// `simd_shuffle` and const patterns in match arms.
///
/// The function containing the `match` that is currently being analyzed may have generic bounds
-/// that inform us about the generic bounds of the constant. E.g. using an associated constant
+/// that inform us about the generic bounds of the constant. E.g., using an associated constant
/// of a function's generic parameter will require knowledge about the bounds on the generic
/// parameter. These bounds are passed to `mk_eval_cx` via the `ParamEnv` argument.
pub(crate) fn mk_eval_cx<'a, 'mir, 'tcx>(
let normalized_op = if normalize {
ecx.try_read_immediate(op)?
} else {
- match op.op {
+ match *op {
Operand::Indirect(mplace) => Err(mplace),
Operand::Immediate(val) => Ok(val)
}
Ok(ty::Const { val, ty: op.layout.ty })
}
-pub fn lazy_const_to_op<'tcx>(
- ecx: &CompileTimeEvalContext<'_, '_, 'tcx>,
- cnst: ty::LazyConst<'tcx>,
- ty: ty::Ty<'tcx>,
-) -> EvalResult<'tcx, OpTy<'tcx>> {
- let op = ecx.const_value_to_op(cnst)?;
- Ok(OpTy { op, layout: ecx.layout_of(ty)? })
-}
-
fn eval_body_and_ecx<'a, 'mir, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
cid: GlobalId<'tcx>,
fn ptr_op(
_ecx: &EvalContext<'a, 'mir, 'tcx, Self>,
_bin_op: mir::BinOp,
- _left: Scalar,
- _left_layout: TyLayout<'tcx>,
- _right: Scalar,
- _right_layout: TyLayout<'tcx>,
+ _left: ImmTy<'tcx>,
+ _right: ImmTy<'tcx>,
) -> EvalResult<'tcx, (Scalar, bool)> {
Err(
ConstEvalError::NeedsRfc("pointer arithmetic or comparison".to_string()).into(),
Ok(())
}
- /// Called immediately before a stack frame gets popped
+ /// Called immediately before a stack frame gets popped.
#[inline(always)]
fn stack_pop(
_ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
}
}
-/// Project to a field of a (variant of a) const
+/// Projects to a field of a (variant of a) const.
pub fn const_field<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env);
let result = (|| {
// get the operand again
- let op = lazy_const_to_op(&ecx, ty::LazyConst::Evaluated(value), value.ty)?;
+ let op = ecx.lazy_const_to_op(ty::LazyConst::Evaluated(value), value.ty)?;
// downcast
let down = match variant {
None => op,
) -> EvalResult<'tcx, VariantIdx> {
trace!("const_variant_index: {:?}", val);
let ecx = mk_eval_cx(tcx, DUMMY_SP, param_env);
- let op = lazy_const_to_op(&ecx, ty::LazyConst::Evaluated(val), val.ty)?;
+ let op = ecx.lazy_const_to_op(ty::LazyConst::Evaluated(val), val.ty)?;
Ok(ecx.read_discriminant(op)?.1)
}
op,
path,
Some(&mut ref_tracking),
- /* const_mode */ true,
+ true, // const mode
)?;
}
- // Now that we validated, turn this into a proper constant
+ // Now that we validated, turn this into a proper constant.
let def_id = cid.instance.def.def_id();
let normalize = tcx.is_static(def_id).is_none() && cid.promoted.is_none();
op_to_const(&ecx, op, normalize)
/// effects don't apply to the unwind edge).
fn reset_to_exit_of(&mut self, bb: BasicBlock);
- /// Build gen + kill sets for statement at `loc`.
+ /// Builds gen and kill sets for statement at `loc`.
///
/// Note that invoking this method alone does not change the
/// `curr_state` -- you must invoke `apply_local_effect`
/// afterwards.
fn reconstruct_statement_effect(&mut self, loc: Location);
- /// Build gen + kill sets for terminator for `loc`.
+ /// Builds gen and kill sets for terminator for `loc`.
///
/// Note that invoking this method alone does not change the
/// `curr_state` -- you must invoke `apply_local_effect`
/// In both cases, the contents can only be accessed if and only if
/// their parents are initialized. This implies for example that there
/// is no need to maintain separate drop flags to track such state.
-///
-/// FIXME: we have to do something for moving slice patterns.
+//
+// FIXME: we have to do something for moving slice patterns.
fn place_contents_drop_state_cannot_differ<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>,
mir: &Mir<'tcx>,
place: &mir::Place<'tcx>) -> bool {
Ok(())
}
- /// Build the verbose row: full MIR data, and detailed gen/kill/entry sets
+ /// Builds the verbose row: full MIR data, and detailed gen/kill/entry sets.
fn node_label_verbose_row<W: io::Write>(&self,
n: &Node,
w: &mut W,
Ok(())
}
- /// Build the summary row: terminator, gen/kill/entry bit sets
+ /// Builds the summary row: terminator, gen/kill/entry bit sets.
fn node_label_final_row<W: io::Write>(&self,
n: &Node,
w: &mut W,
/// initialized upon reaching a particular point in the control flow
/// for a function.
///
-/// FIXME: Note that once flow-analysis is complete, this should be
-/// the set-complement of MaybeUninitializedPlaces; thus we can get rid
-/// of one or the other of these two. I'm inclined to get rid of
-/// MaybeUninitializedPlaces, simply because the sets will tend to be
-/// smaller in this analysis and thus easier for humans to process
-/// when debugging.
-///
/// For example, in code like the following, we have corresponding
/// dataflow information shown in the right-hand comments.
///
/// the block's start, not necessarily the state immediately prior
/// to the statement/terminator under analysis.
///
- /// In either case, the passed reference is mutable; but this is a
+ /// In either case, the passed reference is mutable, but this is a
/// wart from using the `BlockSets` type in the API; the intention
/// is that the `statement_effect` and `terminator_effect` methods
/// mutate only the gen/kill sets.
- ///
- /// FIXME: We should consider enforcing the intention described in
- /// the previous paragraph by passing the three sets in separate
- /// parameters to encode their distinct mutabilities.
+ //
+ // FIXME: we should consider enforcing the intention described in
+ // the previous paragraph by passing the three sets in separate
+ // parameters to encode their distinct mutabilities.
fn accumulates_intrablock_state() -> bool { false }
/// A name describing the dataflow analysis that this
- /// BitDenotation is supporting. The name should be something
- /// suitable for plugging in as part of a filename e.g., avoid
+ /// `BitDenotation` is supporting. The name should be something
+ /// suitable for plugging in as part of a filename (i.e., avoid
/// space-characters or other things that tend to look bad on a
- /// file system, like slashes or periods. It is also better for
+ /// file system, like slashes or periods). It is also better for
/// the name to be reasonably short, again because it will be
/// plugged into a filename.
fn name() -> &'static str;
/// flow-dependent, the current MIR cannot encode them via just
/// GEN and KILL sets attached to the block, and so instead we add
/// this extra machinery to represent the flow-dependent effect.
- ///
- /// FIXME: Right now this is a bit of a wart in the API. It might
- /// be better to represent this as an additional gen- and
- /// kill-sets associated with each edge coming out of the basic
- /// block.
+ //
+ // FIXME: right now this is a bit of a wart in the API. It might
+ // be better to represent this as an additional gen- and
+ // kill-sets associated with each edge coming out of the basic
+ // block.
fn propagate_call_return(
&self,
in_out: &mut BitSet<Self::Idx>,
//! The move-analysis portion of borrowck needs to work in an abstract
-//! domain of lifted Places. Most of the Place variants fall into a
+//! domain of lifted `Place`s. Most of the `Place` variants fall into a
//! one-to-one mapping between the concrete and abstract (e.g., a
-//! field-deref on a local-variable, `x.field`, has the same meaning
-//! in both domains). Indexed-Projections are the exception: `a[x]`
+//! field-deref on a local variable, `x.field`, has the same meaning
+//! in both domains). Indexed projections are the exception: `a[x]`
//! needs to be treated as mapping to the same move path as `a[y]` as
-//! well as `a[13]`, et cetera.
+//! well as `a[13]`, etc.
//!
-//! (In theory the analysis could be extended to work with sets of
+//! (In theory, the analysis could be extended to work with sets of
//! paths, so that `a[0]` and `a[13]` could be kept distinct, while
//! `a[x]` would still overlap them both. But that is not this
//! representation does today.)
}
```
-See also https://doc.rust-lang.org/book/first-edition/unsafe.html
+See also https://doc.rust-lang.org/book/ch19-01-unsafe-rust.html
"##,
E0373: r##"
If you wish to learn more about ownership in Rust, start with the chapter in the
Book:
-https://doc.rust-lang.org/book/first-edition/ownership.html
+https://doc.rust-lang.org/book/ch04-00-understanding-ownership.html
"##,
E0383: r##"
Please note that in rust, you can either have many immutable references, or one
mutable reference. Take a look at
-https://doc.rust-lang.org/stable/book/references-and-borrowing.html for more
+https://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html for more
information. Example:
```
For more information on the rust ownership system, take a look at
-https://doc.rust-lang.org/stable/book/references-and-borrowing.html.
+https://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html.
"##,
E0503: r##"
```
You can find more information about borrowing in the rust-book:
-http://doc.rust-lang.org/stable/book/references-and-borrowing.html
+http://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html
"##,
E0504: r##"
```
You can find more information about borrowing in the rust-book:
-http://doc.rust-lang.org/stable/book/references-and-borrowing.html
+http://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html
"##,
E0506: r##"
```
You can find more information about borrowing in the rust-book:
-http://doc.rust-lang.org/book/first-edition/references-and-borrowing.html
+http://doc.rust-lang.org/book/ch04-02-references-and-borrowing.html
"##,
E0508: r##"
-use crate::hair::*;
+use crate::hair::{self, *};
use crate::hair::cx::Cx;
use crate::hair::cx::to_ref::ToRef;
use rustc::middle::region;
ty: pattern.ty,
span: pattern.span,
kind: Box::new(PatternKind::AscribeUserType {
- user_ty: PatternTypeProjection::from_user_type(user_ty),
- user_ty_span: ty.span,
+ ascription: hair::pattern::Ascription {
+ user_ty: PatternTypeProjection::from_user_type(user_ty),
+ user_ty_span: ty.span,
+ variance: ty::Variance::Covariant,
+ },
subpattern: pattern,
- variance: ty::Variance::Covariant,
})
};
}
pub fn to_expr_ref<'a, 'gcx, 'tcx>(cx: &mut Cx<'a, 'gcx, 'tcx>,
block: &'tcx hir::Block)
-> ExprRef<'tcx> {
- let block_ty = cx.tables().node_id_to_type(block.hir_id);
+ let block_ty = cx.tables().node_type(block.hir_id);
let temp_lifetime = cx.region_scope_tree.temporary_scope(block.hir_id.local_id);
let expr = Expr {
ty: block_ty,
}
} else {
ExprKind::Call {
- ty: cx.tables().node_id_to_type(fun.hir_id),
+ ty: cx.tables().node_type(fun.hir_id),
fun: fun.to_ref(),
args: args.to_ref(),
from_hir_call: true,
let def = cx.tables().qpath_def(qpath, source.hir_id);
cx
.tables()
- .node_id_to_type(source.hir_id)
+ .node_type(source.hir_id)
.ty_adt_def()
.and_then(|adt_def| {
match def {
debug!("convert_path_expr: user_ty={:?}", user_ty);
ExprKind::Literal {
literal: cx.tcx.mk_lazy_const(ty::LazyConst::Evaluated(ty::Const::zero_sized(
- cx.tables().node_id_to_type(expr.hir_id),
+ cx.tables().node_type(expr.hir_id),
))),
user_ty,
}
let user_provided_types = cx.tables.user_provided_types();
let user_provided_type = user_provided_types.get(expr.hir_id).map(|u_ty| *u_ty);
debug!("convert_path_expr: user_provided_type={:?}", user_provided_type);
- match cx.tables().node_id_to_type(expr.hir_id).sty {
+ match cx.tables().node_type(expr.hir_id).sty {
// A unit struct/variant which is used as a value.
// We return a completely different ExprKind here to account for this special case.
ty::Adt(adt_def, substs) => {
index,
closure_expr_id);
let var_hir_id = cx.tcx.hir().node_to_hir_id(var_id);
- let var_ty = cx.tables().node_id_to_type(var_hir_id);
+ let var_ty = cx.tables().node_type(var_hir_id);
// FIXME free regions in closures are not right
let closure_ty = cx.tables()
- .node_id_to_type(cx.tcx.hir().node_to_hir_id(closure_expr_id));
+ .node_type(cx.tcx.hir().node_to_hir_id(closure_expr_id));
// FIXME we're just hard-coding the idea that the
// signature will be &self or &mut self and hence will
};
let upvar_capture = cx.tables().upvar_capture(upvar_id);
let temp_lifetime = cx.region_scope_tree.temporary_scope(closure_expr.hir_id.local_id);
- let var_ty = cx.tables().node_id_to_type(var_hir_id);
+ let var_ty = cx.tables().node_type(var_hir_id);
let captured_var = Expr {
temp_lifetime,
ty: var_ty,
-//! This module contains the code to convert from the wacky tcx data
-//! structures into the hair. The `builder` is generally ignorant of
-//! the tcx etc, and instead goes through the `Cx` for most of its
-//! work.
-//!
+//! This module contains the functionality to convert from the wacky tcx data
+//! structures into the HAIR. The `builder` is generally ignorant of the tcx,
+//! etc., and instead goes through the `Cx` for most of its work.
use crate::hair::*;
use crate::hair::util::UserAnnotatedTyHelpers;
/// What kind of body is being compiled.
pub body_owner_kind: hir::BodyOwnerKind,
- /// True if this constant/function needs overflow checks.
+ /// Whether this constant/function needs overflow checks.
check_overflow: bool,
- /// See field with the same name on `Mir`
+ /// See field with the same name on `Mir`.
control_flow_destroyed: Vec<(Span, String)>,
}
}
impl<'a, 'gcx, 'tcx> Cx<'a, 'gcx, 'tcx> {
- /// Normalizes `ast` into the appropriate `mirror` type.
+ /// Normalizes `ast` into the appropriate "mirror" type.
pub fn mirror<M: Mirror<'tcx>>(&mut self, ast: M) -> M::Output {
ast.make_mirror(self)
}
/// Mirroring is gradual: when you mirror an outer expression like `e1
/// + e2`, the references to the inner expressions `e1` and `e2` are
/// `ExprRef<'tcx>` instances, and they may or may not be eagerly
-/// mirrored. This allows a single AST node from the compiler to
+/// mirrored. This allows a single AST node from the compiler to
/// expand into one or more Hair nodes, which lets the Hair nodes be
/// simpler.
pub trait Mirror<'tcx> {
///
/// The algorithm implemented here is a modified version of the one described in:
/// http://moscova.inria.fr/~maranget/papers/warn/index.html
-/// However, to save future implementors from reading the original paper, I'm going
-/// to summarise the algorithm here to hopefully save time and be a little clearer
+/// However, to save future implementors from reading the original paper, we
+/// summarise the algorithm here to hopefully save time and be a little clearer
/// (without being so rigorous).
///
/// The core of the algorithm revolves about a "usefulness" check. In particular, we
/// The module in which the match occurs. This is necessary for
/// checking inhabited-ness of types because whether a type is (visibly)
/// inhabited can depend on whether it was defined in the current module or
- /// not. eg. `struct Foo { _private: ! }` cannot be seen to be empty
+ /// not. E.g., `struct Foo { _private: ! }` cannot be seen to be empty
/// outside its module and should not be matchable with an empty match
/// statement.
pub module: DefId,
}
fn from_pat(tcx: TyCtxt<'_, 'tcx, 'tcx>,
- pat: &Pattern<'tcx>)
+ mut pat: &Pattern<'tcx>)
-> Option<IntRange<'tcx>> {
- Self::from_ctor(tcx, &match pat.kind {
- box PatternKind::Constant { value } => ConstantValue(value),
- box PatternKind::Range(PatternRange { lo, hi, ty, end }) => ConstantRange(
- lo.to_bits(tcx, ty::ParamEnv::empty().and(ty)).unwrap(),
- hi.to_bits(tcx, ty::ParamEnv::empty().and(ty)).unwrap(),
- ty,
- end,
- ),
- _ => return None,
- })
+ let range = loop {
+ match pat.kind {
+ box PatternKind::Constant { value } => break ConstantValue(value),
+ box PatternKind::Range(PatternRange { lo, hi, ty, end }) => break ConstantRange(
+ lo.to_bits(tcx, ty::ParamEnv::empty().and(ty)).unwrap(),
+ hi.to_bits(tcx, ty::ParamEnv::empty().and(ty)).unwrap(),
+ ty,
+ end,
+ ),
+ box PatternKind::AscribeUserType { ref subpattern, .. } => {
+ pat = subpattern;
+ },
+ _ => return None,
+ }
+ };
+ Self::from_ctor(tcx, &range)
}
// The return value of `signed_bias` should be XORed with an endpoint to encode/decode it.
}
}
- /// Convert a `RangeInclusive` to a `ConstantValue` or inclusive `ConstantRange`.
+ /// Converts a `RangeInclusive` to a `ConstantValue` or inclusive `ConstantRange`.
fn range_to_ctor(
tcx: TyCtxt<'_, 'tcx, 'tcx>,
ty: Ty<'tcx>,
}
}
- /// Return a collection of ranges that spans the values covered by `ranges`, subtracted
+ /// Returns a collection of ranges that spans the values covered by `ranges`, subtracted
/// by the values covered by `self`: i.e., `ranges \ self` (in set notation).
fn subtract_from(self,
tcx: TyCtxt<'_, 'tcx, 'tcx>,
}
}
-/// Algorithm from http://moscova.inria.fr/~maranget/papers/warn/index.html
+/// Algorithm from http://moscova.inria.fr/~maranget/papers/warn/index.html.
/// The algorithm from the paper has been modified to correctly handle empty
/// types. The changes are:
/// (0) We don't exit early if the pattern matrix has zero rows. We just
/// continue to recurse over columns.
/// (1) all_constructors will only return constructors that are statically
-/// possible. eg. it will only return Ok for Result<T, !>
+/// possible. E.g., it will only return `Ok` for `Result<T, !>`.
///
/// This finds whether a (row) vector `v` of patterns is 'useful' in relation
/// to a set of such vectors `m` - this is defined as there being a set of
///
/// All the patterns at each column of the `matrix ++ v` matrix must
/// have the same type, except that wildcard (PatternKind::Wild) patterns
-/// with type TyErr are also allowed, even if the "type of the column"
-/// is not TyErr. That is used to represent private fields, as using their
+/// with type `TyErr` are also allowed, even if the "type of the column"
+/// is not `TyErr`. That is used to represent private fields, as using their
/// real type would assert that they are inhabited.
///
/// This is used both for reachability checking (if a pattern isn't useful in
/// Slice patterns, however, can match slices of different lengths. For instance,
/// `[a, b, ..tail]` can match a slice of length 2, 3, 4 and so on.
///
-/// Returns None in case of a catch-all, which can't be specialized.
+/// Returns `None` in case of a catch-all, which can't be specialized.
fn pat_constructors<'tcx>(cx: &mut MatchCheckCtxt<'_, 'tcx>,
pat: &Pattern<'tcx>,
pcx: PatternContext<'_>)
split_ctors
}
-/// Check whether there exists any shared value in either `ctor` or `pat` by intersecting them.
+/// Checks whether there exists any shared value in either `ctor` or `pat` by intersecting them.
fn constructor_intersects_pattern<'p, 'a: 'p, 'tcx: 'a>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
ctor: &Constructor<'tcx>,
// Then, if the match has no arms, check whether the scrutinee
// is uninhabited.
- let pat_ty = self.tables.node_id_to_type(scrut.hir_id);
+ let pat_ty = self.tables.node_type(scrut.hir_id);
let module = self.tcx.hir().get_module_parent(scrut.id);
if inlined_arms.is_empty() {
let scrutinee_is_uninhabited = if self.tcx.features().exhaustive_patterns {
.flat_map(|arm| &arm.0)
.map(|pat| smallvec![pat.0])
.collect();
- let scrut_ty = self.tables.node_id_to_type(scrut.hir_id);
+ let scrut_ty = self.tables.node_type(scrut.hir_id);
check_exhaustive(cx, scrut_ty, scrut.span, &matrix);
})
}
if let Some(&bm) = cx.tables.pat_binding_modes().get(p.hir_id) {
match bm {
ty::BindByValue(..) => {
- let pat_ty = cx.tables.node_id_to_type(p.hir_id);
+ let pat_ty = cx.tables.node_type(p.hir_id);
if !pat_ty.is_copy_modulo_regions(cx.tcx, cx.param_env, pat.span) {
check_move(p, sub.as_ref().map(|p| &**p), span_vec);
}
}
}
-/// Ensures that a pattern guard doesn't borrow by mutable reference or
-/// assign.
-///
-/// FIXME: this should be done by borrowck.
+/// Ensures that a pattern guard doesn't borrow by mutable reference or assign.
+//
+// FIXME: this should be done by borrowck.
fn check_for_mutation_in_guard(cx: &MatchVisitor<'_, '_>, guard: &hir::Guard) {
let mut checker = MutationChecker {
cx,
-//! Code to validate patterns/matches
+//! Validation of patterns/matches.
mod _match;
mod check_match;
}
-#[derive(Clone, Debug)]
+#[derive(Copy, Clone, Debug, PartialEq)]
pub struct PatternTypeProjection<'tcx> {
pub user_ty: CanonicalUserType<'tcx>,
}
}
}
+#[derive(Copy, Clone, Debug, PartialEq)]
+pub struct Ascription<'tcx> {
+ pub user_ty: PatternTypeProjection<'tcx>,
+ /// Variance to use when relating the type `user_ty` to the **type of the value being
+ /// matched**. Typically, this is `Variance::Covariant`, since the value being matched must
+ /// have a type that is some subtype of the ascribed type.
+ ///
+ /// Note that this variance does not apply for any bindings within subpatterns. The type
+ /// assigned to those bindings must be exactly equal to the `user_ty` given here.
+ ///
+ /// The only place where this field is not `Covariant` is when matching constants, where
+ /// we currently use `Contravariant` -- this is because the constant type just needs to
+ /// be "comparable" to the type of the input value. So, for example:
+ ///
+ /// ```text
+ /// match x { "foo" => .. }
+ /// ```
+ ///
+ /// requires that `&'static str <: T_x`, where `T_x` is the type of `x`. Really, we should
+ /// probably be checking for a `PartialEq` impl instead, but this preserves the behavior
+ /// of the old type-check for now. See #57280 for details.
+ pub variance: ty::Variance,
+ pub user_ty_span: Span,
+}
+
#[derive(Clone, Debug)]
pub enum PatternKind<'tcx> {
Wild,
AscribeUserType {
- user_ty: PatternTypeProjection<'tcx>,
+ ascription: Ascription<'tcx>,
subpattern: Pattern<'tcx>,
- /// Variance to use when relating the type `user_ty` to the **type of the value being
- /// matched**. Typically, this is `Variance::Covariant`, since the value being matched must
- /// have a type that is some subtype of the ascribed type.
- ///
- /// Note that this variance does not apply for any bindings within subpatterns. The type
- /// assigned to those bindings must be exactly equal to the `user_ty` given here.
- ///
- /// The only place where this field is not `Covariant` is when matching constants, where
- /// we currently use `Contravariant` -- this is because the constant type just needs to
- /// be "comparable" to the type of the input value. So, for example:
- ///
- /// ```text
- /// match x { "foo" => .. }
- /// ```
- ///
- /// requires that `&'static str <: T_x`, where `T_x` is the type of `x`. Really, we should
- /// probably be checking for a `PartialEq` impl instead, but this preserves the behavior
- /// of the old type-check for now. See #57280 for details.
- variance: ty::Variance,
- user_ty_span: Span,
},
- /// x, ref x, x @ P, etc
+ /// `x`, `ref x`, `x @ P`, etc.
Binding {
mutability: Mutability,
name: ast::Name,
subpattern: Option<Pattern<'tcx>>,
},
- /// Foo(...) or Foo{...} or Foo, where `Foo` is a variant name from an adt with >1 variants
+ /// `Foo(...)` or `Foo{...}` or `Foo`, where `Foo` is a variant name from an ADT with
+ /// multiple variants.
Variant {
adt_def: &'tcx AdtDef,
substs: &'tcx Substs<'tcx>,
subpatterns: Vec<FieldPattern<'tcx>>,
},
- /// (...), Foo(...), Foo{...}, or Foo, where `Foo` is a variant name from an adt with 1 variant
+ /// `(...)`, `Foo(...)`, `Foo{...}`, or `Foo`, where `Foo` is a variant name from an ADT with
+ /// a single variant.
Leaf {
subpatterns: Vec<FieldPattern<'tcx>>,
},
- /// box P, &P, &mut P, etc
+ /// `box P`, `&P`, `&mut P`, etc.
Deref {
subpattern: Pattern<'tcx>,
},
Range(PatternRange<'tcx>),
- /// matches against a slice, checking the length and extracting elements.
+ /// Matches against a slice, checking the length and extracting elements.
/// irrefutable when there is a slice pattern and both `prefix` and `suffix` are empty.
/// e.g., `&[ref xs..]`.
Slice {
suffix: Vec<Pattern<'tcx>>,
},
- /// fixed match against an array, irrefutable
+ /// Fixed match against an array; irrefutable.
Array {
prefix: Vec<Pattern<'tcx>>,
slice: Option<Pattern<'tcx>>,
},
}
-impl<'tcx> PatternKind<'tcx> {
- /// If this is a `PatternKind::AscribeUserType` then return the subpattern kind, otherwise
- /// return this pattern kind.
- fn with_user_type_ascription_subpattern(self) -> Self {
- match self {
- PatternKind::AscribeUserType { subpattern: Pattern { box kind, .. }, .. } => kind,
- kind => kind,
- }
- }
-}
-
-#[derive(Clone, Copy, Debug, PartialEq)]
+#[derive(Copy, Clone, Debug, PartialEq)]
pub struct PatternRange<'tcx> {
pub lo: ty::Const<'tcx>,
pub hi: ty::Const<'tcx>,
)
}
+ fn lower_range_expr(
+ &mut self,
+ expr: &'tcx hir::Expr,
+ ) -> (PatternKind<'tcx>, Option<Ascription<'tcx>>) {
+ match self.lower_lit(expr) {
+ PatternKind::AscribeUserType {
+ ascription: lo_ascription,
+ subpattern: Pattern { kind: box kind, .. },
+ } => (kind, Some(lo_ascription)),
+ kind => (kind, None),
+ }
+ }
+
fn lower_pattern_unadjusted(&mut self, pat: &'tcx hir::Pat) -> Pattern<'tcx> {
- let mut ty = self.tables.node_id_to_type(pat.hir_id);
+ let mut ty = self.tables.node_type(pat.hir_id);
let kind = match pat.node {
PatKind::Wild => PatternKind::Wild,
PatKind::Lit(ref value) => self.lower_lit(value),
PatKind::Range(ref lo_expr, ref hi_expr, end) => {
- match (
- // Look for `PatternKind::Constant` patterns inside of any
- // `PatternKind::AscribeUserType` patterns. Type ascriptions can be safely
- // ignored for the purposes of lowering a range correctly - these are checked
- // elsewhere for well-formedness.
- self.lower_lit(lo_expr).with_user_type_ascription_subpattern(),
- self.lower_lit(hi_expr).with_user_type_ascription_subpattern(),
- ) {
+ let (lo, lo_ascription) = self.lower_range_expr(lo_expr);
+ let (hi, hi_ascription) = self.lower_range_expr(hi_expr);
+
+ let mut kind = match (lo, hi) {
(PatternKind::Constant { value: lo }, PatternKind::Constant { value: hi }) => {
use std::cmp::Ordering;
let cmp = compare_const_vals(
PatternKind::Wild
}
}
- }
+ },
ref pats => {
self.tcx.sess.delay_span_bug(
pat.span,
- &format!("found bad range pattern `{:?}` outside of error recovery",
- pats),
+ &format!(
+ "found bad range pattern `{:?}` outside of error recovery",
+ pats,
+ ),
);
PatternKind::Wild
+ },
+ };
+
+ // If we are handling a range with associated constants (e.g.
+ // `Foo::<'a>::A..=Foo::B`), we need to put the ascriptions for the associated
+ // constants somewhere. Have them on the range pattern.
+ for ascription in &[lo_ascription, hi_ascription] {
+ if let Some(ascription) = ascription {
+ kind = PatternKind::AscribeUserType {
+ ascription: *ascription,
+ subpattern: Pattern { span: pat.span, ty, kind: Box::new(kind), },
+ };
}
}
+
+ kind
}
PatKind::Path(ref qpath) => {
}
PatKind::Binding(_, id, _, ident, ref sub) => {
- let var_ty = self.tables.node_id_to_type(pat.hir_id);
+ let var_ty = self.tables.node_type(pat.hir_id);
if let ty::Error = var_ty.sty {
// Avoid ICE
return Pattern { span: pat.span, ty, kind: Box::new(PatternKind::Wild) };
ty,
kind: Box::new(kind),
},
- user_ty: PatternTypeProjection::from_user_type(user_ty),
- user_ty_span: span,
- variance: ty::Variance::Covariant,
+ ascription: Ascription {
+ user_ty: PatternTypeProjection::from_user_type(user_ty),
+ user_ty_span: span,
+ variance: ty::Variance::Covariant,
+ },
};
}
/// Takes a HIR Path. If the path is a constant, evaluates it and feeds
/// it to `const_to_pat`. Any other path (like enum variants without fields)
- /// is converted to the corresponding pattern via `lower_variant_or_leaf`
+ /// is converted to the corresponding pattern via `lower_variant_or_leaf`.
fn lower_path(&mut self,
qpath: &hir::QPath,
id: hir::HirId,
span: Span)
-> Pattern<'tcx> {
- let ty = self.tables.node_id_to_type(id);
+ let ty = self.tables.node_type(id);
let def = self.tables.qpath_def(qpath, id);
let is_associated_const = match def {
Def::AssociatedConst(_) => true,
kind: Box::new(
PatternKind::AscribeUserType {
subpattern: pattern,
- /// Note that use `Contravariant` here. See the
- /// `variance` field documentation for details.
- variance: ty::Variance::Contravariant,
- user_ty,
- user_ty_span: span,
+ ascription: Ascription {
+ /// Note that we use `Contravariant` here. See the
+ /// `variance` field documentation for details.
+ variance: ty::Variance::Contravariant,
+ user_ty,
+ user_ty_span: span,
+ },
}
),
ty: value.ty,
}
/// Converts literals, paths and negation of literals to patterns.
- /// The special case for negation exists to allow things like -128i8
- /// which would overflow if we tried to evaluate 128i8 and then negate
+ /// The special case for negation exists to allow things like `-128_i8`
+ /// which would overflow if we tried to evaluate `128_i8` and then negate
/// afterwards.
fn lower_lit(&mut self, expr: &'tcx hir::Expr) -> PatternKind<'tcx> {
match expr.node {
/// Converts an evaluated constant to a pattern (if possible).
/// This means aggregate values (like structs and enums) are converted
- /// to a pattern that matches the value (as if you'd compare via eq).
+ /// to a pattern that matches the value (as if you'd compared via equality).
fn const_to_pat(
&self,
instance: ty::Instance<'tcx>,
PatternKind::Wild => PatternKind::Wild,
PatternKind::AscribeUserType {
ref subpattern,
- variance,
- ref user_ty,
- user_ty_span,
+ ascription: Ascription {
+ variance,
+ ref user_ty,
+ user_ty_span,
+ },
} => PatternKind::AscribeUserType {
subpattern: subpattern.fold_with(folder),
- user_ty: user_ty.fold_with(folder),
- variance,
- user_ty_span,
+ ascription: Ascription {
+ user_ty: user_ty.fold_with(folder),
+ variance,
+ user_ty_span,
+ },
},
PatternKind::Binding {
mutability,
let user_provided_types = self.tables().user_provided_types();
let mut user_ty = *user_provided_types.get(hir_id)?;
debug!("user_subts_applied_to_ty_of_hir_id: user_ty={:?}", user_ty);
- match &self.tables().node_id_to_type(hir_id).sty {
+ match &self.tables().node_type(hir_id).sty {
ty::Adt(adt_def, ..) => {
if let UserType::TypeOf(ref mut did, _) = &mut user_ty.value {
*did = adt_def.did;
use rustc::mir::CastKind;
use rustc_apfloat::Float;
-use super::{EvalContext, Machine, PlaceTy, OpTy, Immediate};
+use super::{EvalContext, Machine, PlaceTy, OpTy, ImmTy, Immediate};
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
fn type_is_fat_ptr(&self, ty: Ty<'tcx>) -> bool {
assert_eq!(src.layout.fields.offset(i).bytes(), 0);
assert_eq!(src_field_layout.size, src.layout.size);
// just swap out the layout
- OpTy { op: src.op, layout: src_field_layout }
+ OpTy::from(ImmTy { imm: src.to_immediate(), layout: src_field_layout })
}
};
if src_field.layout.ty == dst_field.layout.ty {
/// The MIR for the function called on this frame.
pub mir: &'mir mir::Mir<'tcx>,
- /// The def_id and substs of the current function
+ /// The def_id and substs of the current function.
pub instance: ty::Instance<'tcx>,
/// The span of the call site.
////////////////////////////////////////////////////////////////////////////////
// Return place and locals
////////////////////////////////////////////////////////////////////////////////
- /// Work to perform when returning from this function
+ /// Work to perform when returning from this function.
pub return_to_block: StackPopCleanup,
/// The location where the result of the current stack frame should be written to,
/// The index of the currently evaluated statement.
pub stmt: usize,
- /// Extra data for the machine
+ /// Extra data for the machine.
pub extra: Extra,
}
/// we can validate it at that layout.
Goto(Option<mir::BasicBlock>),
/// Just do nothing: Used by Main and for the box_alloc hook in miri.
- /// `cleanup` says whether locals are deallocated. Static computation
+ /// `cleanup` says whether locals are deallocated. Static computation
/// wants them leaked to intern what they need (and just throw away
/// the entire `ecx` when it is done).
None { cleanup: bool },
Ok(Immediate::new_slice(Scalar::Ptr(ptr), s.len() as u64, self))
}
- /// Return the actual dynamic size and alignment of the place at the given type.
+ /// Returns the actual dynamic size and alignment of the place at the given type.
/// Only the "meta" (metadata) part of the place matters.
/// This can fail to provide an answer for extern types.
pub(super) fn size_and_align_of(
//! Intrinsics and other functions that the miri engine executes without
-//! looking at their MIR. Intrinsics/functions supported here are shared by CTFE
+//! looking at their MIR. Intrinsics/functions supported here are shared by CTFE
//! and miri.
use syntax::symbol::Symbol;
}
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
- /// Returns whether emulation happened.
+ /// Returns `true` if emulation happened.
pub fn emulate_intrinsic(
&mut self,
instance: ty::Instance<'tcx>,
let l = self.read_immediate(args[0])?;
let r = self.read_immediate(args[1])?;
let is_add = intrinsic_name == "saturating_add";
- let (val, overflowed) = self.binary_op_imm(if is_add {
+ let (val, overflowed) = self.binary_op(if is_add {
BinOp::Add
} else {
BinOp::Sub
"unchecked_shr" => BinOp::Shr,
_ => bug!("Already checked for int ops")
};
- let (val, overflowed) = self.binary_op_imm(bin_op, l, r)?;
+ let (val, overflowed) = self.binary_op(bin_op, l, r)?;
if overflowed {
let layout = self.layout_of(substs.type_at(0))?;
let r_val = r.to_scalar()?.to_bits(layout.size)?;
}
/// "Intercept" a function call because we have something special to do for it.
- /// Returns whether an intercept happened.
+ /// Returns `true` if an intercept happened.
pub fn hook_fn(
&mut self,
instance: ty::Instance<'tcx>,
use rustc::hir::{self, def_id::DefId};
use rustc::mir;
-use rustc::ty::{self, layout::TyLayout, query::TyCtxtAt};
+use rustc::ty::{self, query::TyCtxtAt};
use super::{
Allocation, AllocId, EvalResult, Scalar, AllocationExtra,
- EvalContext, PlaceTy, MPlaceTy, OpTy, Pointer, MemoryKind,
+ EvalContext, PlaceTy, MPlaceTy, OpTy, ImmTy, Pointer, MemoryKind,
};
/// Whether this kind of memory is allowed to leak
/// The functionality needed by memory to manage its allocations
pub trait AllocMap<K: Hash + Eq, V> {
- /// Test if the map contains the given key.
+ /// Tests if the map contains the given key.
/// Deliberately takes `&mut` because that is sufficient, and some implementations
/// can be more efficient then (using `RefCell::get_mut`).
fn contains_key<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> bool
where K: Borrow<Q>;
- /// Insert new entry into the map.
+ /// Inserts a new entry into the map.
fn insert(&mut self, k: K, v: V) -> Option<V>;
- /// Remove entry from the map.
+ /// Removes an entry from the map.
fn remove<Q: ?Sized + Hash + Eq>(&mut self, k: &Q) -> Option<V>
where K: Borrow<Q>;
- /// Return data based the keys and values in the map.
+ /// Returns data based on the keys and values in the map.
fn filter_map_collect<T>(&self, f: impl FnMut(&K, &V) -> Option<T>) -> Vec<T>;
- /// Return a reference to entry `k`. If no such entry exists, call
+ /// Returns a reference to entry `k`. If no such entry exists, call
/// `vacant` and either forward its error, or add its result to the map
/// and return a reference to *that*.
fn get_or<E>(
vacant: impl FnOnce() -> Result<V, E>
) -> Result<&V, E>;
- /// Return a mutable reference to entry `k`. If no such entry exists, call
+ /// Returns a mutable reference to entry `k`. If no such entry exists, call
/// `vacant` and either forward its error, or add its result to the map
/// and return a reference to *that*.
fn get_mut_or<E>(
/// Additional memory kinds a machine wishes to distinguish from the builtin ones
type MemoryKinds: ::std::fmt::Debug + MayLeak + Eq + 'static;
- /// Tag tracked alongside every pointer. This is used to implement "Stacked Borrows"
+ /// Tag tracked alongside every pointer. This is used to implement "Stacked Borrows"
/// <https://www.ralfj.de/blog/2018/08/07/stacked-borrows.html>.
/// The `default()` is used for pointers to consts, statics, vtables and functions.
type PointerTag: ::std::fmt::Debug + Default + Copy + Eq + Hash + 'static;
/// Extra data stored in every call frame.
type FrameExtra;
- /// Extra data stored in memory. A reference to this is available when `AllocExtra`
+ /// Extra data stored in memory. A reference to this is available when `AllocExtra`
/// gets initialized, so you can e.g., have an `Rc` here if there is global state you
/// need access to in the `AllocExtra` hooks.
type MemoryExtra: Default;
///
/// Returns either the mir to use for the call, or `None` if execution should
/// just proceed (which usually means this hook did all the work that the
- /// called function should usually have done). In the latter case, it is
+ /// called function should usually have done). In the latter case, it is
/// this hook's responsibility to call `goto_block(ret)` to advance the instruction pointer!
/// (This is to support functions like `__rust_maybe_catch_panic` that neither find a MIR
/// nor just jump to `ret`, but instead push their own stack frame.)
fn ptr_op(
ecx: &EvalContext<'a, 'mir, 'tcx, Self>,
bin_op: mir::BinOp,
- left: Scalar<Self::PointerTag>,
- left_layout: TyLayout<'tcx>,
- right: Scalar<Self::PointerTag>,
- right_layout: TyLayout<'tcx>,
+ left: ImmTy<'tcx, Self::PointerTag>,
+ right: ImmTy<'tcx, Self::PointerTag>,
) -> EvalResult<'tcx, (Scalar<Self::PointerTag>, bool)>;
/// Heap allocations via the `box` keyword.
dest: PlaceTy<'tcx, Self::PointerTag>,
) -> EvalResult<'tcx>;
- /// Add the tag for a newly allocated pointer.
+ /// Adds the tag for a newly allocated pointer.
fn tag_new_allocation(
ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
ptr: Pointer,
) -> Pointer<Self::PointerTag>;
/// Executed when evaluating the `*` operator: Following a reference.
- /// This has the chance to adjust the tag. It should not change anything else!
+ /// This has the chance to adjust the tag. It should not change anything else!
/// `mutability` can be `None` in case a raw ptr is being dereferenced.
#[inline]
fn tag_dereference(
Ok(place.ptr)
}
- /// Execute a retagging operation
+ /// Executes a retagging operation
#[inline]
fn retag(
_ecx: &mut EvalContext<'a, 'mir, 'tcx, Self>,
//! Generally, we use `Pointer` to denote memory addresses. However, some operations
//! have a "size"-like parameter, and they take `Scalar` for the address because
//! if the size is 0, then the pointer can also be a (properly aligned, non-NULL)
-//! integer. It is crucial that these operations call `check_align` *before*
+//! integer. It is crucial that these operations call `check_align` *before*
//! short-circuiting the empty case!
use std::collections::VecDeque;
// `Memory` has to depend on the `Machine` because some of its operations
// (e.g., `get`) call a `Machine` hook.
pub struct Memory<'a, 'mir, 'tcx: 'a + 'mir, M: Machine<'a, 'mir, 'tcx>> {
- /// Allocations local to this instance of the miri engine. The kind
+ /// Allocations local to this instance of the miri engine. The kind
/// helps ensure that the same mechanism is used for allocation and
- /// deallocation. When an allocation is not found here, it is a
- /// static and looked up in the `tcx` for read access. Some machines may
+ /// deallocation. When an allocation is not found here, it is a
+ /// static and looked up in the `tcx` for read access. Some machines may
/// have to mutate this map even on a read-only access to a static (because
/// they do pointer provenance tracking and the allocations in `tcx` have
/// the wrong type), so we let the machine override this type.
Ok(())
}
- /// Check that the pointer is aligned AND non-NULL. This supports ZSTs in two ways:
+ /// Checks that the pointer is aligned AND non-NULL. This supports ZSTs in two ways:
/// You can pass a scalar, and a `Pointer` does not have to actually still be allocated.
pub fn check_align(
&self,
}
}
- /// Check if the pointer is "in-bounds". Notice that a pointer pointing at the end
+ /// Checks if the pointer is "in-bounds". Notice that a pointer pointing at the end
/// of an allocation (i.e., at the first *inaccessible* location) *is* considered
/// in-bounds! This follows C's/LLVM's rules.
/// If you want to check bounds before doing a memory access, better first obtain
}
}
-/// Reading and writing
+/// Reading and writing.
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> Memory<'a, 'mir, 'tcx, M> {
pub fn copy(
&mut self,
ConstValue, Pointer, Scalar,
EvalResult, EvalErrorKind,
};
-use super::{EvalContext, Machine, MemPlace, MPlaceTy, MemoryKind};
+use super::{
+ EvalContext, Machine, AllocMap, Allocation, AllocationExtra,
+ MemPlace, MPlaceTy, PlaceTy, Place, MemoryKind,
+};
pub use rustc::mir::interpret::ScalarMaybeUndef;
/// A `Value` represents a single immediate self-contained Rust value.
}
impl<'tcx, Tag> Immediate<Tag> {
+ #[inline]
+ pub fn from_scalar(val: Scalar<Tag>) -> Self {
+ Immediate::Scalar(ScalarMaybeUndef::Scalar(val))
+ }
+
#[inline]
pub fn erase_tag(self) -> Immediate
{
}
}
- /// Convert the immediate into a pointer (or a pointer-sized integer).
+ /// Converts the immediate into a pointer (or a pointer-sized integer).
/// Throws away the second half of a ScalarPair!
#[inline]
pub fn to_scalar_ptr(self) -> EvalResult<'tcx, Scalar<Tag>> {
}
}
- /// Convert the value into its metadata.
+ /// Converts the value into its metadata.
/// Throws away the first half of a ScalarPair!
#[inline]
pub fn to_meta(self) -> EvalResult<'tcx, Option<Scalar<Tag>>> {
// as input for binary and cast operations.
#[derive(Copy, Clone, Debug)]
pub struct ImmTy<'tcx, Tag=()> {
- immediate: Immediate<Tag>,
+ pub imm: Immediate<Tag>,
pub layout: TyLayout<'tcx>,
}
type Target = Immediate<Tag>;
#[inline(always)]
fn deref(&self) -> &Immediate<Tag> {
- &self.immediate
+ &self.imm
}
}
/// An `Operand` is the result of computing a `mir::Operand`. It can be immediate,
-/// or still in memory. The latter is an optimization, to delay reading that chunk of
+/// or still in memory. The latter is an optimization, to delay reading that chunk of
/// memory and to avoid having to store arbitrary-sized data here.
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub enum Operand<Tag=(), Id=AllocId> {
#[derive(Copy, Clone, Debug, Hash, PartialEq, Eq)]
pub struct OpTy<'tcx, Tag=()> {
- crate op: Operand<Tag>, // ideally we'd make this private, but const_prop needs this
+ op: Operand<Tag>,
pub layout: TyLayout<'tcx>,
}
#[inline(always)]
fn from(val: ImmTy<'tcx, Tag>) -> Self {
OpTy {
- op: Operand::Immediate(val.immediate),
+ op: Operand::Immediate(val.imm),
layout: val.layout
}
}
}
+impl<'tcx, Tag: Copy> ImmTy<'tcx, Tag>
+{
+ #[inline]
+ pub fn from_scalar(val: Scalar<Tag>, layout: TyLayout<'tcx>) -> Self {
+ ImmTy { imm: Immediate::from_scalar(val), layout }
+ }
+
+ #[inline]
+ pub fn to_bits(self) -> EvalResult<'tcx, u128> {
+ self.to_scalar()?.to_bits(self.layout.size)
+ }
+}
+
impl<'tcx, Tag> OpTy<'tcx, Tag>
{
#[inline]
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
/// Try reading an immediate in memory; this is interesting particularly for ScalarPair.
- /// Return None if the layout does not permit loading this as a value.
+ /// Returns `None` if the layout does not permit loading this as a value.
pub(super) fn try_read_immediate_from_mplace(
&self,
mplace: MPlaceTy<'tcx, M::PointerTag>,
&self,
op: OpTy<'tcx, M::PointerTag>
) -> EvalResult<'tcx, ImmTy<'tcx, M::PointerTag>> {
- if let Ok(immediate) = self.try_read_immediate(op)? {
- Ok(ImmTy { immediate, layout: op.layout })
+ if let Ok(imm) = self.try_read_immediate(op)? {
+ Ok(ImmTy { imm, layout: op.layout })
} else {
bug!("primitive read failed for type: {:?}", op.layout.ty);
}
Ok(OpTy { op, layout })
}
+ /// Every place can be read from, so we can turn it into an operand.
+ #[inline(always)]
+ pub fn place_to_op(
+ &self,
+ place: PlaceTy<'tcx, M::PointerTag>
+ ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
+ let op = match *place {
+ Place::Ptr(mplace) => {
+ Operand::Indirect(mplace)
+ }
+ Place::Local { frame, local } =>
+ *self.stack[frame].locals[local].access()?
+ };
+ Ok(OpTy { op, layout: place.layout })
+ }
+
// Evaluate a place with the goal of reading from it. This lets us sometimes
// avoid allocations.
fn eval_place_to_op(
.collect()
}
- // Used when miri runs into a constant, and by CTFE.
- // FIXME: CTFE should use allocations, then we can make this private (embed it into
- // `eval_operand`, ideally).
- pub(crate) fn const_value_to_op(
+ // Used when Miri runs into a constant, and (indirectly through lazy_const_to_op) by CTFE.
+ fn const_value_to_op(
&self,
val: ty::LazyConst<'tcx>,
) -> EvalResult<'tcx, Operand<M::PointerTag>> {
}
}
+
+impl<'a, 'mir, 'tcx, M> EvalContext<'a, 'mir, 'tcx, M>
+where
+ M: Machine<'a, 'mir, 'tcx, PointerTag=()>,
+ // FIXME: Working around https://github.com/rust-lang/rust/issues/24159
+ M::MemoryMap: AllocMap<AllocId, (MemoryKind<M::MemoryKinds>, Allocation<(), M::AllocExtra>)>,
+ M::AllocExtra: AllocationExtra<(), M::MemoryExtra>,
+{
+ // FIXME: CTFE should use allocations, then we can remove this.
+ pub(crate) fn lazy_const_to_op(
+ &self,
+ cnst: ty::LazyConst<'tcx>,
+ ty: ty::Ty<'tcx>,
+ ) -> EvalResult<'tcx, OpTy<'tcx>> {
+ let op = self.const_value_to_op(cnst)?;
+ Ok(OpTy { op, layout: self.layout_of(ty)? })
+ }
+}
right: ImmTy<'tcx, M::PointerTag>,
dest: PlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx> {
- let (val, overflowed) = self.binary_op_imm(op, left, right)?;
+ let (val, overflowed) = self.binary_op(op, left, right)?;
let val = Immediate::ScalarPair(val.into(), Scalar::from_bool(overflowed).into());
self.write_immediate(val, dest)
}
right: ImmTy<'tcx, M::PointerTag>,
dest: PlaceTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx> {
- let (val, _overflowed) = self.binary_op_imm(op, left, right)?;
+ let (val, _overflowed) = self.binary_op(op, left, right)?;
self.write_scalar(val, dest)
}
}
Ok((val, false))
}
- /// Convenience wrapper that's useful when keeping the layout together with the
- /// immediate value.
+ /// Returns the result of the specified operation and whether it overflowed.
#[inline]
- pub fn binary_op_imm(
+ pub fn binary_op(
&self,
bin_op: mir::BinOp,
left: ImmTy<'tcx, M::PointerTag>,
right: ImmTy<'tcx, M::PointerTag>,
- ) -> EvalResult<'tcx, (Scalar<M::PointerTag>, bool)> {
- self.binary_op(
- bin_op,
- left.to_scalar()?, left.layout,
- right.to_scalar()?, right.layout,
- )
- }
-
- /// Returns the result of the specified operation and whether it overflowed.
- pub fn binary_op(
- &self,
- bin_op: mir::BinOp,
- left: Scalar<M::PointerTag>,
- left_layout: TyLayout<'tcx>,
- right: Scalar<M::PointerTag>,
- right_layout: TyLayout<'tcx>,
) -> EvalResult<'tcx, (Scalar<M::PointerTag>, bool)> {
trace!("Running binary op {:?}: {:?} ({:?}), {:?} ({:?})",
- bin_op, left, left_layout.ty, right, right_layout.ty);
+ bin_op, *left, left.layout.ty, *right, right.layout.ty);
- match left_layout.ty.sty {
+ match left.layout.ty.sty {
ty::Char => {
- assert_eq!(left_layout.ty, right_layout.ty);
- let left = left.to_char()?;
- let right = right.to_char()?;
+ assert_eq!(left.layout.ty, right.layout.ty);
+ let left = left.to_scalar()?.to_char()?;
+ let right = right.to_scalar()?.to_char()?;
self.binary_char_op(bin_op, left, right)
}
ty::Bool => {
- assert_eq!(left_layout.ty, right_layout.ty);
- let left = left.to_bool()?;
- let right = right.to_bool()?;
+ assert_eq!(left.layout.ty, right.layout.ty);
+ let left = left.to_scalar()?.to_bool()?;
+ let right = right.to_scalar()?.to_bool()?;
self.binary_bool_op(bin_op, left, right)
}
ty::Float(fty) => {
- assert_eq!(left_layout.ty, right_layout.ty);
- let left = left.to_bits(left_layout.size)?;
- let right = right.to_bits(right_layout.size)?;
+ assert_eq!(left.layout.ty, right.layout.ty);
+ let left = left.to_bits()?;
+ let right = right.to_bits()?;
self.binary_float_op(bin_op, fty, left, right)
}
_ => {
// Must be integer(-like) types. Don't forget about == on fn pointers.
- assert!(left_layout.ty.is_integral() || left_layout.ty.is_unsafe_ptr() ||
- left_layout.ty.is_fn());
- assert!(right_layout.ty.is_integral() || right_layout.ty.is_unsafe_ptr() ||
- right_layout.ty.is_fn());
+ assert!(left.layout.ty.is_integral() || left.layout.ty.is_unsafe_ptr() ||
+ left.layout.ty.is_fn());
+ assert!(right.layout.ty.is_integral() || right.layout.ty.is_unsafe_ptr() ||
+ right.layout.ty.is_fn());
// Handle operations that support pointer values
- if left.is_ptr() || right.is_ptr() || bin_op == mir::BinOp::Offset {
- return M::ptr_op(self, bin_op, left, left_layout, right, right_layout);
+ if left.to_scalar_ptr()?.is_ptr() ||
+ right.to_scalar_ptr()?.is_ptr() ||
+ bin_op == mir::BinOp::Offset
+ {
+ return M::ptr_op(self, bin_op, left, right);
}
// Everything else only works with "proper" bits
- let left = left.to_bits(left_layout.size).expect("we checked is_ptr");
- let right = right.to_bits(right_layout.size).expect("we checked is_ptr");
- self.binary_int_op(bin_op, left, left_layout, right, right_layout)
+ let l = left.to_bits().expect("we checked is_ptr");
+ let r = right.to_bits().expect("we checked is_ptr");
+ self.binary_int_op(bin_op, l, left.layout, r, right.layout)
}
}
}
pub fn unary_op(
&self,
un_op: mir::UnOp,
- val: Scalar<M::PointerTag>,
- layout: TyLayout<'tcx>,
+ val: ImmTy<'tcx, M::PointerTag>,
) -> EvalResult<'tcx, Scalar<M::PointerTag>> {
use rustc::mir::UnOp::*;
use rustc_apfloat::ieee::{Single, Double};
use rustc_apfloat::Float;
+ let layout = val.layout;
+ let val = val.to_scalar()?;
trace!("Running unary op {:?}: {:?} ({:?})", un_op, val, layout.ty.sty);
match layout.ty.sty {
/// However, it may never be undef.
pub ptr: Scalar<Tag, Id>,
pub align: Align,
- /// Metadata for unsized places. Interpretation is up to the type.
+ /// Metadata for unsized places. Interpretation is up to the type.
/// Must not be present for sized types, but can be missing for unsized types
/// (e.g., `extern type`).
pub meta: Option<Scalar<Tag, Id>>,
}
}
-impl<'tcx, Tag: ::std::fmt::Debug> OpTy<'tcx, Tag> {
+impl<'tcx, Tag: ::std::fmt::Debug + Copy> OpTy<'tcx, Tag> {
#[inline(always)]
pub fn try_as_mplace(self) -> Result<MPlaceTy<'tcx, Tag>, Immediate<Tag>> {
- match self.op {
+ match *self {
Operand::Indirect(mplace) => Ok(MPlaceTy { mplace, layout: self.layout }),
Operand::Immediate(imm) => Err(imm),
}
Deref => self.deref_operand(base.into())?,
Index(local) => {
- let n = *self.frame().locals[local].access()?;
- let n_layout = self.layout_of(self.tcx.types.usize)?;
- let n = self.read_scalar(OpTy { op: n, layout: n_layout })?;
+ let layout = self.layout_of(self.tcx.types.usize)?;
+ let n = self.access_local(self.frame(), local, Some(layout))?;
+ let n = self.read_scalar(n)?;
let n = n.to_bits(self.tcx.data_layout.pointer_size)?;
self.mplace_field(base, u64::try_from(n).unwrap())?
}
})
}
- /// Get the place of a field inside the place, and also the field's type.
+ /// Gets the place of a field inside the place, and also the field's type.
/// Just a convenience function, but used quite a bit.
/// This is the only projection that might have a side-effect: We cannot project
/// into the field of a local `ScalarPair`, we have to first allocate it.
})
}
- /// Project into a place
+ /// Projects into a place.
pub fn place_projection(
&mut self,
base: PlaceTy<'tcx, M::PointerTag>,
})
}
- /// Evaluate statics and promoteds to an `MPlace`. Used to share some code between
+ /// Evaluate statics and promoteds to an `MPlace`. Used to share some code between
/// `eval_place` and `eval_place_to_op`.
pub(super) fn eval_place_to_mplace(
&self,
})
}
- /// Compute a place. You should only use this if you intend to write into this
+ /// Computes a place. You should only use this if you intend to write into this
/// place; for reading, a more efficient alternative is `eval_place_for_read`.
pub fn eval_place(
&mut self,
}
}
- /// Copy the data from an operand to a place. This does not support transmuting!
+ /// Copies the data from an operand to a place. This does not support transmuting!
/// Use `copy_op_transmute` if the layouts could disagree.
#[inline(always)]
pub fn copy_op(
Ok(())
}
- /// Copy the data from an operand to a place. This does not support transmuting!
+ /// Copies the data from an operand to a place. This does not support transmuting!
/// Use `copy_op_transmute` if the layouts could disagree.
/// Also, if you use this you are responsible for validating that things get copied at the
/// right type.
Ok(())
}
- /// Copy the data from an operand to a place. The layouts may disagree, but they must
+ /// Copies the data from an operand to a place. The layouts may disagree, but they must
/// have the same size.
pub fn copy_op_transmute(
&mut self,
Ok(())
}
- /// Make sure that a place is in memory, and return where it is.
+ /// Ensures that a place is in memory, and returns where it is.
/// If the place currently refers to a local that doesn't yet have a matching allocation,
/// create such an allocation.
/// This is essentially `force_to_memplace`.
Ok(())
}
- /// Every place can be read from, so we can turm them into an operand
- #[inline(always)]
- pub fn place_to_op(
- &self,
- place: PlaceTy<'tcx, M::PointerTag>
- ) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>> {
- let op = match place.place {
- Place::Ptr(mplace) => {
- Operand::Indirect(mplace)
- }
- Place::Local { frame, local } =>
- *self.stack[frame].locals[local].access()?
- };
- Ok(OpTy { op, layout: place.layout })
- }
-
pub fn raw_const_to_mplace(
&self,
raw: RawConst<'tcx>,
Ok(())
}
- /// Returns true as long as there are more things to do.
+ /// Returns `true` as long as there are more things to do.
///
/// This is used by [priroda](https://github.com/oli-obk/priroda)
pub fn step(&mut self) -> EvalResult<'tcx, bool> {
UnaryOp(un_op, ref operand) => {
// The operand always has the same type as the result.
let val = self.read_immediate(self.eval_operand(operand, Some(dest.layout))?)?;
- let val = self.unary_op(un_op, val.to_scalar()?, dest.layout)?;
+ let val = self.unary_op(un_op, val)?;
self.write_scalar(val, dest)?;
}
use rustc::mir::interpret::{EvalResult, PointerArithmetic, EvalErrorKind, Scalar};
use super::{
- EvalContext, Machine, Immediate, OpTy, PlaceTy, MPlaceTy, Operand, StackPopCleanup
+ EvalContext, Machine, Immediate, OpTy, ImmTy, PlaceTy, MPlaceTy, StackPopCleanup
};
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
// Compare using binary_op, to also support pointer values
let const_int = Scalar::from_uint(const_int, discr.layout.size);
let (res, _) = self.binary_op(mir::BinOp::Eq,
- discr.to_scalar()?, discr.layout,
- const_int, discr.layout,
+ discr,
+ ImmTy::from_scalar(const_int, discr.layout),
)?;
if res.to_bool()? {
target_block = targets[index];
let mut args = args.to_vec();
let pointee = args[0].layout.ty.builtin_deref(true).unwrap().ty;
let fake_fat_ptr_ty = self.tcx.mk_mut_ptr(pointee);
- args[0].layout = self.layout_of(fake_fat_ptr_ty)?.field(self, 0)?;
- args[0].op = Operand::Immediate(Immediate::Scalar(ptr.ptr.into())); // strip vtable
+ args[0] = OpTy::from(ImmTy { // strip vtable
+ layout: self.layout_of(fake_fat_ptr_ty)?.field(self, 0)?,
+ imm: Immediate::Scalar(ptr.ptr.into())
+ });
trace!("Patched self operand to {:#?}", args[0]);
// recurse with concrete function
self.eval_fn_call(instance, span, caller_abi, &args, dest, ret)
_ => (instance, place),
};
- let arg = OpTy {
- op: Operand::Immediate(place.to_ref()),
+ let arg = ImmTy {
+ imm: place.to_ref(),
layout: self.layout_of(self.tcx.mk_mut_ptr(place.layout.ty))?,
};
instance,
span,
Abi::Rust,
- &[arg],
+ &[arg.into()],
Some(dest.into()),
Some(target),
)
let (ty, poly_trait_ref) = self.tcx.erase_regions(&(ty, poly_trait_ref));
if let Some(&vtable) = self.vtables.get(&(ty, poly_trait_ref)) {
+ // This means we guarantee that there are no duplicate vtables, we will
+ // always use the same vtable for the same (Type, Trait) combination.
+ // That's not what happens in rustc, but emulating per-crate deduplication
+ // does not sound like it actually makes anything any better.
return Ok(Pointer::from(vtable).with_default_tag());
}
Ok(vtable)
}
- /// Return the drop fn instance as well as the actual dynamic type
+ /// Returns the drop fn instance as well as the actual dynamic type
pub fn read_drop_type_from_vtable(
&self,
vtable: Pointer<M::PointerTag>,
}
impl<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>> EvalContext<'a, 'mir, 'tcx, M> {
- /// This function checks the data at `op`. `op` is assumed to cover valid memory if it
+ /// This function checks the data at `op`. `op` is assumed to cover valid memory if it
/// is an indirect operand.
/// It will error if the bits at the destination do not match the ones described by the layout.
///
// that's just more convenient to work with (avoids repeating all the `Machine` bounds).
pub trait Value<'a, 'mir, 'tcx, M: Machine<'a, 'mir, 'tcx>>: Copy
{
- /// Get this value's layout.
+ /// Gets this value's layout.
fn layout(&self) -> TyLayout<'tcx>;
- /// Make this into an `OpTy`.
+ /// Makes this into an `OpTy`.
fn to_op(
self,
ecx: &EvalContext<'a, 'mir, 'tcx, M>,
) -> EvalResult<'tcx, OpTy<'tcx, M::PointerTag>>;
- /// Create this from an `MPlaceTy`.
+ /// Creates this from an `MPlaceTy`.
fn from_mem_place(mplace: MPlaceTy<'tcx, M::PointerTag>) -> Self;
- /// Project to the given enum variant.
+ /// Projects to the given enum variant.
fn project_downcast(
self,
ecx: &EvalContext<'a, 'mir, 'tcx, M>,
variant: VariantIdx,
) -> EvalResult<'tcx, Self>;
- /// Project to the n-th field.
+ /// Projects to the n-th field.
fn project_field(
self,
ecx: &EvalContext<'a, 'mir, 'tcx, M>,
-> &$($mutability)? EvalContext<'a, 'mir, 'tcx, M>;
// Recursive actions, ready to be overloaded.
- /// Visit the given value, dispatching as appropriate to more specialized visitors.
+ /// Visits the given value, dispatching as appropriate to more specialized visitors.
#[inline(always)]
fn visit_value(&mut self, v: Self::V) -> EvalResult<'tcx>
{
self.walk_value(v)
}
- /// Visit the given value as a union. No automatic recursion can happen here.
+ /// Visits the given value as a union. No automatic recursion can happen here.
#[inline(always)]
fn visit_union(&mut self, _v: Self::V) -> EvalResult<'tcx>
{
Ok(())
}
- /// Visit this vale as an aggregate, you are even getting an iterator yielding
+ /// Visits this value as an aggregate, you are even getting an iterator yielding
/// all the fields (still in an `EvalResult`, you have to do error handling yourself).
/// Recurses into the fields.
#[inline(always)]
self.visit_value(new_val)
}
- /// Called for recursing into the field of a generator. These are not known to be
+ /// Called for recursing into the field of a generator. These are not known to be
/// initialized, so we treat them like unions.
#[inline(always)]
fn visit_generator_field(
fn visit_scalar(&mut self, _v: Self::V, _layout: &layout::Scalar) -> EvalResult<'tcx>
{ Ok(()) }
- /// Called whenever we reach a value of primitive type. There can be no recursion
- /// below such a value. This is the leaf function.
+ /// Called whenever we reach a value of primitive type. There can be no recursion
+ /// below such a value. This is the leaf function.
/// We do *not* provide an `ImmTy` here because some implementations might want
/// to write to the place this primitive lives in.
#[inline(always)]
codegen_fn_attrs.linkage
}
- /// Returns whether this instance is instantiable - whether it has no unsatisfied
+ /// Returns `true` if this instance is instantiable - whether it has no unsatisfied
/// predicates.
///
/// In order to codegen an item, all of its predicates must hold, because
pub use rustc::mir::mono::CodegenUnit;
pub enum PartitioningStrategy {
- /// Generate one codegen unit per source-level module.
+ /// Generates one codegen unit per source-level module.
PerModule,
/// Partition the whole crate into a fixed number of codegen units.
}
}
-/// Build a `Clone::clone` shim for `self_ty`. Here, `def_id` is `Clone::clone`.
+/// Builds a `Clone::clone` shim for `self_ty`. Here, `def_id` is `Clone::clone`.
fn build_clone_shim<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
self_ty: Ty<'tcx>)
}
}
-/// Build a "call" shim for `def_id`. The shim calls the
+/// Builds a "call" shim for `def_id`. The shim calls the
/// function specified by `call_kind`, first adjusting its first
/// argument according to `rcvr_adjustment`.
///
source_info: SourceInfo,
tcx: TyCtxt<'a, 'tcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
- /// mark an `unsafe` block as used, so we don't lint it
+ /// Mark an `unsafe` block as used, so we don't lint it.
used_unsafe: FxHashSet<ast::NodeId>,
inherited_blocks: Vec<(ast::NodeId, bool)>,
}
&message);
}
-/// Return the NodeId for an enclosing scope that is also `unsafe`
+/// Returns the `NodeId` for an enclosing scope that is also `unsafe`.
fn is_enclosed(tcx: TyCtxt<'_, '_, '_>,
used_unsafe: &FxHashSet<ast::NodeId>,
id: ast::NodeId) -> Option<(String, ast::NodeId)> {
HasTyCtxt, TargetDataLayout, HasDataLayout,
};
-use crate::interpret::{self, EvalContext, ScalarMaybeUndef, Immediate, OpTy, MemoryKind};
+use crate::interpret::{EvalContext, ScalarMaybeUndef, Immediate, OpTy, ImmTy, MemoryKind};
use crate::const_eval::{
CompileTimeInterpreter, error_to_const_error, eval_promoted, mk_eval_cx,
- lazy_const_to_op,
};
use crate::transform::{MirPass, MirSource};
source_info: SourceInfo,
) -> Option<Const<'tcx>> {
self.ecx.tcx.span = source_info.span;
- match lazy_const_to_op(&self.ecx, *c.literal, c.ty) {
+ match self.ecx.lazy_const_to_op(*c.literal, c.ty) {
Ok(op) => {
Some((op, c.span))
},
Rvalue::Len(_) => None,
Rvalue::NullaryOp(NullOp::SizeOf, ty) => {
type_size_of(self.tcx, self.param_env, ty).and_then(|n| Some((
- OpTy {
- op: interpret::Operand::Immediate(Immediate::Scalar(
+ ImmTy {
+ imm: Immediate::Scalar(
Scalar::Bits {
bits: n as u128,
size: self.tcx.data_layout.pointer_size.bytes() as u8,
}.into()
- )),
+ ),
layout: self.tcx.layout_of(self.param_env.and(self.tcx.types.usize)).ok()?,
- },
+ }.into(),
span,
)))
}
let (arg, _) = self.eval_operand(arg, source_info)?;
let val = self.use_ecx(source_info, |this| {
- let prim = this.ecx.read_scalar(arg)?.not_undef()?;
+ let prim = this.ecx.read_immediate(arg)?;
match op {
UnOp::Neg => {
// Need to do overflow check here: For actual CTFE, MIR
// generation emits code that does this before calling the op.
- let size = arg.layout.size;
- if prim.to_bits(size)? == (1 << (size.bits() - 1)) {
+ if prim.to_bits()? == (1 << (prim.layout.size.bits() - 1)) {
return err!(OverflowNeg);
}
}
}
}
// Now run the actual operation.
- this.ecx.unary_op(op, prim, arg.layout)
+ this.ecx.unary_op(op, prim)
})?;
- let res = OpTy {
- op: interpret::Operand::Immediate(Immediate::Scalar(val.into())),
+ let res = ImmTy {
+ imm: Immediate::Scalar(val.into()),
layout: place_layout,
};
- Some((res, span))
+ Some((res.into(), span))
}
Rvalue::CheckedBinaryOp(op, ref left, ref right) |
Rvalue::BinaryOp(op, ref left, ref right) => {
})?;
trace!("const evaluating {:?} for {:?} and {:?}", op, left, right);
let (val, overflow) = self.use_ecx(source_info, |this| {
- this.ecx.binary_op_imm(op, l, r)
+ this.ecx.binary_op(op, l, r)
})?;
let val = if let Rvalue::CheckedBinaryOp(..) = *rvalue {
Immediate::ScalarPair(
}
Immediate::Scalar(val.into())
};
- let res = OpTy {
- op: interpret::Operand::Immediate(val),
+ let res = ImmTy {
+ imm: val,
layout: place_layout,
};
- Some((res, span))
+ Some((res.into(), span))
},
}
}
}
}
-/// Return the set of basic blocks whose unwind edges are known
+/// Returns the set of basic blocks whose unwind edges are known
/// to not be reachable, because they are `drop` terminators
/// that can't drop anything.
fn find_dead_unwinds<'a, 'tcx>(
//! This pass erases all early-bound regions from the types occurring in the MIR.
//! We want to do this once just before codegen, so codegen does not have to take
//! care erasing regions all over the place.
-//! NOTE: We do NOT erase regions of statements that are relevant for
-//! "types-as-contracts"-validation, namely, AcquireValid, ReleaseValid
+//! N.B., we do _not_ erase regions of statements that are relevant for
+//! "types-as-contracts"-validation, namely, `AcquireValid` and `ReleaseValid`.
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
tcx.mir_keys(def_id.krate).contains(&def_id)
}
-/// Finds the full set of def-ids within the current crate that have
+/// Finds the full set of `DefId`s within the current crate that have
/// MIR associated with them.
fn mir_keys<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, krate: CrateNum)
-> Lrc<DefIdSet> {
});
}
- /// Copy the initialization of this temp to the
+ /// Copies the initialization of this temp to the
/// promoted MIR, recursing through temps.
fn promote_temp(&mut self, temp: Local) -> Local {
let old_keep_original = self.keep_original;
use rustc::middle::lang_items;
use rustc::session::config::nightly_options;
use syntax::ast::LitKind;
-use syntax::feature_gate::{UnstableFeatures, emit_feature_err, GateIssue};
+use syntax::feature_gate::{emit_feature_err, GateIssue};
use syntax_pos::{Span, DUMMY_SP};
use std::fmt;
+use std::ops::{Deref, Index, IndexMut};
use std::usize;
use crate::transform::{MirPass, MirSource};
use super::promote_consts::{self, Candidate, TempState};
-bitflags::bitflags! {
- // Borrows of temporaries can be promoted only if
- // they have none of these qualifications, with
- // the exception of `STATIC_REF` (in statics only).
- struct Qualif: u8 {
- // Constant containing interior mutability (UnsafeCell).
- const MUTABLE_INTERIOR = 1 << 0;
-
- // Constant containing an ADT that implements Drop.
- const NEEDS_DROP = 1 << 1;
-
- // Function argument.
- const FN_ARGUMENT = 1 << 2;
-
- // Not constant at all - non-`const fn` calls, asm!,
- // pointer comparisons, ptr-to-int casts, etc.
- const NOT_CONST = 1 << 3;
-
- // Refers to temporaries which cannot be promoted as
- // promote_consts decided they weren't simple enough.
- const NOT_PROMOTABLE = 1 << 4;
-
- // Const items can only have MUTABLE_INTERIOR
- // and NOT_PROMOTABLE without producing an error.
- const CONST_ERROR = !Qualif::MUTABLE_INTERIOR.bits &
- !Qualif::NOT_PROMOTABLE.bits;
- }
-}
-
-impl<'a, 'tcx> Qualif {
- /// Remove flags which are impossible for the given type.
- fn restrict(&mut self, ty: Ty<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>) {
- if ty.is_freeze(tcx, param_env, DUMMY_SP) {
- *self = *self - Qualif::MUTABLE_INTERIOR;
- }
- if !ty.needs_drop(tcx, param_env) {
- *self = *self - Qualif::NEEDS_DROP;
- }
- }
-}
-
/// What kind of item we are in.
#[derive(Copy, Clone, Debug, PartialEq, Eq)]
enum Mode {
}
}
-struct Qualifier<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
+const QUALIF_COUNT: usize = 4;
+
+// FIXME(eddyb) once we can use const generics, replace this array with
+// something like `IndexVec` but for fixed-size arrays (`IndexArray`?).
+#[derive(Copy, Clone, Default)]
+struct PerQualif<T>([T; QUALIF_COUNT]);
+
+impl<T: Clone> PerQualif<T> {
+ fn new(x: T) -> Self {
+ PerQualif([x.clone(), x.clone(), x.clone(), x])
+ }
+}
+
+impl<T> PerQualif<T> {
+ fn as_mut(&mut self) -> PerQualif<&mut T> {
+ let [x0, x1, x2, x3] = &mut self.0;
+ PerQualif([x0, x1, x2, x3])
+ }
+
+ fn zip<U>(self, other: PerQualif<U>) -> PerQualif<(T, U)> {
+ let [x0, x1, x2, x3] = self.0;
+ let [y0, y1, y2, y3] = other.0;
+ PerQualif([(x0, y0), (x1, y1), (x2, y2), (x3, y3)])
+ }
+}
+
+impl PerQualif<bool> {
+ fn encode_to_bits(self) -> u8 {
+ self.0.iter().enumerate().fold(0, |bits, (i, &qualif)| {
+ bits | ((qualif as u8) << i)
+ })
+ }
+
+ fn decode_from_bits(bits: u8) -> Self {
+ let mut qualifs = Self::default();
+ for (i, qualif) in qualifs.0.iter_mut().enumerate() {
+ *qualif = (bits & (1 << i)) != 0;
+ }
+ qualifs
+ }
+}
+
+impl<Q: Qualif, T> Index<Q> for PerQualif<T> {
+ type Output = T;
+
+ fn index(&self, _: Q) -> &T {
+ &self.0[Q::IDX]
+ }
+}
+
+impl<Q: Qualif, T> IndexMut<Q> for PerQualif<T> {
+ fn index_mut(&mut self, _: Q) -> &mut T {
+ &mut self.0[Q::IDX]
+ }
+}
+
+struct ConstCx<'a, 'tcx> {
+ tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ param_env: ty::ParamEnv<'tcx>,
mode: Mode,
+ mir: &'a Mir<'tcx>,
+
+ per_local: PerQualif<BitSet<Local>>,
+}
+
+impl<'a, 'tcx> ConstCx<'a, 'tcx> {
+ fn is_const_panic_fn(&self, def_id: DefId) -> bool {
+ Some(def_id) == self.tcx.lang_items().panic_fn() ||
+ Some(def_id) == self.tcx.lang_items().begin_panic_fn()
+ }
+}
+
+#[derive(Copy, Clone, Debug)]
+enum ValueSource<'a, 'tcx> {
+ Rvalue(&'a Rvalue<'tcx>),
+ Call {
+ callee: &'a Operand<'tcx>,
+ args: &'a [Operand<'tcx>],
+ return_ty: Ty<'tcx>,
+ },
+}
+
+trait Qualif {
+ const IDX: usize;
+
+ /// Returns the qualification that is (conservatively) correct for any value
+ /// of the type, or `None` if the qualification is not value/type-based.
+ fn in_any_value_of_ty(_cx: &ConstCx<'_, 'tcx>, _ty: Ty<'tcx>) -> Option<bool> {
+ None
+ }
+
+ /// Returns a mask for the qualification, given a type. This is `false` iff
+ /// no value of that type can have the qualification.
+ fn mask_for_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> bool {
+ Self::in_any_value_of_ty(cx, ty).unwrap_or(true)
+ }
+
+ fn in_local(cx: &ConstCx<'_, '_>, local: Local) -> bool {
+ cx.per_local.0[Self::IDX].contains(local)
+ }
+
+ fn in_static(_cx: &ConstCx<'_, 'tcx>, _static: &Static<'tcx>) -> bool {
+ // FIXME(eddyb) should we do anything here for value properties?
+ false
+ }
+
+ fn in_projection_structurally(
+ cx: &ConstCx<'_, 'tcx>,
+ proj: &PlaceProjection<'tcx>,
+ ) -> bool {
+ let base_qualif = Self::in_place(cx, &proj.base);
+ let qualif = base_qualif && Self::mask_for_ty(
+ cx,
+ proj.base.ty(cx.mir, cx.tcx)
+ .projection_ty(cx.tcx, &proj.elem)
+ .to_ty(cx.tcx),
+ );
+ match proj.elem {
+ ProjectionElem::Deref |
+ ProjectionElem::Subslice { .. } |
+ ProjectionElem::Field(..) |
+ ProjectionElem::ConstantIndex { .. } |
+ ProjectionElem::Downcast(..) => qualif,
+
+ ProjectionElem::Index(local) => qualif || Self::in_local(cx, local),
+ }
+ }
+
+ fn in_projection(cx: &ConstCx<'_, 'tcx>, proj: &PlaceProjection<'tcx>) -> bool {
+ Self::in_projection_structurally(cx, proj)
+ }
+
+ fn in_place(cx: &ConstCx<'_, 'tcx>, place: &Place<'tcx>) -> bool {
+ match *place {
+ Place::Local(local) => Self::in_local(cx, local),
+ Place::Promoted(_) => bug!("qualifying already promoted MIR"),
+ Place::Static(ref static_) => Self::in_static(cx, static_),
+ Place::Projection(ref proj) => Self::in_projection(cx, proj),
+ }
+ }
+
+ fn in_operand(cx: &ConstCx<'_, 'tcx>, operand: &Operand<'tcx>) -> bool {
+ match *operand {
+ Operand::Copy(ref place) |
+ Operand::Move(ref place) => Self::in_place(cx, place),
+
+ Operand::Constant(ref constant) => {
+ if let ty::LazyConst::Unevaluated(def_id, _) = constant.literal {
+ // Don't peek inside trait associated constants.
+ if cx.tcx.trait_of_item(*def_id).is_some() {
+ Self::in_any_value_of_ty(cx, constant.ty).unwrap_or(false)
+ } else {
+ let (bits, _) = cx.tcx.at(constant.span).mir_const_qualif(*def_id);
+
+ let qualif = PerQualif::decode_from_bits(bits).0[Self::IDX];
+
+ // Just in case the type is more specific than
+ // the definition, e.g., impl associated const
+ // with type parameters, take it into account.
+ qualif && Self::mask_for_ty(cx, constant.ty)
+ }
+ } else {
+ false
+ }
+ }
+ }
+ }
+
+ fn in_rvalue_structurally(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
+ match *rvalue {
+ Rvalue::NullaryOp(..) => false,
+
+ Rvalue::Discriminant(ref place) |
+ Rvalue::Len(ref place) => Self::in_place(cx, place),
+
+ Rvalue::Use(ref operand) |
+ Rvalue::Repeat(ref operand, _) |
+ Rvalue::UnaryOp(_, ref operand) |
+ Rvalue::Cast(_, ref operand, _) => Self::in_operand(cx, operand),
+
+ Rvalue::BinaryOp(_, ref lhs, ref rhs) |
+ Rvalue::CheckedBinaryOp(_, ref lhs, ref rhs) => {
+ Self::in_operand(cx, lhs) || Self::in_operand(cx, rhs)
+ }
+
+ Rvalue::Ref(_, _, ref place) => {
+ // Special-case reborrows to be more like a copy of the reference.
+ if let Place::Projection(ref proj) = *place {
+ if let ProjectionElem::Deref = proj.elem {
+ let base_ty = proj.base.ty(cx.mir, cx.tcx).to_ty(cx.tcx);
+ if let ty::Ref(..) = base_ty.sty {
+ return Self::in_place(cx, &proj.base);
+ }
+ }
+ }
+
+ Self::in_place(cx, place)
+ }
+
+ Rvalue::Aggregate(_, ref operands) => {
+ operands.iter().any(|o| Self::in_operand(cx, o))
+ }
+ }
+ }
+
+ fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
+ Self::in_rvalue_structurally(cx, rvalue)
+ }
+
+ fn in_call(
+ cx: &ConstCx<'_, 'tcx>,
+ _callee: &Operand<'tcx>,
+ _args: &[Operand<'tcx>],
+ return_ty: Ty<'tcx>,
+ ) -> bool {
+ // Be conservative about the returned value of a const fn.
+ Self::in_any_value_of_ty(cx, return_ty).unwrap_or(false)
+ }
+
+ fn in_value(cx: &ConstCx<'_, 'tcx>, source: ValueSource<'_, 'tcx>) -> bool {
+ match source {
+ ValueSource::Rvalue(rvalue) => Self::in_rvalue(cx, rvalue),
+ ValueSource::Call { callee, args, return_ty } => {
+ Self::in_call(cx, callee, args, return_ty)
+ }
+ }
+ }
+}
+
+// Constant containing interior mutability (UnsafeCell).
+struct HasMutInterior;
+
+impl Qualif for HasMutInterior {
+ const IDX: usize = 0;
+
+ fn in_any_value_of_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> Option<bool> {
+ Some(!ty.is_freeze(cx.tcx, cx.param_env, DUMMY_SP))
+ }
+
+ fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
+ match *rvalue {
+ // Returning `true` for `Rvalue::Ref` indicates the borrow isn't
+ // allowed in constants (and the `Checker` will error), and/or it
+ // won't be promoted, due to `&mut ...` or interior mutability.
+ Rvalue::Ref(_, kind, ref place) => {
+ let ty = place.ty(cx.mir, cx.tcx).to_ty(cx.tcx);
+
+ if let BorrowKind::Mut { .. } = kind {
+ // In theory, any zero-sized value could be borrowed
+ // mutably without consequences. However, only &mut []
+ // is allowed right now, and only in functions.
+ if cx.mode == Mode::StaticMut {
+ // Inside a `static mut`, &mut [...] is also allowed.
+ match ty.sty {
+ ty::Array(..) | ty::Slice(_) => {}
+ _ => return true,
+ }
+ } else if let ty::Array(_, len) = ty.sty {
+ // FIXME(eddyb) the `cx.mode == Mode::Fn` condition
+ // seems unnecessary, given that this is merely a ZST.
+ if !(len.unwrap_usize(cx.tcx) == 0 && cx.mode == Mode::Fn) {
+ return true;
+ }
+ } else {
+ return true;
+ }
+ }
+ }
+
+ Rvalue::Aggregate(ref kind, _) => {
+ if let AggregateKind::Adt(def, ..) = **kind {
+ if Some(def.did) == cx.tcx.lang_items().unsafe_cell_type() {
+ let ty = rvalue.ty(cx.mir, cx.tcx);
+ assert_eq!(Self::in_any_value_of_ty(cx, ty), Some(true));
+ return true;
+ }
+ }
+ }
+
+ _ => {}
+ }
+
+ Self::in_rvalue_structurally(cx, rvalue)
+ }
+}
+
+// Constant containing an ADT that implements Drop.
+struct NeedsDrop;
+
+impl Qualif for NeedsDrop {
+ const IDX: usize = 1;
+
+ fn in_any_value_of_ty(cx: &ConstCx<'_, 'tcx>, ty: Ty<'tcx>) -> Option<bool> {
+ Some(ty.needs_drop(cx.tcx, cx.param_env))
+ }
+
+ fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
+ if let Rvalue::Aggregate(ref kind, _) = *rvalue {
+ if let AggregateKind::Adt(def, ..) = **kind {
+ if def.has_dtor(cx.tcx) {
+ return true;
+ }
+ }
+ }
+
+ Self::in_rvalue_structurally(cx, rvalue)
+ }
+}
+
+// Not constant at all - non-`const fn` calls, asm!,
+// pointer comparisons, ptr-to-int casts, etc.
+struct IsNotConst;
+
+impl Qualif for IsNotConst {
+ const IDX: usize = 2;
+
+ fn in_static(cx: &ConstCx<'_, 'tcx>, static_: &Static<'tcx>) -> bool {
+ // Only allow statics (not consts) to refer to other statics.
+ let allowed = cx.mode == Mode::Static || cx.mode == Mode::StaticMut;
+
+ !allowed ||
+ cx.tcx.get_attrs(static_.def_id).iter().any(|attr| attr.check_name("thread_local"))
+ }
+
+ fn in_projection(cx: &ConstCx<'_, 'tcx>, proj: &PlaceProjection<'tcx>) -> bool {
+ match proj.elem {
+ ProjectionElem::Deref |
+ ProjectionElem::Downcast(..) => return true,
+
+ ProjectionElem::ConstantIndex {..} |
+ ProjectionElem::Subslice {..} |
+ ProjectionElem::Index(_) => {}
+
+ ProjectionElem::Field(..) => {
+ if cx.mode == Mode::Fn {
+ let base_ty = proj.base.ty(cx.mir, cx.tcx).to_ty(cx.tcx);
+ if let Some(def) = base_ty.ty_adt_def() {
+ if def.is_union() {
+ return true;
+ }
+ }
+ }
+ }
+ }
+
+ Self::in_projection_structurally(cx, proj)
+ }
+
+ fn in_rvalue(cx: &ConstCx<'_, 'tcx>, rvalue: &Rvalue<'tcx>) -> bool {
+ match *rvalue {
+ Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) if cx.mode == Mode::Fn => {
+ let operand_ty = operand.ty(cx.mir, cx.tcx);
+ let cast_in = CastTy::from_ty(operand_ty).expect("bad input type for cast");
+ let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
+ match (cast_in, cast_out) {
+ (CastTy::Ptr(_), CastTy::Int(_)) |
+ (CastTy::FnPtr, CastTy::Int(_)) => {
+ // in normal functions, mark such casts as not promotable
+ return true;
+ }
+ _ => {}
+ }
+ }
+
+ Rvalue::BinaryOp(op, ref lhs, _) if cx.mode == Mode::Fn => {
+ if let ty::RawPtr(_) | ty::FnPtr(..) = lhs.ty(cx.mir, cx.tcx).sty {
+ assert!(op == BinOp::Eq || op == BinOp::Ne ||
+ op == BinOp::Le || op == BinOp::Lt ||
+ op == BinOp::Ge || op == BinOp::Gt ||
+ op == BinOp::Offset);
+
+ // raw pointer operations are not allowed inside promoteds
+ return true;
+ }
+ }
+
+ Rvalue::NullaryOp(NullOp::Box, _) => return true,
+
+ _ => {}
+ }
+
+ Self::in_rvalue_structurally(cx, rvalue)
+ }
+
+ fn in_call(
+ cx: &ConstCx<'_, 'tcx>,
+ callee: &Operand<'tcx>,
+ args: &[Operand<'tcx>],
+ _return_ty: Ty<'tcx>,
+ ) -> bool {
+ let fn_ty = callee.ty(cx.mir, cx.tcx);
+ match fn_ty.sty {
+ ty::FnDef(def_id, _) => {
+ match cx.tcx.fn_sig(def_id).abi() {
+ Abi::RustIntrinsic |
+ Abi::PlatformIntrinsic => {
+ assert!(!cx.tcx.is_const_fn(def_id));
+ match &cx.tcx.item_name(def_id).as_str()[..] {
+ | "size_of"
+ | "min_align_of"
+ | "needs_drop"
+ | "type_id"
+ | "bswap"
+ | "bitreverse"
+ | "ctpop"
+ | "cttz"
+ | "cttz_nonzero"
+ | "ctlz"
+ | "ctlz_nonzero"
+ | "overflowing_add"
+ | "overflowing_sub"
+ | "overflowing_mul"
+ | "unchecked_shl"
+ | "unchecked_shr"
+ | "rotate_left"
+ | "rotate_right"
+ | "add_with_overflow"
+ | "sub_with_overflow"
+ | "mul_with_overflow"
+ | "saturating_add"
+ | "saturating_sub"
+ | "transmute"
+ => return true,
+
+ _ => {}
+ }
+ }
+ _ => {
+ let is_const_fn =
+ cx.tcx.is_const_fn(def_id) ||
+ cx.tcx.is_unstable_const_fn(def_id).is_some() ||
+ cx.is_const_panic_fn(def_id);
+ if !is_const_fn {
+ return true;
+ }
+ }
+ }
+ }
+ _ => return true,
+ }
+
+ Self::in_operand(cx, callee) || args.iter().any(|arg| Self::in_operand(cx, arg))
+ }
+}
+
+// Refers to temporaries which cannot be promoted as
+// promote_consts decided they weren't simple enough.
+struct IsNotPromotable;
+
+impl Qualif for IsNotPromotable {
+ const IDX: usize = 3;
+
+ fn in_call(
+ cx: &ConstCx<'_, 'tcx>,
+ callee: &Operand<'tcx>,
+ _args: &[Operand<'tcx>],
+ _return_ty: Ty<'tcx>,
+ ) -> bool {
+ if cx.mode == Mode::Fn {
+ if let ty::FnDef(def_id, _) = callee.ty(cx.mir, cx.tcx).sty {
+ // Never promote runtime `const fn` calls of
+ // functions without `#[rustc_promotable]`.
+ if !cx.tcx.is_promotable_const_fn(def_id) {
+ return true;
+ }
+ }
+ }
+
+ // FIXME(eddyb) do we need "not promotable" in anything
+ // other than `Mode::Fn` by any chance?
+
+ false
+ }
+}
+
+// Ensure the `IDX` values are sequential (`0..QUALIF_COUNT`).
+macro_rules! static_assert_seq_qualifs {
+ ($i:expr => $first:ident $(, $rest:ident)*) => {
+ static_assert!(SEQ_QUALIFS: {
+ static_assert_seq_qualifs!($i + 1 => $($rest),*);
+
+ $first::IDX == $i
+ });
+ };
+ ($i:expr =>) => {
+ static_assert!(SEQ_QUALIFS: QUALIF_COUNT == $i);
+ };
+}
+static_assert_seq_qualifs!(0 => HasMutInterior, NeedsDrop, IsNotConst, IsNotPromotable);
+
+impl ConstCx<'_, 'tcx> {
+ fn qualifs_in_any_value_of_ty(&self, ty: Ty<'tcx>) -> PerQualif<bool> {
+ let mut qualifs = PerQualif::default();
+ qualifs[HasMutInterior] = HasMutInterior::in_any_value_of_ty(self, ty).unwrap_or(false);
+ qualifs[NeedsDrop] = NeedsDrop::in_any_value_of_ty(self, ty).unwrap_or(false);
+ qualifs[IsNotConst] = IsNotConst::in_any_value_of_ty(self, ty).unwrap_or(false);
+ qualifs[IsNotPromotable] = IsNotPromotable::in_any_value_of_ty(self, ty).unwrap_or(false);
+ qualifs
+ }
+
+ fn qualifs_in_local(&self, local: Local) -> PerQualif<bool> {
+ let mut qualifs = PerQualif::default();
+ qualifs[HasMutInterior] = HasMutInterior::in_local(self, local);
+ qualifs[NeedsDrop] = NeedsDrop::in_local(self, local);
+ qualifs[IsNotConst] = IsNotConst::in_local(self, local);
+ qualifs[IsNotPromotable] = IsNotPromotable::in_local(self, local);
+ qualifs
+ }
+
+ fn qualifs_in_value(&self, source: ValueSource<'_, 'tcx>) -> PerQualif<bool> {
+ let mut qualifs = PerQualif::default();
+ qualifs[HasMutInterior] = HasMutInterior::in_value(self, source);
+ qualifs[NeedsDrop] = NeedsDrop::in_value(self, source);
+ qualifs[IsNotConst] = IsNotConst::in_value(self, source);
+ qualifs[IsNotPromotable] = IsNotPromotable::in_value(self, source);
+ qualifs
+ }
+}
+
+struct Checker<'a, 'tcx> {
+ cx: ConstCx<'a, 'tcx>,
+
span: Span,
def_id: DefId,
- mir: &'a Mir<'tcx>,
rpo: ReversePostorder<'a, 'tcx>,
- tcx: TyCtxt<'a, 'gcx, 'tcx>,
- param_env: ty::ParamEnv<'tcx>,
- local_qualif: IndexVec<Local, Option<Qualif>>,
- qualif: Qualif,
+
temp_promotion_state: IndexVec<Local, TempState>,
- promotion_candidates: Vec<Candidate>
+ promotion_candidates: Vec<Candidate>,
}
macro_rules! unleash_miri {
}}
}
-impl<'a, 'tcx> Qualifier<'a, 'tcx, 'tcx> {
+impl Deref for Checker<'a, 'tcx> {
+ type Target = ConstCx<'a, 'tcx>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.cx
+ }
+}
+
+impl<'a, 'tcx> Checker<'a, 'tcx> {
fn new(tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
mir: &'a Mir<'tcx>,
mode: Mode)
- -> Qualifier<'a, 'tcx, 'tcx> {
+ -> Self {
assert!(def_id.is_local());
let mut rpo = traversal::reverse_postorder(mir);
let temps = promote_consts::collect_temps(mir, &mut rpo);
let param_env = tcx.param_env(def_id);
- let mut local_qualif = IndexVec::from_elem(None, &mir.local_decls);
- for arg in mir.args_iter() {
- let mut qualif = Qualif::NEEDS_DROP;
- qualif.restrict(mir.local_decls[arg].ty, tcx, param_env);
- local_qualif[arg] = Some(qualif);
+ let mut cx = ConstCx {
+ tcx,
+ param_env,
+ mode,
+ mir,
+ per_local: PerQualif::new(BitSet::new_empty(mir.local_decls.len())),
+ };
+
+ for (local, decl) in mir.local_decls.iter_enumerated() {
+ match mir.local_kind(local) {
+ LocalKind::Arg => {
+ let qualifs = cx.qualifs_in_any_value_of_ty(decl.ty);
+ for (per_local, qualif) in &mut cx.per_local.as_mut().zip(qualifs).0 {
+ if *qualif {
+ per_local.insert(local);
+ }
+ }
+ cx.per_local[IsNotPromotable].insert(local);
+ }
+
+ LocalKind::Var if mode == Mode::Fn => {
+ cx.per_local[IsNotConst].insert(local);
+ }
+
+ LocalKind::Temp if !temps[local].is_promotable() => {
+ cx.per_local[IsNotPromotable].insert(local);
+ }
+
+ _ => {}
+ }
}
- Qualifier {
- mode,
+ Checker {
+ cx,
span: mir.span,
def_id,
- mir,
rpo,
- tcx,
- param_env,
- local_qualif,
- qualif: Qualif::empty(),
temp_promotion_state: temps,
promotion_candidates: vec![]
}
// slightly pointless (even with feature-gating).
fn not_const(&mut self) {
unleash_miri!(self);
- self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
let mut err = struct_span_err!(
self.tcx.sess,
}
}
- /// Add the given qualification to self.qualif.
- fn add(&mut self, qualif: Qualif) {
- self.qualif = self.qualif | qualif;
- }
+ /// Assigns an rvalue/call qualification to the given destination.
+ fn assign(&mut self, dest: &Place<'tcx>, source: ValueSource<'_, 'tcx>, location: Location) {
+ trace!("assign: {:?} <- {:?}", dest, source);
- /// Add the given type's qualification to self.qualif.
- fn add_type(&mut self, ty: Ty<'tcx>) {
- self.add(Qualif::MUTABLE_INTERIOR | Qualif::NEEDS_DROP);
- self.qualif.restrict(ty, self.tcx, self.param_env);
- }
+ let mut qualifs = self.qualifs_in_value(source);
- /// Within the provided closure, self.qualif will start
- /// out empty, and its value after the closure returns will
- /// be combined with the value before the call to nest.
- fn nest<F: FnOnce(&mut Self)>(&mut self, f: F) {
- let original = self.qualif;
- self.qualif = Qualif::empty();
- f(self);
- self.add(original);
- }
+ if let ValueSource::Rvalue(&Rvalue::Ref(_, kind, ref place)) = source {
+ // Getting `true` from `HasMutInterior::in_rvalue` means
+ // the borrowed place is disallowed from being borrowed,
+ // due to either a mutable borrow (with some exceptions),
+            // or a shared borrow of a value with interior mutability.
+ // Then `HasMutInterior` is replaced with `IsNotConst`,
+ // to avoid duplicate errors (e.g. from reborrowing).
+ if qualifs[HasMutInterior] {
+ qualifs[HasMutInterior] = false;
+ qualifs[IsNotConst] = true;
- /// Assign the current qualification to the given destination.
- fn assign(&mut self, dest: &Place<'tcx>, location: Location) {
- trace!("assign: {:?}", dest);
- let qualif = self.qualif;
- let span = self.span;
- let store = |slot: &mut Option<Qualif>| {
- if slot.is_some() {
- span_bug!(span, "multiple assignments to {:?}", dest);
- }
- *slot = Some(qualif);
- };
-
- // Only handle promotable temps in non-const functions.
- if self.mode == Mode::Fn {
- if let Place::Local(index) = *dest {
- if self.mir.local_kind(index) == LocalKind::Temp
- && self.temp_promotion_state[index].is_promotable() {
- debug!("store to promotable temp {:?} ({:?})", index, qualif);
- store(&mut self.local_qualif[index]);
+ if self.mode != Mode::Fn {
+ if let BorrowKind::Mut { .. } = kind {
+ let mut err = struct_span_err!(self.tcx.sess, self.span, E0017,
+ "references in {}s may only refer \
+ to immutable values", self.mode);
+ err.span_label(self.span, format!("{}s require immutable values",
+ self.mode));
+ if self.tcx.sess.teach(&err.get_code().unwrap()) {
+ err.note("References in statics and constants may only refer to \
+ immutable values.\n\n\
+ Statics are shared everywhere, and if they refer to \
+ mutable data one might violate memory safety since \
+ holding multiple mutable references to shared data is \
+ not allowed.\n\n\
+ If you really want global mutable state, try using \
+ static mut or a global UnsafeCell.");
+ }
+ err.emit();
+ } else {
+ span_err!(self.tcx.sess, self.span, E0492,
+ "cannot borrow a constant which may contain \
+ interior mutability, create a static instead");
+ }
+ }
+ } else {
+ // We might have a candidate for promotion.
+ let candidate = Candidate::Ref(location);
+ // We can only promote interior borrows of promotable temps.
+ let mut place = place;
+ while let Place::Projection(ref proj) = *place {
+ if proj.elem == ProjectionElem::Deref {
+ break;
+ }
+ place = &proj.base;
+ }
+ debug!("qualify_consts: promotion candidate: place={:?}", place);
+ if let Place::Local(local) = *place {
+ if self.mir.local_kind(local) == LocalKind::Temp {
+ debug!("qualify_consts: promotion candidate: local={:?}", local);
+ // The borrowed place doesn't have `HasMutInterior`
+ // (from `in_rvalue`), so we can safely ignore
+ // `HasMutInterior` from the local's qualifications.
+ // This allows borrowing fields which don't have
+ // `HasMutInterior`, from a type that does, e.g.:
+ // `let _: &'static _ = &(Cell::new(1), 2).1;`
+ let mut local_qualifs = self.qualifs_in_local(local);
+ local_qualifs[HasMutInterior] = false;
+ if !local_qualifs.0.iter().any(|&qualif| qualif) {
+ debug!("qualify_consts: promotion candidate: {:?}", candidate);
+ self.promotion_candidates.push(candidate);
+ }
+ }
}
}
- return;
}
let mut dest = dest;
}
}
};
- debug!("store to var {:?}", index);
- match &mut self.local_qualif[index] {
- // this is overly restrictive, because even full assignments do not clear the qualif
- // While we could special case full assignments, this would be inconsistent with
- // aggregates where we overwrite all fields via assignments, which would not get
- // that feature.
- Some(ref mut qualif) => *qualif = *qualif | self.qualif,
- // insert new qualification
- qualif @ None => *qualif = Some(self.qualif),
+
+ let kind = self.mir.local_kind(index);
+ debug!("store to {:?} {:?}", kind, index);
+
+ // Only handle promotable temps in non-const functions.
+ if self.mode == Mode::Fn {
+ if kind != LocalKind::Temp ||
+ !self.temp_promotion_state[index].is_promotable() {
+ return;
+ }
+ }
+
+ // this is overly restrictive, because even full assignments do not clear the qualif
+ // While we could special case full assignments, this would be inconsistent with
+ // aggregates where we overwrite all fields via assignments, which would not get
+ // that feature.
+ for (per_local, qualif) in &mut self.cx.per_local.as_mut().zip(qualifs).0 {
+ if *qualif {
+ per_local.insert(index);
+ }
+ }
+
+ // Ensure the `IsNotPromotable` qualification is preserved.
+ // NOTE(eddyb) this is actually unnecessary right now, as
+ // we never replace the local's qualif, but we might in
+ // the future, and so it serves to catch changes that unset
+ // important bits (in which case, asserting `contains` could
+ // be replaced with calling `insert` to re-set the bit).
+ if kind == LocalKind::Temp {
+ if !self.temp_promotion_state[index].is_promotable() {
+ assert!(self.cx.per_local[IsNotPromotable].contains(index));
+ }
}
}
- /// Qualify a whole const, static initializer or const fn.
- fn qualify_const(&mut self) -> (Qualif, Lrc<BitSet<Local>>) {
- debug!("qualifying {} {:?}", self.mode, self.def_id);
+ /// Check a whole const, static initializer or const fn.
+ fn check_const(&mut self) -> (u8, Lrc<BitSet<Local>>) {
+ debug!("const-checking {} {:?}", self.mode, self.def_id);
let mir = self.mir;
}
}
- self.qualif = self.local_qualif[RETURN_PLACE].unwrap_or(Qualif::NOT_CONST);
-
- // Account for errors in consts by using the
- // conservative type qualification instead.
- if self.qualif.intersects(Qualif::CONST_ERROR) {
- self.qualif = Qualif::empty();
- let return_ty = mir.return_ty();
- self.add_type(return_ty);
- }
-
// Collect all the temps we need to promote.
let mut promoted_temps = BitSet::new_empty(self.temp_promotion_state.len());
}
}
- (self.qualif, Lrc::new(promoted_temps))
- }
-
- fn is_const_panic_fn(&self, def_id: DefId) -> bool {
- Some(def_id) == self.tcx.lang_items().panic_fn() ||
- Some(def_id) == self.tcx.lang_items().begin_panic_fn()
- }
-}
-
-/// Accumulates an Rvalue or Call's effects in self.qualif.
-/// For functions (constant or not), it also records
-/// candidates for promotion in promotion_candidates.
-impl<'a, 'tcx> Visitor<'tcx> for Qualifier<'a, 'tcx, 'tcx> {
- fn visit_local(&mut self,
- &local: &Local,
- _: PlaceContext<'tcx>,
- _: Location) {
- debug!("visit_local: local={:?}", local);
- let kind = self.mir.local_kind(local);
- match kind {
- LocalKind::ReturnPointer => {
- self.not_const();
- }
- LocalKind::Var if self.mode == Mode::Fn => {
- self.add(Qualif::NOT_CONST);
- }
- LocalKind::Var |
- LocalKind::Arg |
- LocalKind::Temp => {
- if let LocalKind::Arg = kind {
- self.add(Qualif::FN_ARGUMENT);
- }
+ let promoted_temps = Lrc::new(promoted_temps);
- if !self.temp_promotion_state[local].is_promotable() {
- debug!("visit_local: (not promotable) local={:?}", local);
- self.add(Qualif::NOT_PROMOTABLE);
- }
+ let mut qualifs = self.qualifs_in_local(RETURN_PLACE);
- if let Some(qualif) = self.local_qualif[local] {
- self.add(qualif);
- } else {
- self.not_const();
- }
- }
+ // Account for errors in consts by using the
+ // conservative type qualification instead.
+ if qualifs[IsNotConst] {
+ qualifs = self.qualifs_in_any_value_of_ty(mir.return_ty());
}
+
+ (qualifs.encode_to_bits(), promoted_temps)
}
+}
+/// Checks MIR for const-correctness, using `ConstCx`
+/// for value qualifications, and accumulates writes of
+/// rvalue/call results to locals, in `local_qualif`.
+/// For functions (constant or not), it also records
+/// candidates for promotion in `promotion_candidates`.
+impl<'a, 'tcx> Visitor<'tcx> for Checker<'a, 'tcx> {
fn visit_place(&mut self,
place: &Place<'tcx>,
context: PlaceContext<'tcx>,
location: Location) {
debug!("visit_place: place={:?} context={:?} location={:?}", place, context, location);
+ self.super_place(place, context, location);
match *place {
- Place::Local(ref local) => self.visit_local(local, context, location),
- Place::Promoted(_) => bug!("promoting already promoted MIR"),
+ Place::Local(_) |
+ Place::Promoted(_) => {}
Place::Static(ref global) => {
if self.tcx
.get_attrs(global.def_id)
"thread-local statics cannot be \
accessed at compile-time");
}
- self.add(Qualif::NOT_CONST);
return;
}
return;
}
unleash_miri!(self);
- self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
let mut err = struct_span_err!(self.tcx.sess, self.span, E0013,
}
}
Place::Projection(ref proj) => {
- self.nest(|this| {
- this.super_place(place, context, location);
- match proj.elem {
- ProjectionElem::Deref => {
- if context.is_mutating_use() {
- // `not_const` errors out in const contexts
- this.not_const()
- } else {
- // just make sure this doesn't get promoted
- this.add(Qualif::NOT_CONST);
- }
- let base_ty = proj.base.ty(this.mir, this.tcx).to_ty(this.tcx);
- match this.mode {
- Mode::Fn => {},
- _ => {
- if let ty::RawPtr(_) = base_ty.sty {
- if !this.tcx.features().const_raw_ptr_deref {
- emit_feature_err(
- &this.tcx.sess.parse_sess, "const_raw_ptr_deref",
- this.span, GateIssue::Language,
- &format!(
- "dereferencing raw pointers in {}s is unstable",
- this.mode,
- ),
- );
- }
+ match proj.elem {
+ ProjectionElem::Deref => {
+ if context.is_mutating_use() {
+ // `not_const` errors out in const contexts
+ self.not_const()
+ }
+ let base_ty = proj.base.ty(self.mir, self.tcx).to_ty(self.tcx);
+ match self.mode {
+ Mode::Fn => {},
+ _ => {
+ if let ty::RawPtr(_) = base_ty.sty {
+ if !self.tcx.features().const_raw_ptr_deref {
+ emit_feature_err(
+ &self.tcx.sess.parse_sess, "const_raw_ptr_deref",
+ self.span, GateIssue::Language,
+ &format!(
+ "dereferencing raw pointers in {}s is unstable",
+ self.mode,
+ ),
+ );
}
}
}
}
+ }
- ProjectionElem::ConstantIndex {..} |
- ProjectionElem::Subslice {..} |
- ProjectionElem::Field(..) |
- ProjectionElem::Index(_) => {
- let base_ty = proj.base.ty(this.mir, this.tcx).to_ty(this.tcx);
- if let Some(def) = base_ty.ty_adt_def() {
- if def.is_union() {
- match this.mode {
- Mode::Fn => this.not_const(),
- Mode::ConstFn => {
- if !this.tcx.features().const_fn_union {
- emit_feature_err(
- &this.tcx.sess.parse_sess, "const_fn_union",
- this.span, GateIssue::Language,
- "unions in const fn are unstable",
- );
- }
- },
-
- | Mode::Static
- | Mode::StaticMut
- | Mode::Const
- => {},
- }
+ ProjectionElem::ConstantIndex {..} |
+ ProjectionElem::Subslice {..} |
+ ProjectionElem::Field(..) |
+ ProjectionElem::Index(_) => {
+ let base_ty = proj.base.ty(self.mir, self.tcx).to_ty(self.tcx);
+ if let Some(def) = base_ty.ty_adt_def() {
+ if def.is_union() {
+ match self.mode {
+ Mode::ConstFn => {
+ if !self.tcx.features().const_fn_union {
+ emit_feature_err(
+ &self.tcx.sess.parse_sess, "const_fn_union",
+ self.span, GateIssue::Language,
+ "unions in const fn are unstable",
+ );
+ }
+ },
+
+ | Mode::Fn
+ | Mode::Static
+ | Mode::StaticMut
+ | Mode::Const
+ => {},
}
}
-
- let ty = place.ty(this.mir, this.tcx).to_ty(this.tcx);
- this.qualif.restrict(ty, this.tcx, this.param_env);
}
+ }
- ProjectionElem::Downcast(..) => {
- this.not_const()
- }
+ ProjectionElem::Downcast(..) => {
+ self.not_const()
}
- });
+ }
}
}
}
self.super_operand(operand, location);
match *operand {
- Operand::Copy(_) |
- Operand::Move(_) => {
+ Operand::Move(ref place) => {
// Mark the consumed locals to indicate later drops are noops.
- if let Operand::Move(Place::Local(local)) = *operand {
- self.local_qualif[local] = self.local_qualif[local].map(|q|
- q - Qualif::NEEDS_DROP
- );
- }
- }
- Operand::Constant(ref constant) => {
- if let ty::LazyConst::Unevaluated(def_id, _) = constant.literal {
- // Don't peek inside trait associated constants.
- if self.tcx.trait_of_item(*def_id).is_some() {
- self.add_type(constant.ty);
- } else {
- let (bits, _) = self.tcx.at(constant.span).mir_const_qualif(*def_id);
-
- let qualif = Qualif::from_bits(bits).expect("invalid mir_const_qualif");
- self.add(qualif);
-
- // Just in case the type is more specific than
- // the definition, e.g., impl associated const
- // with type parameters, take it into account.
- self.qualif.restrict(constant.ty, self.tcx, self.param_env);
- }
+ if let Place::Local(local) = *place {
+ self.cx.per_local[NeedsDrop].remove(local);
}
}
+ Operand::Copy(_) |
+ Operand::Constant(_) => {}
}
}
fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
debug!("visit_rvalue: rvalue={:?} location={:?}", rvalue, location);
- // Recurse through operands and places.
+
+ // Check nested operands and places.
if let Rvalue::Ref(region, kind, ref place) = *rvalue {
+ // Special-case reborrows.
let mut is_reborrow = false;
if let Place::Projection(ref proj) = *place {
if let ProjectionElem::Deref = proj.elem {
if is_reborrow {
let ctx = match kind {
- BorrowKind::Shared =>
- PlaceContext::NonMutatingUse(NonMutatingUseContext::SharedBorrow(region)),
- BorrowKind::Shallow =>
- PlaceContext::NonMutatingUse(NonMutatingUseContext::ShallowBorrow(region)),
- BorrowKind::Unique =>
- PlaceContext::NonMutatingUse(NonMutatingUseContext::UniqueBorrow(region)),
- BorrowKind::Mut { .. } =>
- PlaceContext::MutatingUse(MutatingUseContext::Borrow(region)),
+ BorrowKind::Shared => PlaceContext::NonMutatingUse(
+ NonMutatingUseContext::SharedBorrow(region),
+ ),
+ BorrowKind::Shallow => PlaceContext::NonMutatingUse(
+ NonMutatingUseContext::ShallowBorrow(region),
+ ),
+ BorrowKind::Unique => PlaceContext::NonMutatingUse(
+ NonMutatingUseContext::UniqueBorrow(region),
+ ),
+ BorrowKind::Mut { .. } => PlaceContext::MutatingUse(
+ MutatingUseContext::Borrow(region),
+ ),
};
self.super_place(place, ctx, location);
} else {
Rvalue::Cast(CastKind::ClosureFnPointer, ..) |
Rvalue::Cast(CastKind::Unsize, ..) |
Rvalue::Discriminant(..) |
- Rvalue::Len(_) => {}
-
- Rvalue::Ref(_, kind, ref place) => {
- let ty = place.ty(self.mir, self.tcx).to_ty(self.tcx);
-
- // Default to forbidding the borrow and/or its promotion,
- // due to the potential for direct or interior mutability,
- // and only proceed by setting `forbidden_mut` to `false`.
- let mut forbidden_mut = true;
-
- if let BorrowKind::Mut { .. } = kind {
- // In theory, any zero-sized value could be borrowed
- // mutably without consequences. However, only &mut []
- // is allowed right now, and only in functions.
- if self.mode == Mode::StaticMut {
- // Inside a `static mut`, &mut [...] is also allowed.
- match ty.sty {
- ty::Array(..) | ty::Slice(_) => forbidden_mut = false,
- _ => {}
- }
- } else if let ty::Array(_, len) = ty.sty {
- // FIXME(eddyb) the `self.mode == Mode::Fn` condition
- // seems unnecessary, given that this is merely a ZST.
- if len.unwrap_usize(self.tcx) == 0 && self.mode == Mode::Fn {
- forbidden_mut = false;
- }
- }
-
- if forbidden_mut {
- unleash_miri!(self);
- self.add(Qualif::NOT_CONST);
- if self.mode != Mode::Fn {
- let mut err = struct_span_err!(self.tcx.sess, self.span, E0017,
- "references in {}s may only refer \
- to immutable values", self.mode);
- err.span_label(self.span, format!("{}s require immutable values",
- self.mode));
- if self.tcx.sess.teach(&err.get_code().unwrap()) {
- err.note("References in statics and constants may only refer to \
- immutable values.\n\n\
- Statics are shared everywhere, and if they refer to \
- mutable data one might violate memory safety since \
- holding multiple mutable references to shared data is \
- not allowed.\n\n\
- If you really want global mutable state, try using \
- static mut or a global UnsafeCell.");
- }
- err.emit();
- }
- }
- } else {
- // Constants cannot be borrowed if they contain interior mutability as
- // it means that our "silent insertion of statics" could change
- // initializer values (very bad).
- if self.qualif.contains(Qualif::MUTABLE_INTERIOR) {
- // A reference of a MUTABLE_INTERIOR place is instead
- // NOT_CONST (see `if forbidden_mut` below), to avoid
- // duplicate errors (from reborrowing, for example).
- self.qualif = self.qualif - Qualif::MUTABLE_INTERIOR;
- if self.mode != Mode::Fn {
- span_err!(self.tcx.sess, self.span, E0492,
- "cannot borrow a constant which may contain \
- interior mutability, create a static instead");
- }
- } else {
- // We allow immutable borrows of frozen data.
- forbidden_mut = false;
- }
- }
-
- debug!("visit_rvalue: forbidden_mut={:?}", forbidden_mut);
- if forbidden_mut {
- unleash_miri!(self);
- self.add(Qualif::NOT_CONST);
- } else {
- // We might have a candidate for promotion.
- let candidate = Candidate::Ref(location);
- // We can only promote interior borrows of promotable temps.
- let mut place = place;
- while let Place::Projection(ref proj) = *place {
- if proj.elem == ProjectionElem::Deref {
- break;
- }
- place = &proj.base;
- }
- debug!("visit_rvalue: place={:?}", place);
- if let Place::Local(local) = *place {
- if self.mir.local_kind(local) == LocalKind::Temp {
- debug!("visit_rvalue: local={:?}", local);
- if let Some(qualif) = self.local_qualif[local] {
- // `forbidden_mut` is false, so we can safely ignore
- // `MUTABLE_INTERIOR` from the local's qualifications.
- // This allows borrowing fields which don't have
- // `MUTABLE_INTERIOR`, from a type that does, e.g.:
- // `let _: &'static _ = &(Cell::new(1), 2).1;`
- debug!("visit_rvalue: qualif={:?}", qualif);
- if (qualif - Qualif::MUTABLE_INTERIOR).is_empty() {
- debug!("visit_rvalue: candidate={:?}", candidate);
- self.promotion_candidates.push(candidate);
- }
- }
- }
- }
- }
- }
+ Rvalue::Len(_) |
+ Rvalue::Ref(..) |
+ Rvalue::Aggregate(..) => {}
Rvalue::Cast(CastKind::Misc, ref operand, cast_ty) => {
let operand_ty = operand.ty(self.mir, self.tcx);
let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
match (cast_in, cast_out) {
(CastTy::Ptr(_), CastTy::Int(_)) |
- (CastTy::FnPtr, CastTy::Int(_)) => {
+ (CastTy::FnPtr, CastTy::Int(_)) if self.mode != Mode::Fn => {
unleash_miri!(self);
- if let Mode::Fn = self.mode {
- // in normal functions, mark such casts as not promotable
- self.add(Qualif::NOT_CONST);
- } else if !self.tcx.features().const_raw_ptr_to_usize_cast {
+ if !self.tcx.features().const_raw_ptr_to_usize_cast {
// in const fn and constants require the feature gate
// FIXME: make it unsafe inside const fn and constants
emit_feature_err(
op == BinOp::Offset);
unleash_miri!(self);
- if let Mode::Fn = self.mode {
- // raw pointer operations are not allowed inside promoteds
- self.add(Qualif::NOT_CONST);
- } else if !self.tcx.features().const_compare_raw_pointers {
+ if self.mode != Mode::Fn && !self.tcx.features().const_compare_raw_pointers {
// require the feature gate inside constants and const fn
// FIXME: make it unsafe to use these operations
emit_feature_err(
Rvalue::NullaryOp(NullOp::Box, _) => {
unleash_miri!(self);
- self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
let mut err = struct_span_err!(self.tcx.sess, self.span, E0010,
"allocations are not allowed in {}s", self.mode);
err.emit();
}
}
-
- Rvalue::Aggregate(ref kind, _) => {
- if let AggregateKind::Adt(def, ..) = **kind {
- if def.has_dtor(self.tcx) {
- self.add(Qualif::NEEDS_DROP);
- }
-
- if Some(def.did) == self.tcx.lang_items().unsafe_cell_type() {
- let ty = rvalue.ty(self.mir, self.tcx);
- self.add_type(ty);
- assert!(self.qualif.contains(Qualif::MUTABLE_INTERIOR));
- }
- }
- }
}
}
location: Location) {
debug!("visit_terminator_kind: bb={:?} kind={:?} location={:?}", bb, kind, location);
if let TerminatorKind::Call { ref func, ref args, ref destination, .. } = *kind {
- self.visit_operand(func, location);
+ if let Some((ref dest, _)) = *destination {
+ self.assign(dest, ValueSource::Call {
+ callee: func,
+ args,
+ return_ty: dest.ty(self.mir, self.tcx).to_ty(self.tcx),
+ }, location);
+ }
let fn_ty = func.ty(self.mir, self.tcx);
let mut callee_def_id = None;
let mut is_shuffle = false;
- let mut is_const_fn = false;
- let mut is_promotable_const_fn = false;
match fn_ty.sty {
ty::FnDef(def_id, _) => {
callee_def_id = Some(def_id);
Abi::PlatformIntrinsic => {
assert!(!self.tcx.is_const_fn(def_id));
match &self.tcx.item_name(def_id).as_str()[..] {
- | "size_of"
- | "min_align_of"
- | "needs_drop"
- | "type_id"
- | "bswap"
- | "bitreverse"
- | "ctpop"
- | "cttz"
- | "cttz_nonzero"
- | "ctlz"
- | "ctlz_nonzero"
- | "overflowing_add"
- | "overflowing_sub"
- | "overflowing_mul"
- | "unchecked_shl"
- | "unchecked_shr"
- | "rotate_left"
- | "rotate_right"
- | "add_with_overflow"
- | "sub_with_overflow"
- | "mul_with_overflow"
- | "saturating_add"
- | "saturating_sub"
- // no need to check feature gates, intrinsics are only callable
- // from the libstd or with forever unstable feature gates
- => is_const_fn = true,
// special intrinsic that can be called diretly without an intrinsic
// feature gate needs a language feature gate
"transmute" => {
// never promote transmute calls
if self.mode != Mode::Fn {
- is_const_fn = true;
// const eval transmute calls only with the feature gate
if !self.tcx.features().const_transmute {
emit_feature_err(
is_shuffle = true;
}
+ // no need to check feature gates, intrinsics are only callable
+ // from the libstd or with forever unstable feature gates
_ => {}
}
}
_ => {
- // In normal functions we only care about promotion.
- if self.mode == Mode::Fn {
- // Never promote const fn calls of
- // functions without `#[rustc_promotable]`.
- if self.tcx.is_promotable_const_fn(def_id) {
- is_const_fn = true;
- is_promotable_const_fn = true;
- } else if self.tcx.is_const_fn(def_id) {
- is_const_fn = true;
- }
- } else {
- // stable const fns or unstable const fns with their feature gate
- // active
+ // In normal functions no calls are feature-gated.
+ if self.mode != Mode::Fn {
let unleash_miri = self
.tcx
.sess
.debugging_opts
.unleash_the_miri_inside_of_you;
if self.tcx.is_const_fn(def_id) || unleash_miri {
- is_const_fn = true;
+ // stable const fns or unstable const fns
+ // with their feature gate active
+ // FIXME(eddyb) move stability checks from `is_const_fn` here.
} else if self.is_const_panic_fn(def_id) {
// Check the const_panic feature gate.
// FIXME: cannot allow this inside `allow_internal_unstable`
// because that would make `panic!` insta stable in constants,
// since the macro is marked with the attribute.
- if self.tcx.features().const_panic {
- is_const_fn = true;
- } else {
+ if !self.tcx.features().const_panic {
// Don't allow panics in constants without the feature gate.
emit_feature_err(
&self.tcx.sess.parse_sess,
// Check `#[unstable]` const fns or `#[rustc_const_unstable]`
// functions without the feature gate active in this crate in
// order to report a better error message than the one below.
- if self.span.allows_unstable(&feature.as_str()) {
- // `allow_internal_unstable` can make such calls stable.
- is_const_fn = true;
- } else {
+ if !self.span.allows_unstable(&feature.as_str()) {
let mut err = self.tcx.sess.struct_span_err(self.span,
&format!("`{}` is not yet stable as a const fn",
self.tcx.item_path_str(def_id)));
err.emit();
}
} else {
- // FIXME(#57563): remove this check when const fn stabilizes.
- let (msg, note) = if let UnstableFeatures::Disallow =
- self.tcx.sess.opts.unstable_features {
- (format!("calls in {}s are limited to \
- tuple structs and tuple variants",
- self.mode),
- Some("a limited form of compile-time function \
- evaluation is available on a nightly \
- compiler via `const fn`"))
- } else {
- (format!("calls in {}s are limited \
- to constant functions, \
- tuple structs and tuple variants",
- self.mode),
- None)
- };
let mut err = struct_span_err!(
self.tcx.sess,
self.span,
E0015,
- "{}",
- msg,
+ "calls in {}s are limited to constant functions, \
+ tuple structs and tuple variants",
+ self.mode,
);
- if let Some(note) = note {
- err.span_note(self.span, note);
- }
err.emit();
}
}
}
}
- },
+ }
ty::FnPtr(_) => {
if self.mode != Mode::Fn {
let mut err = self.tcx.sess.struct_span_err(
&format!("function pointers are not allowed in const fn"));
err.emit();
}
- },
+ }
_ => {
self.not_const();
- return
}
}
-
- let constant_arguments = callee_def_id.and_then(|id| {
- args_required_const(self.tcx, id)
- });
- for (i, arg) in args.iter().enumerate() {
- self.nest(|this| {
- this.visit_operand(arg, location);
- if this.mode != Mode::Fn {
- return
- }
- let candidate = Candidate::Argument { bb, index: i };
- if is_shuffle && i == 2 {
- if this.qualif.is_empty() {
- debug!("visit_terminator_kind: candidate={:?}", candidate);
- this.promotion_candidates.push(candidate);
- } else {
- span_err!(this.tcx.sess, this.span, E0526,
- "shuffle indices are not constant");
- }
- return
+ if self.mode == Mode::Fn {
+ let constant_args = callee_def_id.and_then(|id| {
+ args_required_const(self.tcx, id)
+ }).unwrap_or_default();
+ for (i, arg) in args.iter().enumerate() {
+ if !(is_shuffle && i == 2 || constant_args.contains(&i)) {
+ continue;
}
- let constant_arguments = match constant_arguments.as_ref() {
- Some(s) => s,
- None => return,
- };
- if !constant_arguments.contains(&i) {
- return
- }
+ let candidate = Candidate::Argument { bb, index: i };
// Since the argument is required to be constant,
// we care about constness, not promotability.
// If we checked for promotability, we'd miss out on
// which happens even without the user requesting it.
// We can error out with a hard error if the argument is not
// constant here.
- if (this.qualif - Qualif::NOT_PROMOTABLE).is_empty() {
+ if !IsNotConst::in_operand(self, arg) {
debug!("visit_terminator_kind: candidate={:?}", candidate);
- this.promotion_candidates.push(candidate);
+ self.promotion_candidates.push(candidate);
} else {
- this.tcx.sess.span_err(this.span,
- &format!("argument {} is required to be a constant",
- i + 1));
+ if is_shuffle {
+ span_err!(self.tcx.sess, self.span, E0526,
+ "shuffle indices are not constant");
+ } else {
+ self.tcx.sess.span_err(self.span,
+ &format!("argument {} is required to be a constant",
+ i + 1));
+ }
}
- });
- }
-
- // non-const fn calls
- if !is_const_fn {
- self.qualif = Qualif::NOT_CONST;
- if self.mode != Mode::Fn {
- self.tcx.sess.delay_span_bug(
- self.span,
- "should have reported an error about non-const fn calls in constants",
- )
}
}
- if let Some((ref dest, _)) = *destination {
- // Avoid propagating irrelevant callee/argument qualifications.
- if self.qualif.intersects(Qualif::CONST_ERROR) {
- self.qualif = Qualif::NOT_CONST;
- } else {
- // Be conservative about the returned value of a const fn.
- let tcx = self.tcx;
- let ty = dest.ty(self.mir, tcx).to_ty(tcx);
- if is_const_fn && !is_promotable_const_fn && self.mode == Mode::Fn {
- self.qualif = Qualif::NOT_PROMOTABLE;
- } else {
- self.qualif = Qualif::empty();
- }
- self.add_type(ty);
- }
- self.assign(dest, location);
+ // Check callee and argument operands.
+ self.visit_operand(func, location);
+ for arg in args {
+ self.visit_operand(arg, location);
}
} else if let TerminatorKind::Drop { location: ref place, .. } = *kind {
self.super_terminator_kind(bb, kind, location);
// HACK(eddyb): emulate a bit of dataflow analysis,
// conservatively, that drop elaboration will do.
let needs_drop = if let Place::Local(local) = *place {
- if self.local_qualif[local].map_or(true, |q| q.contains(Qualif::NEEDS_DROP)) {
+ if NeedsDrop::in_local(self, local) {
Some(self.mir.local_decls[local].source_info.span)
} else {
None
rvalue: &Rvalue<'tcx>,
location: Location) {
debug!("visit_assign: dest={:?} rvalue={:?} location={:?}", dest, rvalue, location);
- self.visit_rvalue(rvalue, location);
+ self.assign(dest, ValueSource::Rvalue(rvalue), location);
- self.assign(dest, location);
+ self.visit_rvalue(rvalue, location);
}
fn visit_source_info(&mut self, source_info: &SourceInfo) {
fn visit_statement(&mut self, bb: BasicBlock, statement: &Statement<'tcx>, location: Location) {
debug!("visit_statement: bb={:?} statement={:?} location={:?}", bb, statement, location);
- self.nest(|this| {
- this.visit_source_info(&statement.source_info);
- match statement.kind {
- StatementKind::Assign(ref place, ref rvalue) => {
- this.visit_assign(bb, place, rvalue, location);
- }
- StatementKind::FakeRead(..) |
- StatementKind::SetDiscriminant { .. } |
- StatementKind::StorageLive(_) |
- StatementKind::StorageDead(_) |
- StatementKind::InlineAsm {..} |
- StatementKind::Retag { .. } |
- StatementKind::AscribeUserType(..) |
- StatementKind::Nop => {}
+ match statement.kind {
+ StatementKind::Assign(..) => {
+ self.super_statement(bb, statement, location);
}
- });
+ // FIXME(eddyb) should these really do nothing?
+ StatementKind::FakeRead(..) |
+ StatementKind::SetDiscriminant { .. } |
+ StatementKind::StorageLive(_) |
+ StatementKind::StorageDead(_) |
+ StatementKind::InlineAsm {..} |
+ StatementKind::Retag { .. } |
+ StatementKind::AscribeUserType(..) |
+ StatementKind::Nop => {}
+ }
}
fn visit_terminator(&mut self,
terminator: &Terminator<'tcx>,
location: Location) {
debug!("visit_terminator: bb={:?} terminator={:?} location={:?}", bb, terminator, location);
- self.nest(|this| this.super_terminator(bb, terminator, location));
+ self.super_terminator(bb, terminator, location);
}
}
if mir.return_ty().references_error() {
tcx.sess.delay_span_bug(mir.span, "mir_const_qualif: Mir had errors");
- return (Qualif::NOT_CONST.bits(), Lrc::new(BitSet::new_empty(0)));
+ return (1 << IsNotConst::IDX, Lrc::new(BitSet::new_empty(0)));
}
- let mut qualifier = Qualifier::new(tcx, def_id, mir, Mode::Const);
- let (qualif, promoted_temps) = qualifier.qualify_const();
- (qualif.bits(), promoted_temps)
+ Checker::new(tcx, def_id, mir, Mode::Const).check_const()
}
pub struct QualifyAndPromoteConstants;
}
let def_id = src.def_id();
- let id = tcx.hir().as_local_node_id(def_id).unwrap();
+ let id = tcx.hir().as_local_hir_id(def_id).unwrap();
let mut const_promoted_temps = None;
- let mode = match tcx.hir().body_owner_kind(id) {
+ let mode = match tcx.hir().body_owner_kind_by_hir_id(id) {
hir::BodyOwnerKind::Closure => Mode::Fn,
hir::BodyOwnerKind::Fn => {
if tcx.is_const_fn(def_id) {
debug!("run_pass: mode={:?}", mode);
if mode == Mode::Fn || mode == Mode::ConstFn {
- // This is ugly because Qualifier holds onto mir,
+ // This is ugly because Checker holds onto mir,
// which can't be mutated until its scope ends.
let (temps, candidates) = {
- let mut qualifier = Qualifier::new(tcx, def_id, mir, mode);
+ let mut checker = Checker::new(tcx, def_id, mir, mode);
if mode == Mode::ConstFn {
if tcx.sess.opts.debugging_opts.unleash_the_miri_inside_of_you {
- qualifier.qualify_const();
+ checker.check_const();
} else if tcx.is_min_const_fn(def_id) {
// enforce `min_const_fn` for stable const fns
use super::qualify_min_const_fn::is_min_const_fn;
} else {
// this should not produce any errors, but better safe than sorry
// FIXME(#53819)
- qualifier.qualify_const();
+ checker.check_const();
}
} else {
// Enforce a constant-like CFG for `const fn`.
- qualifier.qualify_const();
+ checker.check_const();
}
} else {
- while let Some((bb, data)) = qualifier.rpo.next() {
- qualifier.visit_basic_block_data(bb, data);
+ while let Some((bb, data)) = checker.rpo.next() {
+ checker.visit_basic_block_data(bb, data);
}
}
- (qualifier.temp_promotion_state, qualifier.promotion_candidates)
+ (checker.temp_promotion_state, checker.promotion_candidates)
};
// Do the actual promotion, now that we know what's viable.
// Already computed by `mir_const_qualif`.
const_promoted_temps.unwrap()
} else {
- Qualifier::new(tcx, def_id, mir, mode).qualify_const().1
+ Checker::new(tcx, def_id, mir, mode).check_const().1
};
// In `const` and `static` everything without `StorageDead`
}
}
-/// Returns true if the `def_id` refers to an intrisic which we've whitelisted
+/// Returns `true` if the `def_id` refers to an intrinsic which we've whitelisted
/// for being called from stable `const fn`s (`min_const_fn`).
///
/// Adding more intrinsics requires sign-off from @rust-lang/lang.
use crate::transform::{MirPass, MirSource};
use crate::util::patch::MirPatch;
-/// A pass that removes no-op landing pads and replaces jumps to them with
+/// A pass that removes noop landing pads and replaces jumps to them with
/// `None`. This is important because otherwise LLVM generates terrible
/// code for these.
pub struct RemoveNoopLandingPads;
use rustc::ty::{self, TyCtxt};
use rustc::mir::*;
-/// Return `true` if this place is allowed to be less aligned
+/// Returns `true` if this place is allowed to be less aligned
/// than its containing struct (because it is within a packed
/// struct).
pub fn is_disaligned<'a, 'tcx, L>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
}
- /// FIXME(pcwalton): This should update the def-use chains.
+ // FIXME(pcwalton): this should update the def-use chains.
pub fn replace_all_defs_and_uses_with(&self,
local: Local,
mir: &mut Mir<'tcx>,
/// joined together under the `rest` subpath. They are all controlled
/// by the primary drop flag, but only the last rest-field dropped
/// should clear it (and it must also not clear anything else).
- ///
- /// FIXME: I think we should just control the flags externally
- /// and then we do not need this machinery.
+ //
+ // FIXME: I think we should just control the flags externally,
+ // and then we do not need this machinery.
pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
debug!("elaborate_drop({:?})", self);
let style = self.elaborator.drop_style(self.path, DropFlagMode::Deep);
}
}
- /// Return the place and move path for each field of `variant`,
+ /// Returns the place and move path for each field of `variant`,
/// (the move path is `None` if the field is a rest field).
fn move_paths_for_fields(&self,
base_place: &Place<'tcx>,
}
}
- /// Create one-half of the drop ladder for a list of fields, and return
+ /// Creates one-half of the drop ladder for a list of fields, and returns
/// the list of steps in it in reverse order, with the first step
/// dropping 0 fields and so on.
///
)
}
- /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
+ /// Creates a full drop ladder, consisting of 2 connected half-drop-ladders
///
/// For example, with 3 fields, the drop ladder is
///
}
}
- /// Return a basic block that drop a place using the context
+ /// Returns a basic block that drops a place using the context
/// and path in `c`. If `mode` is something, also clear `c`
/// according to it.
///
-//! Liveness analysis which computes liveness of MIR local variables at the boundary of basic blocks
+//! Liveness analysis which computes liveness of MIR local variables at the boundary of basic
+//! blocks.
//!
//! This analysis considers references as being used only at the point of the
//! borrow. This means that this does not track uses because of references that
//! already exist:
//!
-//! ```Rust
-//! fn foo() {
-//! x = 0;
-//! // `x` is live here
-//! GLOBAL = &x: *const u32;
-//! // but not here, even while it can be accessed through `GLOBAL`.
-//! foo();
-//! x = 1;
-//! // `x` is live again here, because it is assigned to `OTHER_GLOBAL`
-//! OTHER_GLOBAL = &x: *const u32;
-//! // ...
-//! }
+//! ```rust
+//! fn foo() {
+//! x = 0;
+//! // `x` is live here ...
+//! GLOBAL = &x: *const u32;
+//! // ... but not here, even while it can be accessed through `GLOBAL`.
+//! foo();
+//! x = 1;
+//! // `x` is live again here, because it is assigned to `OTHER_GLOBAL`.
+//! OTHER_GLOBAL = &x: *const u32;
+//! // ...
+//! }
//! ```
//!
//! This means that users of this analysis still have to check whether
}
}
-/// Compute which local variables are live within the given function
+/// Computes which local variables are live within the given function
/// `mir`. The liveness mode `mode` determines what sorts of uses are
/// considered to make a variable live (e.g., do drops count?).
pub fn liveness_of_locals<'tcx, V: Idx>(
.chars()
.filter_map(|c| match c {
' ' => None,
- ':' => Some('_'),
+ ':' | '<' | '>' => Some('_'),
c => Some(c)
}));
s
match (descr, src.promoted) {
(_, Some(i)) => write!(w, "{:?} in ", i)?,
(Some(Def::StructCtor(..)), _) => write!(w, "struct ")?,
- (Some(Def::Const(_)), _) => write!(w, "const ")?,
+ (Some(Def::Const(_)), _)
+ | (Some(Def::AssociatedConst(_)), _) => write!(w, "const ")?,
(Some(Def::Static(_, /*is_mutbl*/false)), _) => write!(w, "static ")?,
(Some(Def::Static(_, /*is_mutbl*/true)), _) => write!(w, "static mut ")?,
(_, _) if is_function => write!(w, "fn ")?,
}
}
- /// matches '-' lit | lit (cf. parser::Parser::parse_literal_maybe_minus),
- /// or path for ranges.
- ///
- /// FIXME: do we want to allow expr -> pattern conversion to create path expressions?
- /// That means making this work:
- ///
- /// ```rust,ignore (FIXME)
- /// struct S;
- /// macro_rules! m {
- /// ($a:expr) => {
- /// let $a = S;
- /// }
- /// }
- /// m!(S);
- /// ```
+ /// Matches `'-' lit | lit (cf. parser::Parser::parse_literal_maybe_minus)`,
+ /// or paths for ranges.
+ //
+ // FIXME: do we want to allow `expr -> pattern` conversion to create path expressions?
+ // That means making this work:
+ //
+ // ```rust,ignore (FIXME)
+ // struct S;
+ // macro_rules! m {
+ // ($a:expr) => {
+ // let $a = S;
+ // }
+ // }
+ // m!(S);
+ // ```
fn check_expr_within_pat(&self, expr: &Expr, allow_paths: bool) {
match expr.node {
ExprKind::Lit(..) => {}
let node_id = tcx.hir().as_local_node_id(def_id)
.expect("rvalue_promotable_map invoked with non-local def-id");
let body_id = tcx.hir().body_owned_by(node_id);
- let body_hir_id = tcx.hir().node_to_hir_id(body_id.node_id);
- tcx.rvalue_promotable_map(def_id).contains(&body_hir_id.local_id)
+ tcx.rvalue_promotable_map(def_id).contains(&body_id.hir_id.local_id)
}
fn rvalue_promotable_map<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
}
/// While the `ExprUseVisitor` walks, we will identify which
- /// expressions are borrowed, and insert their ids into this
+ /// expressions are borrowed, and insert their IDs into this
/// table. Actually, we insert the "borrow-id", which is normally
- /// the id of the expression being borrowed: but in the case of
+ /// the ID of the expression being borrowed: but in the case of
/// `ref mut` borrows, the `id` of the pattern is
- /// inserted. Therefore later we remove that entry from the table
+ /// inserted. Therefore, later we remove that entry from the table
/// and transfer it over to the value being matched. This will
/// then prevent said value from being promoted.
fn remove_mut_rvalue_borrow(&mut self, pat: &hir::Pat) -> bool {
}
fn check_expr(&mut self, ex: &'tcx hir::Expr) -> Promotability {
- let node_ty = self.tables.node_id_to_type(ex.hir_id);
+ let node_ty = self.tables.node_type(ex.hir_id);
let mut outer = check_expr_kind(self, ex, node_ty);
outer &= check_adjustments(self, ex);
if v.tables.is_method_call(e) {
return NotPromotable;
}
- match v.tables.node_id_to_type(lhs.hir_id).sty {
+ match v.tables.node_type(lhs.hir_id).sty {
ty::RawPtr(_) | ty::FnPtr(..) => {
assert!(op.node == hir::BinOpKind::Eq || op.node == hir::BinOpKind::Ne ||
op.node == hir::BinOpKind::Le || op.node == hir::BinOpKind::Lt ||
ty_result & node_result
}
-/// Check the adjustments of an expression
+/// Checks the adjustments of an expression.
fn check_adjustments<'a, 'tcx>(
v: &mut CheckCrateVisitor<'a, 'tcx>,
e: &hir::Expr) -> Promotability {
}
}
-/// Find the function marked with `#[plugin_registrar]`, if any.
+/// Finds the function marked with `#[plugin_registrar]`, if any.
pub fn find_plugin_registrar<'tcx>(tcx: TyCtxt<'_, 'tcx, 'tcx>) -> Option<DefId> {
tcx.plugin_registrar_fn(LOCAL_CRATE)
}
//! in various ways.
//!
//! Plugin authors will use the `Registry` type re-exported by
-//! this module, along with its methods. The rest of the module
+//! this module, along with its methods. The rest of the module
//! is for use by `rustc` itself.
//!
//! To define a plugin, build a dylib crate with a
}
}
- /// Get the plugin's arguments, if any.
+ /// Gets the plugin's arguments, if any.
///
/// These are specified inside the `plugin` crate attribute as
///
/// Implemented to visit all `DefId`s in a type.
/// Visiting `DefId`s is useful because visibilities and reachabilities are attached to them.
/// The idea is to visit "all components of a type", as documented in
-/// https://github.com/rust-lang/rfcs/blob/master/text/2145-type-privacy.md#how-to-determine-visibility-of-a-type
-/// Default type visitor (`TypeVisitor`) does most of the job, but it has some shortcomings.
-/// First, it doesn't have overridable `fn visit_trait_ref`, so we have to catch trait def-ids
+/// https://github.com/rust-lang/rfcs/blob/master/text/2145-type-privacy.md#how-to-determine-visibility-of-a-type.
+/// The default type visitor (`TypeVisitor`) does most of the job, but it has some shortcomings.
+/// First, it doesn't have overridable `fn visit_trait_ref`, so we have to catch trait `DefId`s
/// manually. Second, it doesn't visit some type components like signatures of fn types, or traits
/// in `impl Trait`, see individual comments in `DefIdVisitorSkeleton::visit_ty`.
trait DefIdVisitor<'a, 'tcx: 'a> {
}
////////////////////////////////////////////////////////////////////////////////
-/// The embargo visitor, used to determine the exports of the ast
+/// The embargo visitor, used to determine the exports of the AST.
////////////////////////////////////////////////////////////////////////////////
struct EmbargoVisitor<'a, 'tcx: 'a> {
def: &'tcx ty::AdtDef, // definition of the struct or enum
field: &'tcx ty::FieldDef) { // definition of the field
let ident = Ident::new(keywords::Invalid.name(), use_ctxt);
- let def_id = self.tcx.adjust_ident(ident, def.did, self.current_item).1;
+ let current_hir = self.tcx.hir().node_to_hir_id(self.current_item);
+ let def_id = self.tcx.adjust_ident(ident, def.did, current_hir).1;
if !def.is_enum() && !field.vis.is_accessible_from(def_id, self.tcx) {
struct_span_err!(self.tcx.sess, span, E0451, "field `{}` of {} `{}` is private",
field.ident, def.variant_descr(), self.tcx.item_path_str(def.did))
// Take node-id of an expression or pattern and check its type for privacy.
fn check_expr_pat_type(&mut self, id: hir::HirId, span: Span) -> bool {
self.span = span;
- if self.visit(self.tables.node_id_to_type(id)) || self.visit(self.tables.node_substs(id)) {
+ if self.visit(self.tables.node_type(id)) || self.visit(self.tables.node_substs(id)) {
return true;
}
if let Some(adjustments) = self.tables.adjustments().get(id) {
self.span = hir_ty.span;
if self.in_body {
// Types in bodies.
- if self.visit(self.tables.node_id_to_type(hir_ty.hir_id)) {
+ if self.visit(self.tables.node_type(hir_ty.hir_id)) {
return;
}
} else {
syntax = { path = "../libsyntax" }
rustc = { path = "../librustc" }
arena = { path = "../libarena" }
-rustc_errors = { path = "../librustc_errors" }
+errors = { path = "../librustc_errors", package = "rustc_errors" }
syntax_pos = { path = "../libsyntax_pos" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_metadata = { path = "../librustc_metadata" }
-//! Reduced graph building
+//! Reduced graph building.
//!
//! Here we build the "reduced graph": the graph of the module tree without
//! any imports resolved.
use std::ptr;
use rustc_data_structures::sync::Lrc;
-use crate::errors::Applicability;
+use errors::Applicability;
use syntax::ast::{Name, Ident};
use syntax::attr;
}
}
- // This returns true if we should consider the underlying `extern crate` to be used.
+ /// Returns `true` if we should consider the underlying `extern crate` to be used.
fn process_legacy_macro_imports(&mut self, item: &Item, module: Module<'a>,
parent_scope: &ParentScope<'a>) -> bool {
let mut import_all = None;
import_all.is_some() || !single_imports.is_empty()
}
- // does this attribute list contain "macro_use"?
+ /// Returns `true` if this attribute list contains `macro_use`.
fn contains_macro_use(&mut self, attrs: &[ast::Attribute]) -> bool {
for attr in attrs {
if attr.check_name("macro_escape") {
use std::cmp::Reverse;
+use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
use log::debug;
use rustc::hir::def::*;
use rustc::hir::def::Namespace::*;
use syntax::symbol::keywords;
use syntax_pos::Span;
-use crate::errors::{Applicability, DiagnosticBuilder, DiagnosticId};
use crate::macros::ParentScope;
use crate::resolve_imports::ImportResolver;
use crate::{import_candidate_to_enum_paths, is_self_type, is_self_value, path_names_to_string};
// Try to lookup name in more relaxed fashion for better error reporting.
let ident = path.last().unwrap().ident;
- let candidates = self.lookup_import_candidates(ident, ns, is_expected);
+ let candidates = self.lookup_import_candidates(ident, ns, is_expected)
+ .drain(..)
+ .filter(|ImportSuggestion { did, .. }| {
+ match (did, def.and_then(|def| def.opt_def_id())) {
+ (Some(suggestion_did), Some(actual_did)) => *suggestion_did != actual_did,
+ _ => true,
+ }
+ })
+ .collect::<Vec<_>>();
if candidates.is_empty() && is_expected(Def::Enum(DefId::local(CRATE_DEF_INDEX))) {
let enum_candidates =
self.lookup_import_candidates(ident, ns, is_enum_variant);
#![deny(rust_2018_idioms)]
-use rustc_errors as errors;
-
pub use rustc::hir::def::{Namespace, PerNS};
use GenericParameters::*;
use rustc::hir::def::Namespace::*;
use rustc::hir::def_id::{CRATE_DEF_INDEX, LOCAL_CRATE, DefId};
use rustc::hir::{Freevar, FreevarMap, TraitCandidate, TraitMap, GlobMap};
-use rustc::ty;
+use rustc::ty::{self, DefIdTree};
use rustc::util::nodemap::{NodeMap, NodeSet, FxHashMap, FxHashSet, DefIdMap};
use rustc::{bug, span_bug};
/// A free importable items suggested in case of resolution failure.
struct ImportSuggestion {
+ did: Option<DefId>,
path: Path,
}
}
enum ResolutionError<'a> {
- /// error E0401: can't use type or const parameters from outer function
+ /// Error E0401: can't use type or const parameters from outer function.
GenericParamsFromOuterFunction(Def),
- /// error E0403: the name is already used for a type/const parameter in this list of
- /// generic parameters
+ /// Error E0403: the name is already used for a type or const parameter in this generic
+ /// parameter list.
NameAlreadyUsedInParameterList(Name, &'a Span),
- /// error E0407: method is not a member of trait
+ /// Error E0407: method is not a member of trait.
MethodNotMemberOfTrait(Name, &'a str),
- /// error E0437: type is not a member of trait
+ /// Error E0437: type is not a member of trait.
TypeNotMemberOfTrait(Name, &'a str),
- /// error E0438: const is not a member of trait
+ /// Error E0438: const is not a member of trait.
ConstNotMemberOfTrait(Name, &'a str),
- /// error E0408: variable `{}` is not bound in all patterns
+ /// Error E0408: variable `{}` is not bound in all patterns.
VariableNotBoundInPattern(&'a BindingError),
- /// error E0409: variable `{}` is bound in inconsistent ways within the same match arm
+ /// Error E0409: variable `{}` is bound in inconsistent ways within the same match arm.
VariableBoundWithDifferentMode(Name, Span),
- /// error E0415: identifier is bound more than once in this parameter list
+ /// Error E0415: identifier is bound more than once in this parameter list.
IdentifierBoundMoreThanOnceInParameterList(&'a str),
- /// error E0416: identifier is bound more than once in the same pattern
+ /// Error E0416: identifier is bound more than once in the same pattern.
IdentifierBoundMoreThanOnceInSamePattern(&'a str),
- /// error E0426: use of undeclared label
+ /// Error E0426: use of undeclared label.
UndeclaredLabel(&'a str, Option<Name>),
- /// error E0429: `self` imports are only allowed within a { } list
+ /// Error E0429: `self` imports are only allowed within a `{ }` list.
SelfImportsOnlyAllowedWithin,
- /// error E0430: `self` import can only appear once in the list
+ /// Error E0430: `self` import can only appear once in the list.
SelfImportCanOnlyAppearOnceInTheList,
- /// error E0431: `self` import can only appear in an import list with a non-empty prefix
+ /// Error E0431: `self` import can only appear in an import list with a non-empty prefix.
SelfImportOnlyInImportListWithNonEmptyPrefix,
- /// error E0433: failed to resolve
+ /// Error E0433: failed to resolve.
FailedToResolve(&'a str),
- /// error E0434: can't capture dynamic environment in a fn item
+ /// Error E0434: can't capture dynamic environment in a fn item.
CannotCaptureDynamicEnvironmentInFnItem,
- /// error E0435: attempt to use a non-constant value in a constant
+ /// Error E0435: attempt to use a non-constant value in a constant.
AttemptToUseNonConstantValueInConstant,
- /// error E0530: X bindings cannot shadow Ys
+ /// Error E0530: `X` bindings cannot shadow `Y`s.
BindingShadowsSomethingUnacceptable(&'a str, Name, &'a NameBinding<'a>),
- /// error E0128: type parameters with a default cannot use forward declared identifiers
+ /// Error E0128: type parameters with a default cannot use forward-declared identifiers.
ForwardDeclaredTyParam, // FIXME(const_generics:defaults)
}
-/// Combines an error with provided span and emits it
+/// Combines an error with provided span and emits it.
///
/// This takes the error provided, combines it with the span and any additional spans inside the
/// error and emits it.
/// Adjust the impl span so that just the `impl` keyword is taken by removing
/// everything after `<` (`"impl<T> Iterator for A<T> {}" -> "impl"`) and
-/// everything after the first whitespace (`"impl Iterator for A" -> "impl"`)
+/// everything after the first whitespace (`"impl Iterator for A" -> "impl"`).
///
-/// Attention: The method used is very fragile since it essentially duplicates the work of the
+/// *Attention*: the method used is very fragile since it essentially duplicates the work of the
/// parser. If you need to use this function or something similar, please consider updating the
-/// source_map functions and this function to something more robust.
+/// `source_map` functions and this function to something more robust.
fn reduce_impl_span_to_impl_keyword(cm: &SourceMap, impl_span: Span) -> Span {
let impl_span = cm.span_until_char(impl_span, '<');
let impl_span = cm.span_until_whitespace(impl_span);
}
}
-/// This thing walks the whole crate in DFS manner, visiting each item, resolving names as it goes.
+/// Walks the whole crate in DFS order, visiting each item, resolving names as it goes.
impl<'a, 'tcx> Visitor<'tcx> for Resolver<'a> {
fn visit_item(&mut self, item: &'tcx Item) {
self.resolve_item(item);
/// No translation needs to be applied.
NormalRibKind,
- /// We passed through a closure scope at the given node ID.
+ /// We passed through a closure scope at the given `NodeId`.
/// Translate upvars as appropriate.
ClosureRibKind(NodeId /* func id */),
ForwardTyParamBanRibKind,
}
-/// One local scope.
+/// A single local scope.
///
/// A rib represents a scope names can live in. Note that these appear in many places, not just
/// around braces. At any place where the list of accessible names (of the given namespace)
}
enum ModuleKind {
- /// An anonymous module, eg. just a block.
+ /// An anonymous module; e.g., just a block.
///
/// ```
/// fn main() {
struct UseError<'a> {
err: DiagnosticBuilder<'a>,
- /// Attach `use` statements for these candidates
+ /// Attach `use` statements for these candidates.
candidates: Vec<ImportSuggestion>,
- /// The node id of the module to place the use statements in
+ /// The `NodeId` of the module to place the use-statements in.
node_id: NodeId,
- /// Whether the diagnostic should state that it's "better"
+ /// Whether the diagnostic should state that it's "better".
better: bool,
}
prelude: Option<Module<'a>>,
pub extern_prelude: FxHashMap<Ident, ExternPreludeEntry<'a>>,
- /// n.b. This is used only for better diagnostics, not name resolution itself.
+ /// N.B., this is used only for better diagnostics, not name resolution itself.
has_self: FxHashSet<DefId>,
/// Names of fields of an item `DefId` accessible with dot syntax.
/// they are used (in a `break` or `continue` statement)
pub unused_labels: FxHashMap<NodeId, Span>,
- /// privacy errors are delayed until the end in order to deduplicate them
+ /// Privacy errors are delayed until the end in order to deduplicate them.
privacy_errors: Vec<PrivacyError<'a>>,
- /// ambiguity errors are delayed for deduplication
+ /// Ambiguity errors are delayed for deduplication.
ambiguity_errors: Vec<AmbiguityError<'a>>,
- /// `use` injections are delayed for better placement and deduplication
+ /// `use` injections are delayed for better placement and deduplication.
use_injections: Vec<UseError<'a>>,
- /// crate-local macro expanded `macro_export` referred to by a module-relative path
+ /// Crate-local macro expanded `macro_export` referred to by a module-relative path.
macro_expanded_macro_export_errors: BTreeSet<(Span, Span)>,
arenas: &'a ResolverArenas<'a>,
potentially_unused_imports: Vec<&'a ImportDirective<'a>>,
- /// This table maps struct IDs into struct constructor IDs,
+ /// Table for mapping struct IDs into struct constructor IDs,
/// it's not used during normal resolution, only for better error reporting.
struct_constructors: DefIdMap<(Def, ty::Visibility)>,
- /// Only used for better errors on `fn(): fn()`
+ /// Only used for better errors on `fn(): fn()`.
current_type_ascription: Vec<Span>,
injected_crate: Option<Module<'a>>,
}
-/// Nothing really interesting here, it just provides memory for the rest of the crate.
+/// Nothing really interesting here; it just provides memory for the rest of the crate.
#[derive(Default)]
pub struct ResolverArenas<'a> {
modules: arena::TypedArena<ModuleData<'a>>,
}
impl<'a> Resolver<'a> {
- /// Rustdoc uses this to resolve things in a recoverable way. ResolutionError<'a>
+ /// Rustdoc uses this to resolve things in a recoverable way. `ResolutionError<'a>`
/// isn't something that can be returned because it can't be made to live that long,
/// and also it's a private type. Fortunately rustdoc doesn't need to know the error,
/// just that an error occurred.
}
}
- /// Searches the current set of local scopes for labels. Returns the first non-None label that
+ /// Searches the current set of local scopes for labels. Returns the first non-`None` label that
/// is returned by the given predicate function
///
/// Stops after meeting a closure.
result
}
- /// This is called to resolve a trait reference from an `impl` (i.e., `impl Trait for Foo`)
+ /// This is called to resolve a trait reference from an `impl` (i.e., `impl Trait for Foo`).
fn with_optional_trait_ref<T, F>(&mut self, opt_trait_ref: Option<&TraitRef>, f: F) -> T
where F: FnOnce(&mut Resolver<'_>, Option<DefId>) -> T
{
/// sometimes needed for the lint that recommends rewriting
/// absolute paths to `crate`, so that it knows how to frame the
/// suggestion. If you are just resolving a path like `foo::bar`
- /// that appears...somewhere, though, then you just want
+ /// that appears in an arbitrary location, then you just want
/// `CrateLint::SimplePath`, which is what `smart_resolve_path`
/// already provides.
fn smart_resolve_path_with_crate_lint(
// collect results based on the filter function
if ident.name == lookup_ident.name && ns == namespace {
- if filter_fn(name_binding.def()) {
+ let def = name_binding.def();
+ if filter_fn(def) {
// create the path
let mut segms = path_segments.clone();
if lookup_ident.span.rust_2018() {
// declared as public (due to pruning, we don't explore
// outside crate private modules => no need to check this)
if !in_module_is_extern || name_binding.vis == ty::Visibility::Public {
- candidates.push(ImportSuggestion { path });
+ let did = match def {
+ Def::StructCtor(did, _) | Def::VariantCtor(did, _) =>
+ self.parent(did),
+ _ => def.opt_def_id(),
+ };
+ candidates.push(ImportSuggestion { did, path });
}
}
}
/// When name resolution fails, this method can be used to look up candidate
/// entities with the expected name. It allows filtering them using the
/// supplied predicate (which should be used to only accept the types of
- /// definitions expected e.g., traits). The lookup spans across all crates.
+ /// definitions expected, e.g., traits). The lookup spans across all crates.
///
- /// NOTE: The method does not look into imports, but this is not a problem,
+ /// N.B., the method does not look into imports, but this is not a problem,
/// since we report the definitions (thus, the de-aliased imports).
fn lookup_import_candidates<FilterFn>(&mut self,
lookup_ident: Ident,
span: name_binding.span,
segments: path_segments,
};
- result = Some((module, ImportSuggestion { path }));
+ let did = module.def().and_then(|def| def.opt_def_id());
+ result = Some((module, ImportSuggestion { did, path }));
} else {
// add the module to the lookup
if seen_modules.insert(module.def_id().unwrap()) {
.collect::<Vec<_>>())
}
-/// Get the stringified path for an enum from an `ImportSuggestion` for an enum variant.
+/// Gets the stringified path for an enum from an `ImportSuggestion` for an enum variant.
fn import_candidate_to_enum_paths(suggestion: &ImportSuggestion) -> (String, String) {
let variant_path = &suggestion.path;
let variant_path_string = path_names_to_string(variant_path);
#[derive(Copy, Clone, Debug)]
enum CrateLint {
- /// Do not issue the lint
+ /// Do not issue the lint.
No,
- /// This lint applies to some random path like `impl ::foo::Bar`
- /// or whatever. In this case, we can take the span of that path.
+ /// This lint applies to some arbitrary path; e.g., `impl ::foo::Bar`.
+ /// In this case, we can take the span of that path.
SimplePath(NodeId),
/// This lint comes from a `use` statement. In this case, what we
use syntax::visit::Visitor;
use syntax::util::lev_distance::find_best_match_for_name;
use syntax_pos::{Span, DUMMY_SP};
-use crate::errors::Applicability;
+use errors::Applicability;
use std::cell::Cell;
use std::{mem, ptr};
#[derive(Clone, Debug)]
pub struct InvocationData<'a> {
def_index: DefIndex,
- /// Module in which the macro was invoked.
+ /// The module in which the macro was invoked.
crate module: Cell<Module<'a>>,
- /// Legacy scope in which the macro was invoked.
+ /// The legacy scope in which the macro was invoked.
/// The invocation path is resolved in this scope.
crate parent_legacy_scope: Cell<LegacyScope<'a>>,
- /// Legacy scope *produced* by expanding this macro invocation,
+ /// The legacy scope *produced* by expanding this macro invocation,
/// includes all the macro_rules items, other invocations, etc generated by it.
/// `None` if the macro is not expanded yet.
crate output_legacy_scope: Cell<Option<LegacyScope<'a>>>,
ident: Ident,
}
-/// Scope introduced by a `macro_rules!` macro.
-/// Starts at the macro's definition and ends at the end of the macro's parent module
-/// (named or unnamed), or even further if it escapes with `#[macro_use]`.
+/// The scope introduced by a `macro_rules!` macro.
+/// This starts at the macro's definition and ends at the end of the macro's parent
+/// module (named or unnamed), or even further if it escapes with `#[macro_use]`.
/// Some macro invocations need to introduce legacy scopes too because they
-/// potentially can expand into macro definitions.
+/// can potentially expand into macro definitions.
#[derive(Copy, Clone, Debug)]
pub enum LegacyScope<'a> {
- /// Created when invocation data is allocated in the arena,
+ /// Created when invocation data is allocated in the arena;
/// must be replaced with a proper scope later.
Uninitialized,
/// Empty "root" scope at the crate start containing no names.
Empty,
- /// Scope introduced by a `macro_rules!` macro definition.
+ /// The scope introduced by a `macro_rules!` macro definition.
Binding(&'a LegacyBinding<'a>),
- /// Scope introduced by a macro invocation that can potentially
+ /// The scope introduced by a macro invocation that can potentially
/// create a `macro_rules!` macro definition.
Invocation(&'a InvocationData<'a>),
}
/// One import directive.
#[derive(Debug,Clone)]
crate struct ImportDirective<'a> {
- /// The id of the `extern crate`, `UseTree` etc that imported this `ImportDirective`.
+ /// The ID of the `extern crate`, `UseTree`, etc., that imported this `ImportDirective`.
///
/// In the case where the `ImportDirective` was expanded from a "nested" use tree,
- /// this id is the id of the leaf tree. For example:
+ /// this ID is the ID of the leaf tree. For example:
///
/// ```ignore (pacify the mercilous tidy)
/// use foo::bar::{a, b}
/// ```
///
- /// If this is the import directive for `foo::bar::a`, we would have the id of the `UseTree`
+ /// If this is the import directive for `foo::bar::a`, we would have the ID of the `UseTree`
/// for `a` in this field.
pub id: NodeId,
/// The `id` of the "root" use-kind -- this is always the same as
/// `id` except in the case of "nested" use trees, in which case
/// it will be the `id` of the root use tree. e.g., in the example
- /// from `id`, this would be the id of the `use foo::bar`
+ /// from `id`, this would be the ID of the `use foo::bar`
/// `UseTree` node.
pub root_id: NodeId,
//! Write the output of rustc's analysis to an implementor of Dump.
//!
//! Dumping the analysis is implemented by walking the AST and getting a bunch of
-//! info out from all over the place. We use Def IDs to identify objects. The
+//! info out from all over the place. We use `DefId`s to identify objects. The
//! tricky part is getting syntactic (span, source text) and semantic (reference
-//! Def IDs) information for parts of expressions which the compiler has discarded.
+//! `DefId`s) information for parts of expressions which the compiler has discarded.
//! E.g., in a path `foo::bar::baz`, the compiler only keeps a span for the whole
//! path and a reference to `baz`, but we want spans and references for all three
//! idents.
for (id, ident, ..) in collector.collected_idents {
let hir_id = self.tcx.hir().node_to_hir_id(id);
- let typ = match self.save_ctxt.tables.node_id_to_type_opt(hir_id) {
+ let typ = match self.save_ctxt.tables.node_type_opt(hir_id) {
Some(s) => s.to_string(),
None => continue,
};
PatKind::Struct(ref _path, ref fields, _) => {
// FIXME do something with _path?
let hir_id = self.tcx.hir().node_to_hir_id(p.id);
- let adt = match self.save_ctxt.tables.node_id_to_type_opt(hir_id) {
+ let adt = match self.save_ctxt.tables.node_type_opt(hir_id) {
Some(ty) => ty.ty_adt_def().unwrap(),
None => {
visit::walk_pat(self, p);
let hir_id = self.tcx.hir().node_to_hir_id(id);
let typ = self.save_ctxt
.tables
- .node_id_to_type_opt(hir_id)
+ .node_type_opt(hir_id)
.map(|t| t.to_string())
.unwrap_or_default();
value.push_str(": ");
_ => String::new(),
};
let hir_id = self.tcx.hir().node_to_hir_id(id);
- let typ = match self.save_ctxt.tables.node_id_to_type_opt(hir_id) {
+ let typ = match self.save_ctxt.tables.node_type_opt(hir_id) {
Some(typ) => {
let typ = typ.to_string();
if !value.is_empty() {
}
}
- /// Extract macro use and definition information from the AST node defined
+ /// Extracts macro use and definition information from the AST node defined
/// by the given NodeId, using the expansion information from the node's
/// span.
///
///
/// A use tree is an import that may contain nested braces (RFC 2128). The `use_tree` parameter
/// is the current use tree under scrutiny, while `id` and `prefix` are its corresponding node
- /// id and path. `root_item` is the topmost use tree in the hierarchy.
+ /// ID and path. `root_item` is the topmost use tree in the hierarchy.
///
/// If `use_tree` is a simple or glob import, it is dumped into the analysis data. Otherwise,
/// each child use tree is dumped recursively.
Some(def) if def != HirDef::Err => def,
_ => self.get_path_def(self.tcx.hir().get_parent_node(id)),
}
- },
+ }
+
Node::Expr(&hir::Expr {
node: hir::ExprKind::Struct(ref qpath, ..),
..
- }) |
+ }) => {
+ let hir_id = self.tcx.hir().node_to_hir_id(id);
+ self.tables.qpath_def(qpath, hir_id)
+ }
+
Node::Expr(&hir::Expr {
node: hir::ExprKind::Path(ref qpath),
..
/// Attempt to return MacroRef for any AST node.
///
/// For a given piece of AST defined by the supplied Span and NodeId,
- /// returns None if the node is not macro-generated or the span is malformed,
+ /// returns `None` if the node is not macro-generated or the span is malformed,
/// else uses the expansion callsite and callee to return some MacroRef.
pub fn get_macro_use_data(&self, span: Span) -> Option<MacroRef> {
if !generated_code(span) {
pub unit: Reg,
/// The total size of the argument, which can be:
- /// * equal to `unit.size` (one scalar/vector)
- /// * a multiple of `unit.size` (an array of scalar/vectors)
+ /// * equal to `unit.size` (one scalar/vector),
+ /// * a multiple of `unit.size` (an array of scalar/vectors),
/// * if `unit.kind` is `Integer`, the last element
/// can be shorter, i.e., `{ i64, i64, i32 }` for
- /// 64-bit integers with a total size of 20 bytes
+ /// 64-bit integers with a total size of 20 bytes.
pub total: Size,
}
}
}
-/// Return value from the `homogeneous_aggregate` test function.
+/// The return value of the `homogeneous_aggregate` test function.
#[derive(Copy, Clone, Debug)]
pub enum HomogeneousAggregate {
/// Yes, all the "leaf fields" of this struct are passed in the
}
}
- /// True if this layout is an aggregate containing fields of only
+ /// Returns `true` if this layout is an aggregate containing fields of only
/// a single type (e.g., `(u32, u32)`). Such aggregates are often
/// special-cased in ABIs.
///
/// Note: We generally ignore fields of zero-sized type when computing
- /// this value (cc #56877).
+ /// this value (see #56877).
///
/// This is public so that it can be used in unit tests, but
/// should generally only be relevant to the ABI details of
Ok(dl)
}
- /// Return exclusive upper bound on object size.
+ /// Returns exclusive upper bound on object size.
///
/// The theoretical maximum object size is defined as the maximum positive `isize` value.
/// This ensures that the `offset` semantics remain well-defined by allowing it to correctly
self.bytes() * 8
}
- /// Compute the best alignment possible for the given offset
+ /// Computes the best alignment possible for the given offset
/// (the largest power of two that the offset is a multiple of).
///
/// N.B., for an offset of `0`, this happens to return `2^64`.
}
}
- /// Find the smallest Integer type which can represent the signed value.
+ /// Finds the smallest Integer type which can represent the signed value.
pub fn fit_signed(x: i128) -> Integer {
match x {
-0x0000_0000_0000_0080..=0x0000_0000_0000_007f => I8,
}
}
- /// Find the smallest Integer type which can represent the unsigned value.
+ /// Finds the smallest Integer type which can represent the unsigned value.
pub fn fit_unsigned(x: u128) -> Integer {
match x {
0..=0x0000_0000_0000_00ff => I8,
}
}
- /// Find the smallest integer with the given alignment.
+ /// Finds the smallest integer with the given alignment.
pub fn for_align<C: HasDataLayout>(cx: &C, wanted: Align) -> Option<Integer> {
let dl = cx.data_layout();
}
}
- /// Get source indices of the fields by increasing offsets.
+ /// Gets source indices of the fields by increasing offsets.
#[inline]
pub fn index_by_increasing_offset<'a>(&'a self) -> impl Iterator<Item=usize>+'a {
let mut inverse_small = [0u8; 64];
}
impl Abi {
- /// Returns true if the layout corresponds to an unsized type.
+ /// Returns `true` if the layout corresponds to an unsized type.
pub fn is_unsized(&self) -> bool {
match *self {
Abi::Uninhabited |
}
}
- /// Returns true if this is a single signed integer scalar
+ /// Returns `true` if this is a single signed integer scalar.
pub fn is_signed(&self) -> bool {
match *self {
Abi::Scalar(ref scal) => match scal.value {
}
}
- /// Returns true if this is an uninhabited type
+ /// Returns `true` if this is an uninhabited type.
pub fn is_uninhabited(&self) -> bool {
match *self {
Abi::Uninhabited => true,
}
impl<'a, Ty> TyLayout<'a, Ty> {
- /// Returns true if the layout corresponds to an unsized type.
+ /// Returns `true` if the layout corresponds to an unsized type.
pub fn is_unsized(&self) -> bool {
self.abi.is_unsized()
}
- /// Returns true if the type is a ZST and not unsized.
+ /// Returns `true` if the type is a ZST and not unsized.
pub fn is_zst(&self) -> bool {
match self.abi {
Abi::Scalar(_) |
//! compiler 'backend', though LLVM is rustc's backend, so rustc_target
//! is really just odds-and-ends relating to code gen and linking.
//! This crate mostly exists to make rustc smaller, so we might put
-//! more 'stuff' here in the future. It does not have a dependency on
+//! more 'stuff' here in the future. It does not have a dependency on
//! LLVM.
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
pub pre_link_objects_exe_crt: Vec<String>, // ... when linking an executable with a bundled crt
pub pre_link_objects_dll: Vec<String>, // ... when linking a dylib
/// Linker arguments that are unconditionally passed after any
- /// user-defined but before post_link_objects. Standard platform
+ /// user-defined but before post_link_objects. Standard platform
/// libraries that should be always be linked to, usually go here.
pub late_link_args: LinkArgs,
/// Objects to link after all others, always found within the
pub allow_asm: bool,
/// Whether the target uses a custom unwind resumption routine.
/// By default LLVM lowers `resume` instructions into calls to `_Unwind_Resume`
- /// defined in libgcc. If this option is enabled, the target must provide
+ /// defined in libgcc. If this option is enabled, the target must provide
/// `eh_unwind_resume` lang item.
pub custom_unwind_resume: bool,
/// for this target unconditionally.
pub no_builtins: bool,
- /// Whether to lower 128-bit operations to compiler_builtins calls. Use if
+ /// Whether to lower 128-bit operations to compiler_builtins calls. Use if
/// your backend only supports 64-bit and smaller math.
pub i128_lowering: bool,
}
impl Default for TargetOptions {
- /// Create a set of "sane defaults" for any target. This is still
+ /// Creates a set of "sane defaults" for any target. This is still
/// incomplete, and if used for compilation, will certainly not work.
fn default() -> TargetOptions {
TargetOptions {
abi.generic() || !self.options.abi_blacklist.contains(&abi)
}
- /// Load a target descriptor from a JSON object.
+ /// Loads a target descriptor from a JSON object.
pub fn from_json(obj: Json) -> TargetResult {
// While ugly, this code must remain this way to retain
// compatibility with existing JSON fields and the internal
// UEFI uses COFF/PE32+ format for binaries. All binaries must be statically linked. No dynamic
// linker is supported. As native to COFF, binaries are position-dependent, but will be relocated
// by the loader if the pre-chosen memory location is already in use.
-// UEFI forbids running code on anything but the boot-CPU. Not interrupts are allowed other than
+// UEFI forbids running code on anything but the boot-CPU. No interrupts are allowed other than
// the timer-interrupt. Device-drivers are required to use polling-based models. Furthermore, all
// code runs in the same environment, no process separation is supported.
"/NOLOGO".to_string(),
// UEFI is fully compatible to non-executable data pages. Tell the compiler that
- // non-code sections can be marked as non-executable, including stack pages.
+ // non-code sections can be marked as non-executable, including stack pages. In fact,
+ // firmware might enforce this, so we better let the linker know about this, so it
+ // will fail if the compiler ever tries placing code on the stack (e.g., trampoline
+ // constructs and alike).
"/NXCOMPAT".to_string(),
// There is no runtime for UEFI targets, prevent them from being linked. UEFI targets
base.cpu = "x86-64".to_string();
base.max_atomic_width = Some(64);
- // We disable MMX and SSE for now. UEFI does not prevent these from being used, but there have
- // been reports to GRUB that some firmware does not initialize the FP exception handlers
- // properly. Therefore, using FP coprocessors will end you up at random memory locations when
- // you throw FP exceptions.
- // To be safe, we disable them for now and force soft-float. This can be revisited when we
- // have more test coverage. Disabling FP served GRUB well so far, so it should be good for us
- // as well.
+ // We disable MMX and SSE for now, even though UEFI allows using them. Problem is, you have to
+ // enable these CPU features explicitly before their first use, otherwise their instructions
+ // will trigger an exception. Rust does not inject any code that enables AVX/MMX/SSE
+ // instruction sets, so this must be done by the firmware. However, existing firmware is known
+ // to leave these uninitialized, thus triggering exceptions if we make use of them. Which is
+ // why we avoid them and instead use soft-floats. This is also what GRUB and friends did so
+ // far.
+ // If you initialize FP units yourself, you can override these flags with custom linker
+ // arguments, thus giving you access to full MMX/SSE acceleration.
base.features = "-mmx,-sse,+soft-float".to_string();
// UEFI systems run without a host OS, hence we cannot assume any code locality. We must tell
// places no locality-restrictions, so it fits well here.
base.code_model = Some("large".to_string());
- // UEFI mostly mirrors the calling-conventions used on windows. In case of x86-64 this means
- // small structs will be returned as int. This shouldn't matter much, since the restrictions
- // placed by the UEFI specifications forbid any ABI to return structures.
+ // UEFI mirrors the calling-conventions used on windows. In case of x86-64 this means small
+ // structs will be returned as int. This shouldn't matter much, since the restrictions placed
+ // by the UEFI specifications forbid any ABI to return structures.
base.abi_return_struct_as_int = true;
Ok(Target {
}
impl context::ContextOps<ChalkArenas<'gcx>> for ChalkContext<'cx, 'gcx> {
- /// True if this is a coinductive goal: basically proving that an auto trait
+ /// Returns `true` if this is a coinductive goal: basically proving that an auto trait
/// is implemented or proving that a trait reference is well-formed.
fn is_coinductive(
&self,
}
}
- /// Create an inference table for processing a new goal and instantiate that goal
+ /// Creates an inference table for processing a new goal and instantiates that goal
/// in that context, returning "all the pieces".
///
/// More specifically: given a u-canonical goal `arg`, creates a
/// each bound variable in `arg` to a fresh inference variable
/// from T. Returns:
///
- /// - the table `T`
- /// - the substitution `S`
- /// - the environment and goal found by substitution `S` into `arg`
+ /// - the table `T`,
+ /// - the substitution `S`,
+ /// - the environment and goal found by substitution `S` into `arg`.
fn instantiate_ucanonical_goal<R>(
&self,
arg: &Canonical<'gcx, InEnvironment<'gcx, Goal<'gcx>>>,
})
}
- /// True if this solution has no region constraints.
+ /// Returns `true` if this solution has no region constraints.
fn empty_constraints(ccs: &Canonical<'gcx, ConstrainedSubst<'gcx>>) -> bool {
ccs.value.constraints.is_empty()
}
)
}
-/// Return a set of constraints that needs to be satisfied in
+/// Returns a set of constraints that needs to be satisfied in
/// order for `ty` to be valid for destruction.
fn dtorck_constraint_for_ty<'a, 'gcx, 'tcx>(
tcx: TyCtxt<'a, 'gcx, 'tcx>,
//! Provider for the `implied_outlives_bounds` query.
//! Do not call this query directory. See [`rustc::traits::query::implied_outlives_bounds`].
+use rustc::hir;
use rustc::infer::InferCtxt;
use rustc::infer::canonical::{self, Canonical};
use rustc::traits::{TraitEngine, TraitEngineExt};
use rustc::ty::query::Providers;
use rustc::ty::wf;
use smallvec::{SmallVec, smallvec};
-use syntax::ast::DUMMY_NODE_ID;
use syntax::source_map::DUMMY_SP;
use rustc::traits::FulfillmentContext;
// unresolved inference variables here anyway, but there might be
// during typeck under some circumstances.)
let obligations =
- wf::obligations(infcx, param_env, DUMMY_NODE_ID, ty, DUMMY_SP).unwrap_or(vec![]);
+ wf::obligations(infcx, param_env, hir::DUMMY_HIR_ID, ty, DUMMY_SP).unwrap_or(vec![]);
// N.B., all of these predicates *ought* to be easily proven
// true. In fact, their correctness is (mostly) implied by
+use rustc::hir;
use rustc::infer::canonical::{Canonical, QueryResponse};
use rustc::traits::query::{normalize::NormalizationResult, CanonicalProjectionGoal, NoSolution};
use rustc::traits::{self, ObligationCause, SelectionContext, TraitEngineExt};
use rustc::ty::{ParamEnvAnd, TyCtxt};
use rustc_data_structures::sync::Lrc;
use std::sync::atomic::Ordering;
-use syntax::ast::DUMMY_NODE_ID;
use syntax_pos::DUMMY_SP;
crate fn provide(p: &mut Providers<'_>) {
value: goal,
}| {
let selcx = &mut SelectionContext::new(infcx);
- let cause = ObligationCause::misc(DUMMY_SP, DUMMY_NODE_ID);
+ let cause = ObligationCause::misc(DUMMY_SP, hir::DUMMY_HIR_ID);
let mut obligations = vec![];
let answer = traits::normalize_projection_type(
selcx,
use rustc::infer::at::ToTrace;
use rustc::infer::canonical::{Canonical, QueryResponse};
use rustc::infer::InferCtxt;
+use rustc::hir;
use rustc::hir::def_id::DefId;
use rustc::traits::query::type_op::ascribe_user_type::AscribeUserType;
use rustc::traits::query::type_op::eq::Eq;
};
use rustc_data_structures::sync::Lrc;
use std::fmt;
-use syntax::ast;
use syntax_pos::DUMMY_SP;
crate fn provide(p: &mut Providers<'_>) {
self.infcx
.partially_normalize_associated_types_in(
DUMMY_SP,
- ast::CRATE_NODE_ID,
+ hir::CRATE_HIR_ID,
self.param_env,
&value,
)
impl_trait
}
- /// Check that the correct number of generic arguments have been provided.
+ /// Checks that the correct number of generic arguments have been provided.
/// Used specifically for function calls.
pub fn check_generic_arg_count_for_call(
tcx: TyCtxt,
).0
}
- /// Check that the correct number of generic arguments have been provided.
+ /// Checks that the correct number of generic arguments have been provided.
/// This is used both for datatypes and function calls.
fn check_generic_arg_count(
tcx: TyCtxt,
/// Creates the relevant generic argument substitutions
/// corresponding to a set of generic parameters. This is a
- /// rather complex little function. Let me try to explain the
- /// role of each of its parameters:
+ /// rather complex function. Let us try to explain the role
+ /// of each of its parameters:
///
/// To start, we are given the `def_id` of the thing we are
/// creating the substitutions for, and a partial set of
/// we can append those and move on. Otherwise, it invokes the
/// three callback functions:
///
- /// - `args_for_def_id`: given the def-id `P`, supplies back the
+ /// - `args_for_def_id`: given the `DefId` `P`, supplies back the
/// generic arguments that were given to that parent from within
- /// the path; so e.g., if you have `<T as Foo>::Bar`, the def-id
+ /// the path; so e.g., if you have `<T as Foo>::Bar`, the `DefId`
/// might refer to the trait `Foo`, and the arguments might be
/// `[T]`. The boolean value indicates whether to infer values
/// for arguments whose values were not explicitly provided.
/// bound to a valid trait type. Returns the def_id for the defining trait.
/// The type _cannot_ be a type other than a trait type.
///
- /// If the `projections` argument is `None`, then assoc type bindings like `Foo<T=X>`
+ /// If the `projections` argument is `None`, then assoc type bindings like `Foo<T = X>`
/// are disallowed. Otherwise, they are pushed onto the vector given.
pub fn instantiate_mono_trait_ref(&self,
trait_ref: &hir::TraitRef,
binding.item_name, binding.span)
}?;
+ let hir_ref_id = self.tcx().hir().node_to_hir_id(ref_id);
let (assoc_ident, def_scope) =
- tcx.adjust_ident(binding.item_name, candidate.def_id(), ref_id);
+ tcx.adjust_ident(binding.item_name, candidate.def_id(), hir_ref_id);
let assoc_ty = tcx.associated_items(candidate.def_id()).find(|i| {
i.kind == ty::AssociatedKind::Type && i.ident.modern() == assoc_ident
}).expect("missing associated type");
};
let trait_did = bound.def_id();
- let (assoc_ident, def_scope) = tcx.adjust_ident(assoc_ident, trait_did, ref_id);
+ let hir_ref_id = self.tcx().hir().node_to_hir_id(ref_id);
+ let (assoc_ident, def_scope) = tcx.adjust_ident(assoc_ident, trait_did, hir_ref_id);
let item = tcx.associated_items(trait_did).find(|i| {
Namespace::from(i.kind) == Namespace::Type &&
i.ident.modern() == assoc_ident
CoerceMany::with_coercion_sites(coerce_first, arms)
};
+ let mut other_arms = vec![]; // used only for diagnostics
+ let mut prior_arm_ty = None;
for (i, (arm, pats_diverge)) in arms.iter().zip(all_arm_pats_diverge).enumerate() {
if let Some(ref g) = arm.guard {
self.diverges.set(pats_diverge);
_ => false
};
+ let arm_span = if let hir::ExprKind::Block(ref blk, _) = arm.body.node {
+ // Point at the block expr instead of the entire block
+ blk.expr.as_ref().map(|e| e.span).unwrap_or(arm.body.span)
+ } else {
+ arm.body.span
+ };
if is_if_let_fallback {
let cause = self.cause(expr.span, ObligationCauseCode::IfExpressionWithNoElse);
assert!(arm_ty.is_unit());
coercion.coerce_forced_unit(self, &cause, &mut |_| (), true);
} else {
- let cause = self.cause(expr.span, ObligationCauseCode::MatchExpressionArm {
- arm_span: arm.body.span,
- source: match_src
- });
+ let cause = if i == 0 {
+ // The reason for the first arm to fail is not that the match arms diverge,
+ // but rather that there's a prior obligation that doesn't hold.
+ self.cause(arm_span, ObligationCauseCode::BlockTailExpression(arm.body.id))
+ } else {
+ self.cause(expr.span, ObligationCauseCode::MatchExpressionArm {
+ arm_span,
+ source: match_src,
+ prior_arms: other_arms.clone(),
+ last_ty: prior_arm_ty.unwrap(),
+ })
+ };
coercion.coerce(self, &cause, &arm.body, arm_ty);
}
+ other_arms.push(arm_span);
+ if other_arms.len() > 5 {
+ other_arms.remove(0);
+ }
+ prior_arm_ty = Some(arm_ty);
}
// We won't diverge unless the discriminant or all arms diverge.
use super::{FnCtxt, PlaceOp, Needs};
use super::method::MethodCallee;
+use rustc::hir;
use rustc::infer::{InferCtxt, InferOk};
use rustc::session::DiagnosticMessageId;
use rustc::traits::{self, TraitEngine};
use rustc::ty::adjustment::{Adjustment, Adjust, OverloadedDeref};
use syntax_pos::Span;
-use syntax::ast::{self, Ident};
+use syntax::ast::Ident;
use std::iter;
pub struct Autoderef<'a, 'gcx: 'tcx, 'tcx: 'a> {
infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
steps: Vec<(Ty<'tcx>, AutoderefKind)>,
cur_ty: Ty<'tcx>,
impl<'a, 'gcx, 'tcx> Autoderef<'a, 'gcx, 'tcx> {
pub fn new(infcx: &'a InferCtxt<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
span: Span,
base_ty: Ty<'tcx>)
-> Autoderef<'a, 'gcx, 'tcx>
use rustc::hir;
-/// Check that it is legal to call methods of the trait corresponding
+/// Checks that it is legal to call methods of the trait corresponding
/// to `trait_id` (this only cares about the trait, not the specific
-/// method that is called)
+/// method that is called).
pub fn check_legal_trait_for_method_call(tcx: TyCtxt, span: Span, trait_id: DefId) {
if tcx.lang_items().drop_trait() == Some(trait_id) {
struct_span_err!(tcx.sess, span, E0040, "explicit use of destructor method")
enum CallStep<'tcx> {
Builtin(Ty<'tcx>),
DeferredClosure(ty::FnSig<'tcx>),
- /// e.g., enum variant constructors
+ /// E.g., enum variant constructors.
Overloaded(MethodCallee<'tcx>),
}
CastToBool,
CastToChar,
DifferingKinds,
- /// Cast of thin to fat raw ptr (eg. `*const () as *const [u8]`)
+ /// Cast of thin to fat raw ptr (e.g., `*const () as *const [u8]`).
SizedUnsizedCast,
IllegalCast,
NeedDeref,
}
}
- /// Check a cast, and report an error if one exists. In some cases, this
+ /// Checks a cast, and reports an error if one exists. In some cases, this
/// can return Ok and create type errors in the fcx rather than returning
/// directly. coercion-cast is handled in check instead of here.
fn do_check(&self, fcx: &FnCtxt<'a, 'gcx, 'tcx>) -> Result<CastKind, CastError> {
///
/// # Arguments
///
- /// - `expr_def_id`: the def-id of the closure expression
+ /// - `expr_def_id`: the `DefId` of the closure expression
/// - `decl`: the HIR declaration of the closure
/// - `body`: the body of the closure
/// - `expected_sig`: the expected signature (if any). Note that
self.closure_sigs(expr_def_id, body, error_sig)
}
- /// Enforce the user's types against the expectation. See
+ /// Enforces the user's types against the expectation. See
/// `sig_of_closure_with_expectation` for details on the overall
/// strategy.
fn check_supplied_sig_against_expectation(
.liberate_late_bound_regions(expr_def_id, &bound_sig);
let liberated_sig = self.inh.normalize_associated_types_in(
body.value.span,
- body.value.id,
+ body.value.hir_id,
self.param_env,
&liberated_sig,
);
//! # Type Coercion
//!
//! Under certain circumstances we will coerce from one type to another,
-//! for example by auto-borrowing. This occurs in situations where the
+//! for example by auto-borrowing. This occurs in situations where the
//! compiler has a firm 'expected type' that was supplied from the user,
//! and where the actual type is similar to that expected type in purpose
//! but not in representation (so actual subtyping is inappropriate).
//! ## Reborrowing
//!
//! Note that if we are expecting a reference, we will *reborrow*
-//! even if the argument provided was already a reference. This is
+//! even if the argument provided was already a reference. This is
//! useful for freezing mut/const things (that is, when the expected is &T
//! but you have &const T or &mut T) and also for avoiding the linearity
-//! of mut things (when the expected is &mut T and you have &mut T). See
+//! of mut things (when the expected is &mut T and you have &mut T). See
//! the various `src/test/run-pass/coerce-reborrow-*.rs` tests for
//! examples of where this is useful.
//!
//! ## Subtle note
//!
//! When deciding what type coercions to consider, we do not attempt to
-//! resolve any type variables we may encounter. This is because `b`
+//! resolve any type variables we may encounter. This is because `b`
//! represents the expected type "as the user wrote it", meaning that if
//! the user defined a generic function like
//!
//! fn foo<A>(a: A, b: A) { ... }
//!
//! and then we wrote `foo(&1, @2)`, we will not auto-borrow
-//! either argument. In older code we went to some lengths to
+//! either argument. In older code we went to some lengths to
//! resolve the `b` variable, which could mean that we'd
//! auto-borrow later arguments but not earlier ones, which
//! seems very confusing.
//! foo::<&int>(@1, @2)
//!
//! then we *will* auto-borrow, because we can't distinguish this from a
-//! function that declared `&int`. This is inconsistent but it's easiest
+//! function that declared `&int`. This is inconsistent but it's easiest
//! at the moment. The right thing to do, I think, is to consider the
//! *unsubstituted* type when deciding whether to auto-borrow, but the
//! *substituted* type when considering the bounds and so forth. But most
//! of our methods don't give access to the unsubstituted type, and
-//! rightly so because they'd be error-prone. So maybe the thing to do is
+//! rightly so because they'd be error-prone. So maybe the thing to do is
//! to actually determine the kind of coercions that should occur
-//! separately and pass them in. Or maybe it's ok as is. Anyway, it's
-//! sort of a minor point so I've opted to leave it for later---after all
+//! separately and pass them in. Or maybe it's ok as is. Anyway, it's
+//! sort of a minor point so I've opted to leave it for later -- after all,
//! we may want to adjust precisely when coercions occur.
use crate::check::{FnCtxt, Needs};
}
}
- /// Return the "expected type" with which this coercion was
- /// constructed. This represents the "downward propagated" type
+ /// Returns the "expected type" with which this coercion was
+ /// constructed. This represents the "downward propagated" type
/// that was given to us at the start of typing whatever construct
/// we are typing (e.g., the match expression).
///
///
/// # Parameters
///
-/// - impl_m: type of the method we are checking
-/// - impl_m_span: span to use for reporting errors
-/// - trait_m: the method in the trait
-/// - impl_trait_ref: the TraitRef corresponding to the trait implementation
+/// - `impl_m`: type of the method we are checking
+/// - `impl_m_span`: span to use for reporting errors
+/// - `trait_m`: the method in the trait
+/// - `impl_trait_ref`: the TraitRef corresponding to the trait implementation
pub fn compare_impl_method<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
impl_m: &ty::AssociatedItem,
// `ObligationCause` (and the `FnCtxt`). This is what
// `regionck_item` expects.
let impl_m_node_id = tcx.hir().as_local_node_id(impl_m.def_id).unwrap();
+ let impl_m_hir_id = tcx.hir().node_to_hir_id(impl_m_node_id);
let cause = ObligationCause {
span: impl_m_span,
- body_id: impl_m_node_id,
+ body_id: impl_m_hir_id,
code: ObligationCauseCode::CompareImplMethodObligation {
item_name: impl_m.ident.name,
impl_item_def_id: impl_m.def_id,
// Construct trait parameter environment and then shift it into the placeholder viewpoint.
// The key step here is to update the caller_bounds's predicates to be
// the new hybrid bounds we computed.
- let normalize_cause = traits::ObligationCause::misc(impl_m_span, impl_m_node_id);
+ let normalize_cause = traits::ObligationCause::misc(impl_m_span, impl_m_hir_id);
let param_env = ty::ParamEnv::new(
tcx.intern_predicates(&hybrid_preds.predicates),
Reveal::UserFacing,
);
let impl_sig =
inh.normalize_associated_types_in(impl_m_span,
- impl_m_node_id,
+ impl_m_hir_id,
param_env,
&impl_sig);
let impl_fty = tcx.mk_fn_ptr(ty::Binder::bind(impl_sig));
trait_sig.subst(tcx, trait_to_skol_substs);
let trait_sig =
inh.normalize_associated_types_in(impl_m_span,
- impl_m_node_id,
+ impl_m_hir_id,
param_env,
&trait_sig);
let trait_fty = tcx.mk_fn_ptr(ty::Binder::bind(trait_sig));
// Finally, resolve all regions. This catches wily misuses of
// lifetime parameters.
- let fcx = FnCtxt::new(&inh, param_env, impl_m_node_id);
- fcx.regionck_item(impl_m_node_id, impl_m_span, &[]);
+ let fcx = FnCtxt::new(&inh, param_env, impl_m_hir_id);
+ fcx.regionck_item(impl_m_hir_id, impl_m_span, &[]);
Ok(())
})
// Create a parameter environment that represents the implementation's
// method.
let impl_c_node_id = tcx.hir().as_local_node_id(impl_c.def_id).unwrap();
+ let impl_c_hir_id = tcx.hir().node_to_hir_id(impl_c_node_id);
// Compute placeholder form of impl and trait const tys.
let impl_ty = tcx.type_of(impl_c.def_id);
let trait_ty = tcx.type_of(trait_c.def_id).subst(tcx, trait_to_impl_substs);
- let mut cause = ObligationCause::misc(impl_c_span, impl_c_node_id);
+ let mut cause = ObligationCause::misc(impl_c_span, impl_c_hir_id);
// There is no "body" here, so just pass dummy id.
let impl_ty = inh.normalize_associated_types_in(impl_c_span,
- impl_c_node_id,
+ impl_c_hir_id,
param_env,
&impl_ty);
debug!("compare_const_impl: impl_ty={:?}", impl_ty);
let trait_ty = inh.normalize_associated_types_in(impl_c_span,
- impl_c_node_id,
+ impl_c_hir_id,
param_env,
&trait_ty);
return;
}
- let fcx = FnCtxt::new(&inh, param_env, impl_c_node_id);
- fcx.regionck_item(impl_c_node_id, impl_c_span, &[]);
+ let fcx = FnCtxt::new(&inh, param_env, impl_c_hir_id);
+ fcx.regionck_item(impl_c_hir_id, impl_c_span, &[]);
});
}
/// ```
/// opt.map(|arg| { takes_ref(arg) });
/// ```
- fn can_use_as_ref(&self, expr: &hir::Expr) -> Option<(Span, &'static str, String)> {
+ fn can_use_as_ref(
+ &self,
+ expr: &hir::Expr,
+ ) -> Option<(Span, &'static str, String)> {
if let hir::ExprKind::Path(hir::QPath::Resolved(_, ref path)) = expr.node {
if let hir::def::Def::Local(id) = path.def {
let parent = self.tcx.hir().get_parent_node(id);
node: hir::ExprKind::MethodCall(path, span, expr),
..
})), 1) = (self.tcx.hir().find(parent), decl.inputs.len()) {
- let self_ty = self.tables.borrow().node_id_to_type(expr[0].hir_id);
+ let self_ty = self.tables.borrow().node_type(expr[0].hir_id);
let self_ty = format!("{:?}", self_ty);
let name = path.ident.as_str();
let is_as_ref_able = (
self_ty.starts_with("std::option::Option") ||
self_ty.starts_with("std::result::Result")
) && (name == "map" || name == "and_then");
- if is_as_ref_able {
- return Some((span.shrink_to_lo(),
- "consider using `as_ref` instead",
- "as_ref().".into()));
+ match (is_as_ref_able, self.sess().source_map().span_to_snippet(*span)) {
+ (true, Ok(src)) => {
+ return Some((*span, "consider using `as_ref` instead",
+ format!("as_ref().{}", src)));
+ },
+ _ => ()
}
}
}
match expr.node {
// All built-in range literals but `..=` and `..` desugar to Structs
- ExprKind::Struct(QPath::Resolved(None, ref path), _, _) |
+ ExprKind::Struct(ref qpath, _, _) => {
+ if let QPath::Resolved(None, ref path) = **qpath {
+ return is_range_path(&path) && span_is_range_literal(&expr.span);
+ }
+ }
// `..` desugars to its struct path
ExprKind::Path(QPath::Resolved(None, ref path)) => {
return is_range_path(&path) && span_is_range_literal(&expr.span);
use crate::check::regionck::RegionCtxt;
+use crate::hir;
use crate::hir::def_id::DefId;
use rustc::infer::outlives::env::OutlivesEnvironment;
use rustc::infer::{self, InferOk, SuppressRegionErrors};
use rustc::ty::{self, Ty, TyCtxt};
use crate::util::common::ErrorReported;
-use syntax::ast;
use syntax_pos::Span;
-/// check_drop_impl confirms that the Drop implementation identified by
+/// This function confirms that the `Drop` implementation identified by
/// `drop_impl_did` is not any more specialized than the type it is
/// attached to (Issue #8142).
///
/// 1. The self type must be nominal (this is already checked during
/// coherence),
///
-/// 2. The generic region/type parameters of the impl's self-type must
+/// 2. The generic region/type parameters of the impl's self type must
/// all be parameters of the Drop impl itself (i.e., no
/// specialization like `impl Drop for Foo<i32>`), and,
///
drop_impl_ty: Ty<'tcx>,
self_type_did: DefId,
) -> Result<(), ErrorReported> {
- let drop_impl_node_id = tcx.hir().as_local_node_id(drop_impl_did).unwrap();
+ let drop_impl_hir_id = tcx.hir().as_local_hir_id(drop_impl_did).unwrap();
// check that the impl type can be made to match the trait type.
let fresh_impl_substs = infcx.fresh_substs_for_item(drop_impl_span, drop_impl_did);
let fresh_impl_self_ty = drop_impl_ty.subst(tcx, fresh_impl_substs);
- let cause = &ObligationCause::misc(drop_impl_span, drop_impl_node_id);
+ let cause = &ObligationCause::misc(drop_impl_span, drop_impl_hir_id);
match infcx
.at(cause, impl_param_env)
.eq(named_type, fresh_impl_self_ty)
result
}
-/// check_safety_of_destructor_if_necessary confirms that the type
+/// This function confirms that the type
/// expression `typ` conforms to the "Drop Check Rule" from the Sound
-/// Generic Drop (RFC 769).
+/// Generic Drop RFC (#769).
///
/// ----
///
/// expected to break the needed parametricity property beyond
/// repair.)
///
-/// Therefore we have scaled back Drop-Check to a more conservative
+/// Therefore, we have scaled back Drop-Check to a more conservative
/// rule that does not attempt to deduce whether a `Drop`
/// implementation could not possible access data of a given lifetime;
/// instead Drop-Check now simply assumes that if a destructor has
/// this conservative assumption (and thus assume the obligation of
/// ensuring that they do not access data nor invoke methods of
/// values that have been previously dropped).
-///
pub fn check_safety_of_destructor_if_necessary<'a, 'gcx, 'tcx>(
rcx: &mut RegionCtxt<'a, 'gcx, 'tcx>,
ty: Ty<'tcx>,
span: Span,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
scope: region::Scope,
) -> Result<(), ErrorReported> {
debug!("check_safety_of_destructor_if_necessary typ: {:?} scope: {:?}",
safety,
abi
)));
- let cause = ObligationCause::new(it.span, it.id, ObligationCauseCode::IntrinsicType);
+ let cause = ObligationCause::new(it.span, it.hir_id, ObligationCauseCode::IntrinsicType);
require_same_types(tcx, &cause, tcx.mk_fn_ptr(tcx.fn_sig(def_id)), fty);
}
-/// Returns whether the given intrinsic is unsafe to call or not.
+/// Returns `true` if the given intrinsic is unsafe to call or not.
pub fn intrisic_operation_unsafety(intrinsic: &str) -> hir::Unsafety {
match intrinsic {
"size_of" | "min_align_of" | "needs_drop" |
}
}
- /// Add a suggestion to call the given method to the provided diagnostic.
+ /// Adds a suggestion to call the given method to the provided diagnostic.
crate fn suggest_method_call(
&self,
err: &mut DiagnosticBuilder<'a>,
/// `lookup_method_in_trait` is used for overloaded operators.
/// It does a very narrow slice of what the normal probe/confirm path does.
/// In particular, it doesn't really do any probing: it simply constructs
- /// an obligation for a particular trait with the given self-type and checks
+ /// an obligation for a particular trait with the given self type and checks
/// whether that trait is implemented.
- ///
- /// FIXME(#18741): it seems likely that we can consolidate some of this
- /// code with the other method-lookup code. In particular, the second half
- /// of this method is basically the same as confirmation.
+ //
+ // FIXME(#18741): it seems likely that we can consolidate some of this
+ // code with the other method-lookup code. In particular, the second half
+ // of this method is basically the same as confirmation.
pub fn lookup_method_in_trait(&self,
span: Span,
m_name: ast::Ident,
Ok(def)
}
- /// Find item with name `item_name` defined in impl/trait `def_id`
+ /// Finds item with name `item_name` defined in impl/trait `def_id`
/// and return it, or `None`, if no such item was defined there.
pub fn associated_item(&self, def_id: DefId, item_name: ast::Ident, ns: Namespace)
-> Option<ty::AssociatedItem> {
tcx.infer_ctxt().enter_with_canonical(DUMMY_SP, &goal, |ref infcx, goal, inference_vars| {
let ParamEnvAnd { param_env, value: self_ty } = goal;
- let mut autoderef = Autoderef::new(infcx, param_env, ast::DUMMY_NODE_ID, DUMMY_SP, self_ty)
+ let mut autoderef = Autoderef::new(infcx, param_env, hir::DUMMY_HIR_ID, DUMMY_SP, self_ty)
.include_raw_pointers()
.silence_errors();
let mut reached_raw_pointer = false;
stable_pick: &Pick,
unstable_candidates: &[(&Candidate<'tcx>, Symbol)],
) {
- let mut diag = self.tcx.struct_span_lint_node(
+ let mut diag = self.tcx.struct_span_lint_hir(
lint::builtin::UNSTABLE_NAME_COLLISIONS,
self.fcx.body_id,
self.span,
}
}
- /// Get the type of an impl and generate substitutions with placeholders.
+ /// Gets the type of an impl and generate substitutions with placeholders.
fn impl_ty_and_substs(&self, impl_def_id: DefId) -> (Ty<'tcx>, &'tcx Substs<'tcx>) {
(self.tcx.type_of(impl_def_id), self.fresh_item_substs(impl_def_id))
}
})
}
- /// Replace late-bound-regions bound by `value` with `'static` using
+ /// Replaces late-bound-regions bound by `value` with `'static` using
/// `ty::erase_late_bound_regions`.
///
/// This is only a reasonable thing to do during the *probe* phase, not the *confirm* phase, of
self.tcx.erase_late_bound_regions(value)
}
- /// Find the method with the appropriate name (or return type, as the case may be). If
+ /// Finds the method with the appropriate name (or return type, as the case may be). If
/// `allow_similar_names` is set, find methods with close-matching names.
fn impl_or_trait_item(&self, def_id: DefId) -> Vec<ty::AssociatedItem> {
if let Some(name) = self.method_name {
};
let field_ty = field.ty(tcx, substs);
- let scope = self.tcx.hir().get_module_parent(self.body_id);
+ let scope = self.tcx.hir().get_module_parent_by_hir_id(
+ self.body_id);
if field.vis.is_accessible_from(scope, self.tcx) {
if self.is_fn_ty(&field_ty, span) {
err.help(&format!("use `({0}.{1})(...)` if you \
err: &mut DiagnosticBuilder,
mut msg: String,
candidates: Vec<DefId>) {
- let module_did = self.tcx.hir().get_module_parent(self.body_id);
+ let module_did = self.tcx.hir().get_module_parent_by_hir_id(self.body_id);
let module_id = self.tcx.hir().as_local_node_id(module_did).unwrap();
let krate = self.tcx.hir().krate();
let (span, found_use) = UsePlacementFinder::check(self.tcx, krate, module_id);
}
}
-/// Retrieve all traits in this crate and any dependent crates.
+/// Retrieves all traits in this crate and any dependent crates.
pub fn all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Vec<TraitInfo> {
tcx.all_traits(LOCAL_CRATE).iter().map(|&def_id| TraitInfo { def_id }).collect()
}
-/// Compute all traits in this crate and any dependent crates.
+/// Computes all traits in this crate and any dependent crates.
fn compute_all_traits<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Vec<DefId> {
use hir::itemlikevisit;
traits: &mut Vec<DefId>,
external_mods: &mut FxHashSet<DefId>,
def: Def) {
- let def_id = def.def_id();
match def {
- Def::Trait(..) => {
+ Def::Trait(def_id) => {
traits.push(def_id);
}
- Def::Mod(..) => {
+ Def::Mod(def_id) => {
if !external_mods.insert(def_id) {
return;
}
/*!
-# check.rs
+# typeck: check phase
Within the check phase of type check, we check each item one at a time
(bodies of function expressions are checked as part of the containing
revealed_ty: Ty<'tcx>
}
-/// A wrapper for InferCtxt's `in_progress_tables` field.
+/// A wrapper for `InferCtxt`'s `in_progress_tables` field.
#[derive(Copy, Clone)]
struct MaybeInProgressTables<'a, 'tcx: 'a> {
maybe_tables: Option<&'a RefCell<ty::TypeckTables<'tcx>>>,
}
}
-/// closures defined within the function. For example:
+/// Closures defined within the function. For example:
///
/// fn foo() {
/// bar(move|| { ... })
/// This expression is an `if` condition, it must resolve to `bool`.
ExpectIfCondition,
- /// This expression should have the type given (or some subtype)
+ /// This expression should have the type given (or some subtype).
ExpectHasType(Ty<'tcx>),
- /// This expression will be cast to the `Ty`
+ /// This expression will be cast to the `Ty`.
ExpectCastableToType(Ty<'tcx>),
/// This rvalue expression will be wrapped in `&` or `Box` and coerced
}
}
- /// Provide an expectation for an rvalue expression given an *optional*
+ /// Provides an expectation for an rvalue expression given an *optional*
/// hint, which is not required for type safety (the resulting type might
/// be checked higher up, as is the case with `&expr` and `box expr`), but
/// is useful in determining the concrete type.
Always,
/// Same as `Always` but with a reachability
- /// warning already emitted
+ /// warning already emitted.
WarnedAlways
}
}
pub struct FnCtxt<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
- body_id: ast::NodeId,
+ body_id: hir::HirId,
/// The parameter environment used for proving trait obligations
/// in this function. This can change when we descend into
ps: RefCell<UnsafetyState>,
/// Whether the last checked node generates a divergence (e.g.,
- /// `return` will set this to Always). In general, when entering
+ /// `return` will set this to `Always`). In general, when entering
/// an expression or other node in the tree, the initial value
/// indicates whether prior parts of the containing expression may
/// have diverged. It is then typically set to `Maybe` (and the
/// old value remembered) for processing the subparts of the
/// current expression. As each subpart is processed, they may set
- /// the flag to `Always` etc. Finally, at the end, we take the
+ /// the flag to `Always`, etc. Finally, at the end, we take the
/// result and "union" it with the original value, so that when we
/// return the flag indicates if any subpart of the parent
- /// expression (up to and including this part) has diverged. So,
+ /// expression (up to and including this part) has diverged. So,
/// if you read it after evaluating a subexpression `X`, the value
/// you get indicates whether any subexpression that was
/// evaluating up to and including `X` diverged.
/// foo();}` or `{return; 22}`, where we would warn on the
/// `foo()` or `22`.
///
- /// An expression represents dead-code if, after checking it,
+ /// An expression represents dead code if, after checking it,
/// the diverges flag is set to something other than `Maybe`.
diverges: Cell<Diverges>,
}
}
-/// Helper type of a temporary returned by Inherited::build(...).
+/// Helper type of a temporary returned by `Inherited::build(...)`.
/// Necessary because we can't write the following bound:
-/// F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>).
+/// `F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(Inherited<'b, 'gcx, 'tcx>)`.
pub struct InheritedBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
infcx: infer::InferCtxtBuilder<'a, 'gcx, 'tcx>,
def_id: DefId,
fn normalize_associated_types_in<T>(&self,
span: Span,
- body_id: ast::NodeId,
+ body_id: hir::HirId,
param_env: ty::ParamEnv<'tcx>,
value: &T) -> T
where T : TypeFoldable<'tcx>
tcx.calculate_dtor(def_id, &mut dropck::check_drop_impl)
}
-/// If this def-id is a "primary tables entry", returns `Some((body_id, decl))`
+/// If this `DefId` is a "primary tables entry", returns `Some((body_id, decl))`
/// with information about it's body-id and fn-decl (if any). Otherwise,
/// returns `None`.
///
/// If this function returns "some", then `typeck_tables(def_id)` will
/// succeed; if it returns `None`, then `typeck_tables(def_id)` may or
-/// may not succeed. In some cases where this function returns `None`
+/// may not succeed. In some cases where this function returns `None`
/// (notably closures), `typeck_tables(def_id)` would wind up
/// redirecting to the owning function.
fn primary_body_of<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
tcx.liberate_late_bound_regions(def_id, &fn_sig);
let fn_sig =
inh.normalize_associated_types_in(body.value.span,
- body_id.node_id,
+ body_id.hir_id,
param_env,
&fn_sig);
let fcx = check_fn(&inh, param_env, fn_sig, decl, id, body, None).0;
fcx
} else {
- let fcx = FnCtxt::new(&inh, param_env, body.value.id);
+ let fcx = FnCtxt::new(&inh, param_env, body.value.hir_id);
let expected_type = tcx.type_of(def_id);
let expected_type = fcx.normalize_associated_types_in(body.value.span, &expected_type);
fcx.require_type_is_sized(expected_type, body.value.span, traits::ConstSized);
/// Types that are captured (see `GeneratorInterior` for more).
interior: ty::Ty<'tcx>,
- /// Indicates if the generator is movable or static (immovable)
+ /// Indicates if the generator is movable or static (immovable).
movability: hir::GeneratorMovability,
}
// Create the function context. This is either derived from scratch or,
// in the case of closures, based on the outer context.
- let mut fcx = FnCtxt::new(inherited, param_env, body.value.id);
+ let mut fcx = FnCtxt::new(inherited, param_env, body.value.hir_id);
*fcx.ps.borrow_mut() = UnsafetyState::function(fn_sig.unsafety, fn_id);
let declared_ret_ty = fn_sig.output();
let substs = fcx.tcx.mk_substs_trait(declared_ret_ty, &[]);
let trait_ref = ty::TraitRef::new(term_id, substs);
let return_ty_span = decl.output.span();
+ let fn_hir_id = fcx.tcx.hir().node_to_hir_id(fn_id);
let cause = traits::ObligationCause::new(
- return_ty_span, fn_id, ObligationCauseCode::MainFunctionType);
+ return_ty_span, fn_hir_id, ObligationCauseCode::MainFunctionType);
inherited.register_predicate(
traits::Obligation::new(
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
pub fn new(inh: &'a Inherited<'a, 'gcx, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
- body_id: ast::NodeId)
+ body_id: hir::HirId)
-> FnCtxt<'a, 'gcx, 'tcx> {
FnCtxt {
body_id,
self.tcx.sess.err_count() - self.err_count_on_creation
}
- /// Produce warning on the given node, if the current point in the
+ /// Produces warning on the given node, if the current point in the
/// function is unreachable, and there hasn't been another warning.
fn warn_if_unreachable(&self, id: ast::NodeId, span: Span, kind: &str) {
if self.diverges.get() == Diverges::Always {
result
}
- /// Replace the opaque types from the given value with type variables,
+ /// Replaces the opaque types from the given value with type variables,
/// and records the `OpaqueTypeMap` for later use during writeback. See
/// `InferCtxt::instantiate_opaque_types` for more details.
fn instantiate_opaque_types_from_value<T: TypeFoldable<'tcx>>(
}
}
- // Resolve associated value path into a base type and associated constant or method definition.
- // The newly resolved definition is written into `type_dependent_defs`.
+ /// Resolves associated value path into a base type and associated constant or method
+ /// definition. The newly resolved definition is written into `type_dependent_defs`.
pub fn resolve_ty_and_def_ufcs<'b>(&self,
qpath: &'b QPath,
node_id: ast::NodeId,
None
}
- /// Given a function block's `NodeId`, return its `FnDecl` if it exists, or `None` otherwise.
+ /// Given a function block's `NodeId`, returns its `FnDecl` if it exists, or `None` otherwise.
fn get_parent_fn_decl(&self, blk_id: ast::NodeId) -> Option<(hir::FnDecl, ast::Ident)> {
let parent = self.tcx.hir().get(self.tcx.hir().get_parent(blk_id));
self.get_node_fn_decl(parent).map(|(fn_decl, ident, _)| (fn_decl, ident))
})
}
- /// On implicit return expressions with mismatched types, provide the following suggestions:
+ /// On implicit return expressions with mismatched types, provides the following suggestions:
///
- /// - Point out the method's return type as the reason for the expected type
- /// - Possible missing semicolon
- /// - Possible missing return type if the return type is the default, and not `fn main()`
+ /// - Points out the method's return type as the reason for the expected type.
+ /// - Possible missing semicolon.
+ /// - Possible missing return type if the return type is the default, and not `fn main()`.
pub fn suggest_mismatched_types_on_tail(
&self,
err: &mut DiagnosticBuilder<'tcx>,
}
}
- /// A common error is to forget to add a semicolon at the end of a block:
+ /// A common error is to forget to add a semicolon at the end of a block, e.g.,
///
/// ```
/// fn foo() {
query_result)
}
- /// Returns whether an expression is contained inside the LHS of an assignment expression.
+ /// Returns `true` if an expression is contained inside the LHS of an assignment expression.
fn expr_in_place(&self, mut expr_id: ast::NodeId) -> bool {
let mut contained_in_place = false;
use rustc::hir;
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
- /// Check a `a <op>= b`
+ /// Checks a `a <op>= b`
pub fn check_binop_assign(&self,
expr: &'gcx hir::Expr,
op: hir::BinOp,
ty
}
- /// Check a potentially overloaded binary operator.
+ /// Checks a potentially overloaded binary operator.
pub fn check_binop(&self,
expr: &'gcx hir::Expr,
op: hir::BinOp,
Unary(hir::UnOp, Span),
}
-/// Returns true if this is a built-in arithmetic operation (e.g., u32
+/// Returns `true` if this is a built-in arithmetic operation (e.g., u32
/// + u32, i16x4 == i16x4) and false if these types would have to be
/// overloaded to be legal. There are two reasons that we distinguish
/// builtin operations from overloaded ones (vs trying to drive
///
/// 1. Builtin operations can trivially be evaluated in constants.
/// 2. For comparison operators applied to SIMD types the result is
-/// not of type `bool`. For example, `i16x4==i16x4` yields a
+/// not of type `bool`. For example, `i16x4 == i16x4` yields a
/// type like `i16x4`. This means that the overloaded trait
/// `PartialEq` is not applicable.
///
//! The region check is a final pass that runs over the AST after we have
//! inferred the type constraints but before we have actually finalized
-//! the types. Its purpose is to embed a variety of region constraints.
+//! the types. Its purpose is to embed a variety of region constraints.
//! Inserting these constraints as a separate pass is good because (1) it
//! localizes the code that has to do with region inference and (2) often
//! we cannot know what constraints are needed until the basic types have
//! #### Reborrows
//!
//! Generally speaking, `regionck` does NOT try to ensure that the data
-//! `data` will outlive the pointer `x`. That is the job of borrowck. The
+//! `data` will outlive the pointer `x`. That is the job of borrowck. The
//! one exception is when "re-borrowing" the contents of another borrowed
//! pointer. For example, imagine you have a borrowed pointer `b` with
-//! lifetime L1 and you have an expression `&*b`. The result of this
-//! expression will be another borrowed pointer with lifetime L2 (which is
+//! lifetime `L1` and you have an expression `&*b`. The result of this
+//! expression will be another borrowed pointer with lifetime `L2` (which is
//! an inference variable). The borrow checker is going to enforce the
-//! constraint that L2 < L1, because otherwise you are re-borrowing data
-//! for a lifetime larger than the original loan. However, without the
+//! constraint that `L2 < L1`, because otherwise you are re-borrowing data
+//! for a lifetime larger than the original loan. However, without the
//! routines in this module, the region inferencer would not know of this
-//! dependency and thus it might infer the lifetime of L2 to be greater
-//! than L1 (issue #3148).
+//! dependency and thus it might infer the lifetime of `L2` to be greater
+//! than `L1` (issue #3148).
//!
//! There are a number of troublesome scenarios in the tests
//! `region-dependent-*.rs`, but here is one example:
//!
//! The key point here is that when you are borrowing a value that
//! is "guaranteed" by a borrowed pointer, you must link the
-//! lifetime of that borrowed pointer (L1, here) to the lifetime of
-//! the borrow itself (L2). What do I mean by "guaranteed" by a
+//! lifetime of that borrowed pointer (`L1`, here) to the lifetime of
+//! the borrow itself (`L2`). What do I mean by "guaranteed" by a
//! borrowed pointer? I mean any data that is reached by first
//! dereferencing a borrowed pointer and then either traversing
-//! interior offsets or boxes. We say that the guarantor
+//! interior offsets or boxes. We say that the guarantor
//! of such data is the region of the borrowed pointer that was
-//! traversed. This is essentially the same as the ownership
+//! traversed. This is essentially the same as the ownership
//! relation, except that a borrowed pointer never owns its
//! contents.
impl<'a, 'gcx, 'tcx> FnCtxt<'a, 'gcx, 'tcx> {
pub fn regionck_expr(&self, body: &'gcx hir::Body) {
let subject = self.tcx.hir().body_owner_def_id(body.id());
- let id = body.value.id;
+ let id = body.value.hir_id;
let mut rcx = RegionCtxt::new(
self,
RepeatingScope(id),
/// Region checking during the WF phase for items. `wf_tys` are the
/// types from which we should derive implied bounds, if any.
- pub fn regionck_item(&self, item_id: ast::NodeId, span: Span, wf_tys: &[Ty<'tcx>]) {
+ pub fn regionck_item(&self, item_id: hir::HirId, span: Span, wf_tys: &[Ty<'tcx>]) {
debug!("regionck_item(item.id={:?}, wf_tys={:?})", item_id, wf_tys);
- let subject = self.tcx.hir().local_def_id(item_id);
+ let subject = self.tcx.hir().local_def_id_from_hir_id(item_id);
let mut rcx = RegionCtxt::new(
self,
RepeatingScope(item_id),
pub fn regionck_fn(&self, fn_id: ast::NodeId, body: &'gcx hir::Body) {
debug!("regionck_fn(id={})", fn_id);
let subject = self.tcx.hir().body_owner_def_id(body.id());
- let node_id = body.value.id;
+ let hir_id = body.value.hir_id;
let mut rcx = RegionCtxt::new(
self,
- RepeatingScope(node_id),
- node_id,
+ RepeatingScope(hir_id),
+ hir_id,
Subject(subject),
self.param_env,
);
if self.err_count_since_creation() == 0 {
+ let fn_hir_id = self.tcx.hir().node_to_hir_id(fn_id);
// regionck assumes typeck succeeded
- rcx.visit_fn_body(fn_id, body, self.tcx.hir().span(fn_id));
+ rcx.visit_fn_body(fn_hir_id, body, self.tcx.hir().span_by_hir_id(fn_hir_id));
}
rcx.resolve_regions_and_report_errors(SuppressRegionErrors::when_nll_is_enabled(self.tcx));
outlives_environment: OutlivesEnvironment<'tcx>,
// id of innermost fn body id
- body_id: ast::NodeId,
+ body_id: hir::HirId,
// call_site scope of innermost fn
call_site_scope: Option<region::Scope>,
// id of innermost fn or loop
- repeating_scope: ast::NodeId,
+ repeating_scope: hir::HirId,
// id of AST node being analyzed (the subject of the analysis).
subject_def_id: DefId,
}
}
-pub struct RepeatingScope(ast::NodeId);
+pub struct RepeatingScope(hir::HirId);
pub struct Subject(DefId);
impl<'a, 'gcx, 'tcx> RegionCtxt<'a, 'gcx, 'tcx> {
pub fn new(
fcx: &'a FnCtxt<'a, 'gcx, 'tcx>,
RepeatingScope(initial_repeating_scope): RepeatingScope,
- initial_body_id: ast::NodeId,
+ initial_body_id: hir::HirId,
Subject(subject): Subject,
param_env: ty::ParamEnv<'tcx>,
) -> RegionCtxt<'a, 'gcx, 'tcx> {
}
}
- fn set_repeating_scope(&mut self, scope: ast::NodeId) -> ast::NodeId {
+ fn set_repeating_scope(&mut self, scope: hir::HirId) -> hir::HirId {
mem::replace(&mut self.repeating_scope, scope)
}
- /// Try to resolve the type for the given node, returning t_err if an error results. Note that
+ /// Try to resolve the type for the given node, returning `t_err` if an error results. Note that
/// we never care about the details of the error, the same error will be detected and reported
/// in the writeback phase.
///
- /// Note one important point: we do not attempt to resolve *region variables* here. This is
+ /// Note one important point: we do not attempt to resolve *region variables* here. This is
/// because regionck is essentially adding constraints to those region variables and so may yet
/// influence how they are resolved.
///
/// }
/// ```
///
- /// Here, the region of `b` will be `<R0>`. `<R0>` is constrained to be some subregion of the
- /// block B and some superregion of the call. If we forced it now, we'd choose the smaller
- /// region (the call). But that would make the *b illegal. Since we don't resolve, the type
+ /// Here, the region of `b` will be `<R0>`. `<R0>` is constrained to be some subregion of the
+ /// block B and some superregion of the call. If we forced it now, we'd choose the smaller
+ /// region (the call). But that would make the *b illegal. Since we don't resolve, the type
/// of b will be `&<R0>.i32` and then `*b` will require that `<R0>` be bigger than the let and
/// the `*b` expression, so we will effectively resolve `<R0>` to be the block B.
pub fn resolve_type(&self, unresolved_ty: Ty<'tcx>) -> Ty<'tcx> {
/// `intravisit::Visitor` impl below.)
fn visit_fn_body(
&mut self,
- id: ast::NodeId, // the id of the fn itself
+ id: hir::HirId, // the id of the fn itself
body: &'gcx hir::Body,
span: Span,
) {
// When we enter a function, we can derive
- debug!("visit_fn_body(id={})", id);
+ debug!("visit_fn_body(id={:?})", id);
let body_id = body.id();
- self.body_id = body_id.node_id;
+ self.body_id = body_id.hir_id;
let call_site = region::Scope {
id: body.value.hir_id.local_id,
self.call_site_scope = Some(call_site);
let fn_sig = {
- let fn_hir_id = self.tcx.hir().node_to_hir_id(id);
- match self.tables.borrow().liberated_fn_sigs().get(fn_hir_id) {
+ match self.tables.borrow().liberated_fn_sigs().get(id) {
Some(f) => f.clone(),
None => {
- bug!("No fn-sig entry for id={}", id);
+ bug!("No fn-sig entry for id={:?}", id);
}
}
};
self.outlives_environment.add_implied_bounds(
self.fcx,
&fn_sig_tys[..],
- body_id.node_id,
+ body_id.hir_id,
span,
);
self.outlives_environment
- .save_implied_bounds(body_id.node_id);
+ .save_implied_bounds(body_id.hir_id);
self.link_fn_args(
region::Scope {
id: body.value.hir_id.local_id,
&body.arguments,
);
self.visit_body(body);
- self.visit_region_obligations(body_id.node_id);
+ self.visit_region_obligations(body_id.hir_id);
let call_site_scope = self.call_site_scope.unwrap();
debug!(
);
let call_site_region = self.tcx.mk_region(ty::ReScope(call_site_scope));
- let body_hir_id = self.tcx.hir().node_to_hir_id(body_id.node_id);
- self.type_of_node_must_outlive(infer::CallReturn(span), body_hir_id, call_site_region);
+ self.type_of_node_must_outlive(infer::CallReturn(span), body_id.hir_id, call_site_region);
self.constrain_opaque_types(
&self.fcx.opaque_types.borrow(),
);
}
- fn visit_region_obligations(&mut self, node_id: ast::NodeId) {
- debug!("visit_region_obligations: node_id={}", node_id);
+ fn visit_region_obligations(&mut self, hir_id: hir::HirId) {
+ debug!("visit_region_obligations: hir_id={:?}", hir_id);
// region checking can introduce new pending obligations
// which, when processed, might generate new region
let env_snapshot = self.outlives_environment.push_snapshot_pre_closure();
let body = self.tcx.hir().body(body_id);
- self.visit_fn_body(id, body, span);
+ let hir_id = self.tcx.hir().node_to_hir_id(id);
+ self.visit_fn_body(hir_id, body, span);
// Restore state from previous function.
self.outlives_environment
fn visit_expr(&mut self, expr: &'gcx hir::Expr) {
debug!(
- "regionck::visit_expr(e={:?}, repeating_scope={})",
+ "regionck::visit_expr(e={:?}, repeating_scope={:?})",
expr, self.repeating_scope
);
}
debug!(
- "regionck::visit_expr(e={:?}, repeating_scope={}) - visiting subexprs",
+ "regionck::visit_expr(e={:?}, repeating_scope={:?}) - visiting subexprs",
expr, self.repeating_scope
);
match expr.node {
}
hir::ExprKind::Loop(ref body, _, _) => {
- let repeating_scope = self.set_repeating_scope(body.id);
+ let repeating_scope = self.set_repeating_scope(body.hir_id);
intravisit::walk_expr(self, expr);
self.set_repeating_scope(repeating_scope);
}
hir::ExprKind::While(ref cond, ref body, _) => {
- let repeating_scope = self.set_repeating_scope(cond.id);
+ let repeating_scope = self.set_repeating_scope(cond.hir_id);
self.visit_expr(&cond);
- self.set_repeating_scope(body.id);
+ self.set_repeating_scope(body.hir_id);
self.visit_block(&body);
self.set_repeating_scope(repeating_scope);
}
fn check_expr_fn_block(&mut self, expr: &'gcx hir::Expr, body_id: hir::BodyId) {
- let repeating_scope = self.set_repeating_scope(body_id.node_id);
+ let repeating_scope = self.set_repeating_scope(body_id.hir_id);
intravisit::walk_expr(self, expr);
self.set_repeating_scope(repeating_scope);
}
}
}
- /// Create a temporary `MemCategorizationContext` and pass it to the closure.
+ /// Creates a temporary `MemCategorizationContext` and passes it to the closure.
fn with_mc<F, R>(&self, f: F) -> R
where
F: for<'b> FnOnce(mc::MemCategorizationContext<'b, 'gcx, 'tcx>) -> R,
/// `F: for<'b, 'tcx> where 'gcx: 'tcx FnOnce(FnCtxt<'b, 'gcx, 'tcx>)`.
struct CheckWfFcxBuilder<'a, 'gcx: 'a+'tcx, 'tcx: 'a> {
inherited: super::InheritedBuilder<'a, 'gcx, 'tcx>,
- id: ast::NodeId,
+ id: hir::HirId,
span: Span,
param_env: ty::ParamEnv<'tcx>,
}
fn for_id<'a, 'gcx, 'tcx>(tcx: TyCtxt<'a, 'gcx, 'gcx>, id: ast::NodeId, span: Span)
-> CheckWfFcxBuilder<'a, 'gcx, 'tcx> {
let def_id = tcx.hir().local_def_id(id);
+ let hir_id = tcx.hir().node_to_hir_id(id);
CheckWfFcxBuilder {
inherited: Inherited::build(tcx, def_id),
- id,
+ id: hir_id,
span,
param_env: tcx.param_env(def_id),
}
});
}
-/// Checks where clauses and inline bounds that are declared on def_id.
+/// Checks where-clauses and inline bounds that are declared on `def_id`.
fn check_where_clauses<'a, 'gcx, 'fcx, 'tcx>(
tcx: TyCtxt<'a, 'gcx, 'gcx>,
fcx: &FnCtxt<'fcx, 'gcx, 'tcx>,
/// through a `*const/mut T` raw pointer. If the feature is not enabled, the requirements are more
/// strict: `receiver_ty` must implement `Receiver` and directly implement `Deref<Target=self_ty>`.
///
-/// NB: there are cases this function returns `true` but causes an error to be emitted,
+/// N.B., there are cases where this function returns `true` but causes an error to be emitted,
/// particularly when `receiver_ty` derefs to a type that is the same as `self_ty` but has the
/// wrong lifetime. Be careful of this if you are calling this function speculatively.
fn receiver_is_valid<'fcx, 'tcx, 'gcx>(
}
}
-/// Feature gates RFC 2056 - trivial bounds, checking for global bounds that
+/// Feature gates RFC 2056 -- trivial bounds, checking for global bounds that
/// aren't true.
fn check_false_global_bounds<'a, 'gcx, 'tcx>(
fcx: &FnCtxt<'a, 'gcx, 'tcx>,
span: Span,
- id: ast::NodeId)
+ id: hir::HirId)
{
use rustc::ty::TypeFoldable;
let empty_env = ty::ParamEnv::empty();
- let def_id = fcx.tcx.hir().local_def_id(id);
+ let def_id = fcx.tcx.hir().local_def_id_from_hir_id(id);
let predicates = fcx.tcx.predicates_of(def_id).predicates
.iter()
.map(|(p, _)| *p)
body: &'gcx hir::Body,
rustc_dump_user_substs: bool,
) -> WritebackCx<'cx, 'gcx, 'tcx> {
- let owner = fcx.tcx.hir().definitions().node_to_hir_id(body.id().node_id);
+ let owner = body.id().hir_id;
WritebackCx {
fcx,
}
struct ExternCrateToLint {
- /// def-id of the extern crate
+ /// `DefId` of the extern crate
def_id: DefId,
/// span from the item
if impl_did.is_local() {
let dispatch_from_dyn_trait = tcx.lang_items().dispatch_from_dyn_trait().unwrap();
- let impl_node_id = tcx.hir().as_local_node_id(impl_did).unwrap();
- let span = tcx.hir().span(impl_node_id);
+ let impl_hir_id = tcx.hir().as_local_hir_id(impl_did).unwrap();
+ let span = tcx.hir().span_by_hir_id(impl_hir_id);
let source = tcx.type_of(impl_did);
assert!(!source.has_escaping_bound_vars());
};
tcx.infer_ctxt().enter(|infcx| {
- let cause = ObligationCause::misc(span, impl_node_id);
+ let cause = ObligationCause::misc(span, impl_hir_id);
use ty::TyKind::*;
match (&source.sty, &target.sty) {
});
// this provider should only get invoked for local def-ids
- let impl_node_id = gcx.hir().as_local_node_id(impl_did).unwrap_or_else(|| {
+ let impl_hir_id = gcx.hir().as_local_hir_id(impl_did).unwrap_or_else(|| {
bug!("coerce_unsized_info: invoked for non-local def-id {:?}", impl_did)
});
source,
target);
- let span = gcx.hir().span(impl_node_id);
+ let span = gcx.hir().span_by_hir_id(impl_hir_id);
let param_env = gcx.param_env(impl_did);
assert!(!source.has_escaping_bound_vars());
target);
gcx.infer_ctxt().enter(|infcx| {
- let cause = ObligationCause::misc(span, impl_node_id);
+ let cause = ObligationCause::misc(span, impl_hir_id);
let check_mutbl = |mt_a: ty::TypeAndMut<'gcx>,
mt_b: ty::TypeAndMut<'gcx>,
mk_ptr: &dyn Fn(Ty<'gcx>) -> Ty<'gcx>| {
being coerced, none found");
return err_info;
} else if diff_fields.len() > 1 {
- let item = gcx.hir().expect_item(impl_node_id);
+ let item = gcx.hir().expect_item_by_hir_id(impl_hir_id);
let span = if let ItemKind::Impl(.., Some(ref t), _, _) = item.node {
t.path.span
} else {
- gcx.hir().span(impl_node_id)
+ gcx.hir().span_by_hir_id(impl_hir_id)
};
let mut err = struct_span_err!(gcx.sess,
let mut fulfill_cx = TraitEngine::new(infcx.tcx);
// Register an obligation for `A: Trait<B>`.
- let cause = traits::ObligationCause::misc(span, impl_node_id);
+ let cause = traits::ObligationCause::misc(span, impl_hir_id);
let predicate = gcx.predicate_for_trait_def(param_env,
cause,
trait_def_id,
tcx.ensure().crate_inherent_impls_overlap_check(LOCAL_CRATE);
}
-/// Overlap: No two impls for the same trait are implemented for the
+/// Overlap: no two impls for the same trait are implemented for the
/// same type. Likewise, no two inherent impls for a given type
/// constructor provide a method with the same name.
fn check_impl_overlap<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, node_id: ast::NodeId) {
impl<'cx, 'tcx, 'v> ItemLikeVisitor<'v> for OrphanChecker<'cx, 'tcx> {
/// Checks exactly one impl for orphan rules and other such
- /// restrictions. In this fn, it can happen that multiple errors
+ /// restrictions. In this fn, it can happen that multiple errors
/// apply to a specific impl, so just return after reporting one
/// to prevent inundating the user with a bunch of similar error
/// reports.
///////////////////////////////////////////////////////////////////////////
/// Context specific to some particular item. This is what implements
-/// AstConv. It has information about the predicates that are defined
+/// `AstConv`. It has information about the predicates that are defined
/// on the trait. Unfortunately, this predicate information is
/// available in various different forms at various points in the
/// process. So we can't just store a pointer to e.g., the AST or the
}
impl<'a, 'tcx> ItemCtxt<'a, 'tcx> {
- /// Find bounds from `hir::Generics`. This requires scanning through the
+ /// Finds bounds from `hir::Generics`. This requires scanning through the
/// AST. We do this to avoid having to convert *all* the bounds, which
/// would create artificial cycles. Instead we can only convert the
/// bounds for a type parameter `X` if `X::Foo` is used.
}
/// Tests whether this is the AST for a reference to the type
-/// parameter with id `param_id`. We use this so as to avoid running
+/// parameter with ID `param_id`. We use this so as to avoid running
/// `ast_ty_to_ty`, because we want to avoid triggering an all-out
/// conversion of the type to avoid inducing unnecessary cycles.
fn is_param<'a, 'tcx>(
tcx.alloc_adt_def(def_id, kind, variants, repr)
}
-/// Ensures that the super-predicates of the trait with def-id
+/// Ensures that the super-predicates of the trait with `DefId`
/// trait_def_id are converted and stored. This also ensures that
/// the transitive super-predicates are converted;
fn super_predicates_of<'a, 'tcx>(
}) => {
if gen.is_some() {
let hir_id = tcx.hir().node_to_hir_id(node_id);
- return tcx.typeck_tables_of(def_id).node_id_to_type(hir_id);
+ return tcx.typeck_tables_of(def_id).node_type(hir_id);
}
let substs = ty::ClosureSubsts {
}
/// Returns the early-bound lifetimes declared in this generics
-/// listing. For anything other than fns/methods, this is just all
+/// listing. For anything other than fns/methods, this is just all
/// the lifetimes that are declared. For fns or methods, we have to
/// screen out those that do not appear in any where-clauses etc using
/// `resolve_lifetime::early_bound_lifetimes`.
})
}
+/// Returns a list of type predicates for the definition with ID `def_id`, including inferred
+/// lifetime constraints. This includes all predicates returned by `explicit_predicates_of`, plus
+/// inferred constraints concerning which regions outlive other regions.
fn predicates_defined_on<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
result
}
+/// Returns a list of all type predicates (explicit and implicit) for the definition with
+/// ID `def_id`. This includes all predicates returned by `predicates_defined_on`, plus
+/// `Self: Trait` predicates for traits.
fn predicates_of<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
result
}
+/// Returns a list of user-specified type predicates for the definition with ID `def_id`.
+/// N.B., this does not include any implied/inferred constraints.
fn explicit_predicates_of<'a, 'tcx>(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
}
/// Converts a specific `GenericBound` from the AST into a set of
-/// predicates that apply to the self-type. A vector is returned
-/// because this can be anywhere from zero predicates (`T : ?Sized` adds no
-/// predicates) to one (`T : Foo`) to many (`T : Bar<X=i32>` adds `T : Bar`
+/// predicates that apply to the self type. A vector is returned
+/// because this can be anywhere from zero predicates (`T: ?Sized` adds no
+/// predicates) to one (`T: Foo`) to many (`T: Bar<X=i32>` adds `T: Bar`
/// and `<T as Bar>::X == i32`).
fn predicates_from_bound<'tcx>(
astconv: &dyn AstConv<'tcx, 'tcx>,
fn from(param: ty::EarlyBoundRegion) -> Self { Parameter(param.index) }
}
-/// Return the set of parameters constrained by the impl header.
+/// Returns the set of parameters constrained by the impl header.
pub fn parameters_for_impl<'tcx>(impl_self_ty: Ty<'tcx>,
impl_trait_ref: Option<ty::TraitRef<'tcx>>)
-> FxHashSet<Parameter>
/// parameters so constrained to `input_parameters`. For example,
/// imagine the following impl:
///
-/// impl<T: Debug, U: Iterator<Item=T>> Trait for U
+/// impl<T: Debug, U: Iterator<Item = T>> Trait for U
///
/// The impl's predicates are collected from left to right. Ignoring
/// the implicit `Sized` bounds, these are
/// We *do* have to be somewhat careful when projection targets contain
/// projections themselves, for example in
/// impl<S,U,V,W> Trait for U where
-/// /* 0 */ S: Iterator<Item=U>,
+/// /* 0 */ S: Iterator<Item = U>,
/// /* - */ U: Iterator,
/// /* 1 */ <U as Iterator>::Item: ToOwned<Owned=(W,<V as Iterator>::Item)>
-/// /* 2 */ W: Iterator<Item=V>
+/// /* 2 */ W: Iterator<Item = V>
/// /* 3 */ V: Debug
/// we have to evaluate the projections in the order I wrote them:
/// `V: Debug` requires `V` to be evaluated. The only projection that
+// ignore-tidy-linelength
#![allow(non_snake_case)]
register_long_diagnostics! {
It is not possible to declare type parameters on a function that has the `start`
attribute. Such a function must have the following type signature (for more
-information: http://doc.rust-lang.org/stable/book/first-edition/no-stdlib.html):
+information, view [the unstable book][1]):
+
+[1]: https://doc.rust-lang.org/unstable-book/language-features/lang-items.html#writing-an-executable-without-stdlib
```
# let _:
E0374: r##"
A struct without a field containing an unsized type cannot implement
-`CoerceUnsized`. An
-[unsized type](https://doc.rust-lang.org/book/first-edition/unsized-types.html)
-is any type that the compiler doesn't know the length or alignment of at
-compile time. Any struct containing an unsized type is also unsized.
+`CoerceUnsized`. An [unsized type][1] is any type that the compiler
+doesn't know the length or alignment of at compile time. Any struct
+containing an unsized type is also unsized.
+
+[1]: https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait
Example of erroneous code:
`CoerceUnsized`. This only occurs when you are trying to coerce one of the
types in your struct to another type in the struct. In this case we try to
impl `CoerceUnsized` from `T` to `U` which are both types that the struct
-takes. An [unsized type] is any type that the compiler doesn't know the length
-or alignment of at compile time. Any struct containing an unsized type is also
-unsized.
+takes. An [unsized type][1] is any type that the compiler doesn't know the
+length or alignment of at compile time. Any struct containing an unsized type
+is also unsized.
Example of erroneous code:
}
```
-[unsized type]: https://doc.rust-lang.org/book/first-edition/unsized-types.html
+[1]: https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait
"##,
E0376: r##"
`CoerceUnsized` can only be implemented for a struct. Unsized types are
already able to be coerced without an implementation of `CoerceUnsized`
whereas a struct containing an unsized type needs to know the unsized type
-field it's containing is able to be coerced. An
-[unsized type](https://doc.rust-lang.org/book/first-edition/unsized-types.html)
+field it's containing is able to be coerced. An [unsized type][1]
is any type that the compiler doesn't know the length or alignment of at
compile time. Any struct containing an unsized type is also unsized.
+[1]: https://doc.rust-lang.org/book/ch19-04-advanced-types.html#dynamically-sized-types-and-the-sized-trait
+
Example of erroneous code:
```compile_fail,E0376
assert_eq!(c, 'V');
```
-For more information about casts, take a look at The Book:
-https://doc.rust-lang.org/book/first-edition/casting-between-types.html
+For more information about casts, take a look at the Type cast section in
+[The Reference Book][1].
+
+[1]: https://doc.rust-lang.org/reference/expressions/operator-expr.html#type-cast-expressions
"##,
E0605: r##"
v as *const i8; // ok!
```
-For more information about casts, take a look at The Book:
-https://doc.rust-lang.org/book/first-edition/casting-between-types.html
+For more information about casts, take a look at the Type cast section in
+[The Reference Book][1].
+
+[1]: https://doc.rust-lang.org/reference/expressions/operator-expr.html#type-cast-expressions
"##,
E0606: r##"
let y: u32 = *x as u32; // We dereference it first and then cast it.
```
-For more information about casts, take a look at The Book:
-https://doc.rust-lang.org/book/first-edition/casting-between-types.html
+For more information about casts, take a look at the Type cast section in
+[The Reference Book][1].
+
+[1]: https://doc.rust-lang.org/reference/expressions/operator-expr.html#type-cast-expressions
"##,
E0607: r##"
To fix this error, don't try to cast directly between thin and fat pointers.
-For more information about casts, take a look at The Book:
-https://doc.rust-lang.org/book/first-edition/casting-between-types.html
+For more information about casts, take a look at the Type cast section in
+[The Reference Book][1].
+
+[1]: https://doc.rust-lang.org/reference/expressions/operator-expr.html#type-cast-expressions
"##,
E0609: r##"
```
For more information about primitives and structs, take a look at The Book:
-https://doc.rust-lang.org/book/first-edition/primitive-types.html
-https://doc.rust-lang.org/book/first-edition/structs.html
+https://doc.rust-lang.org/book/ch03-02-data-types.html
+https://doc.rust-lang.org/book/ch05-00-structs.html
"##,
E0614: r##"
use syntax_pos::Span;
/// Checks that all the type/lifetime parameters on an impl also
-/// appear in the trait ref or self-type (or are constrained by a
+/// appear in the trait ref or self type (or are constrained by a
/// where-clause). These rules are needed to ensure that, given a
/// trait ref like `<T as Trait<U>>`, we can derive the values of all
/// parameters on the impl (which is needed to make specialization
/// possible).
///
/// However, in the case of lifetimes, we only enforce these rules if
-/// the lifetime parameter is used in an associated type. This is a
+/// the lifetime parameter is used in an associated type. This is a
/// concession to backwards compatibility; see comment at the end of
/// the fn for details.
///
/// impl<T> Trait<Foo<T>> for Bar { ... }
/// // ^ T appears in `Foo<T>`, ok.
///
-/// impl<T> Trait<Foo> for Bar where Bar: Iterator<Item=T> { ... }
+/// impl<T> Trait<Foo> for Bar where Bar: Iterator<Item = T> { ... }
/// // ^ T is bound to `<Bar as Iterator>::Item`, ok.
///
/// impl<'a> Trait<Foo> for Bar { }
/*!
-# typeck.rs
+# typeck
The type checker is responsible for:
}
fn check_main_fn_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, main_def_id: DefId) {
- let main_id = tcx.hir().as_local_node_id(main_def_id).unwrap();
+ let main_id = tcx.hir().as_local_hir_id(main_def_id).unwrap();
let main_span = tcx.def_span(main_def_id);
let main_t = tcx.type_of(main_def_id);
match main_t.sty {
ty::FnDef(..) => {
- if let Some(Node::Item(it)) = tcx.hir().find(main_id) {
+ if let Some(Node::Item(it)) = tcx.hir().find_by_hir_id(main_id) {
if let hir::ItemKind::Fn(.., ref generics, _) = it.node {
let mut error = false;
if !generics.params.is_empty() {
}
fn check_start_fn_ty<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, start_def_id: DefId) {
- let start_id = tcx.hir().as_local_node_id(start_def_id).unwrap();
+ let start_id = tcx.hir().as_local_hir_id(start_def_id).unwrap();
let start_span = tcx.def_span(start_def_id);
let start_t = tcx.type_of(start_def_id);
match start_t.sty {
ty::FnDef(..) => {
- if let Some(Node::Item(it)) = tcx.hir().find(start_id) {
+ if let Some(Node::Item(it)) = tcx.hir().find_by_hir_id(start_id) {
if let hir::ItemKind::Fn(.., ref generics, _) = it.node {
let mut error = false;
if !generics.params.is_empty() {
/// Infer predicates for the items in the crate.
///
-/// global_inferred_outlives: this is initially the empty map that
+/// `global_inferred_outlives`: this is initially the empty map that
/// was generated by walking the items in the crate. This will
/// now be filled with inferred predicates.
pub fn infer_predicates<'tcx>(
pointers.
For more information about casts, take a look at The Book:
-https://doc.rust-lang.org/book/first-edition/casting-between-types.html");
+https://doc.rust-lang.org/reference/expressions/operator-expr.html#type-cast-expressions");
err
}
}
/// }
///
/// then while we are visiting `Bar<T>`, the `CurrentItem` would have
-/// the def-id and the start of `Foo`'s inferreds.
+/// the `DefId` and the start of `Foo`'s inferreds.
pub struct CurrentItem {
inferred_start: InferredIndex,
}
-//! Representation of a `#[doc(cfg(...))]` attribute.
+//! The representation of a `#[doc(cfg(...))]` attribute.
// FIXME: Once the portability lint RFC is implemented (see tracking issue #41619),
// switch to use those structures instead.
False,
/// A generic configuration option, e.g., `test` or `target_os = "linux"`.
Cfg(Symbol, Option<Symbol>),
- /// Negate a configuration requirement, i.e., `not(x)`.
+ /// Negates a configuration requirement, i.e., `not(x)`.
Not(Box<Cfg>),
/// Union of a list of configuration requirements, i.e., `any(...)`.
Any(Vec<Cfg>),
}
pub trait NestedAttributesExt {
- /// Returns whether the attribute list contains a specific `Word`
+ /// Returns `true` if the attribute list contains a specific `Word`
fn has_word(self, word: &str) -> bool;
}
}
}
- /// Get links as a vector
+ /// Gets links as a vector
///
/// Cache must be populated before call
pub fn links(&self, krate: &CrateNum) -> Vec<(String, String)> {
/// it does not preserve mutability or boxes.
#[derive(Clone, RustcEncodable, RustcDecodable, PartialEq, Eq, Debug, Hash)]
pub enum Type {
- /// structs/enums/traits (most that'd be an hir::TyKind::Path)
+ /// Structs/enums/traits (most that'd be an `hir::TyKind::Path`).
ResolvedPath {
path: Path,
typarams: Option<Vec<GenericBound>>,
did: DefId,
- /// true if is a `T::Name` path for associated types
+ /// `true` if is a `T::Name` path for associated types.
is_generic: bool,
},
/// For parameterized types, so the consumer of the JSON don't go
}
fn print_const_expr(cx: &DocContext, body: hir::BodyId) -> String {
- cx.tcx.hir().node_to_pretty_string(body.node_id)
+ cx.tcx.hir().hir_to_pretty_string(body.hir_id)
}
/// Given a type Path, resolve it to a Type using the TyCtxt
}
}
-/// An equality constraint on an associated type, e.g., `A=Bar` in `Foo<A=Bar>`
+/// An equality constraint on an associated type, e.g., `A = Bar` in `Foo<A = Bar>`
#[derive(Clone, PartialEq, Eq, RustcDecodable, RustcEncodable, Debug, Hash)]
pub struct TypeBinding {
pub name: String,
-//! Simplification of where clauses and parameter bounds into a prettier and
+//! Simplification of where-clauses and parameter bounds into a prettier and
//! more canonical form.
//!
//! Currently all cross-crate-inlined function use `rustc::ty` to reconstruct
//! the AST (e.g., see all of `clean::inline`), but this is not always a
-//! non-lossy transformation. The current format of storage for where clauses
+//! non-lossy transformation. The current format of storage for where-clauses
//! for functions and such is simply a list of predicates. One example of this
-//! is that the AST predicate of: `where T: Trait<Foo=Bar>` is encoded as:
+//! is that the AST predicate of: `where T: Trait<Foo = Bar>` is encoded as:
//! `where T: Trait, <T as Trait>::Foo = Bar`.
//!
//! This module attempts to reconstruct the original where and/or parameter
pub playground_url: Option<String>,
/// Whether to sort modules alphabetically on a module page instead of using declaration order.
/// `true` by default.
- ///
- /// FIXME(misdreavus): the flag name is `--sort-modules-by-appearance` but the meaning is
- /// inverted once read
+ //
+ // FIXME(misdreavus): the flag name is `--sort-modules-by-appearance` but the meaning is
+ // inverted once read.
pub sort_modules_alphabetically: bool,
/// List of themes to extend the docs with. Original argument name is included to assist in
/// displaying errors if it fails a theme check.
pub resource_suffix: String,
/// Whether to run the static CSS/JavaScript through a minifier when outputting them. `true` by
/// default.
- ///
- /// FIXME(misdreavus): the flag name is `--disable-minification` but the meaning is inverted
- /// once read
+ //
+ // FIXME(misdreavus): the flag name is `--disable-minification` but the meaning is inverted
+ // once read.
pub enable_minification: bool,
/// Whether to create an index page in the root of the output directory. If this is true but
/// `enable_index_page` is None, generate a static listing of crates instead.
})
}
- /// Returns whether the file given as `self.input` is a Markdown file.
+ /// Returns `true` if the file given as `self.input` is a Markdown file.
pub fn markdown_input(&self) -> bool {
self.input.extension()
.map_or(false, |e| e == "md" || e == "markdown")
/// Table type parameter definition -> substituted type
pub ty_substs: RefCell<FxHashMap<Def, clean::Type>>,
- /// Table node id of lifetime parameter definition -> substituted lifetime
+ /// Table `NodeId` of lifetime parameter definition -> substituted lifetime
pub lt_substs: RefCell<FxHashMap<DefId, clean::Lifetime>>,
/// Table DefId of `impl Trait` in argument position -> bounds
pub impl_trait_bounds: RefCell<FxHashMap<DefId, Vec<clean::GenericBound>>>,
-//! HTML Escaping
+//! HTML escaping.
//!
-//! This module contains one unit-struct which can be used to HTML-escape a
+//! This module contains one unit struct, which can be used to HTML-escape a
//! string of text (for use in a format string).
use std::fmt;
pub asyncness: hir::IsAsync,
}
-/// Wrapper struct for emitting a where clause from Generics.
+/// Wrapper struct for emitting a where-clause from Generics.
pub struct WhereClause<'a>{
- /// The Generics from which to emit a where clause.
+ /// The Generics from which to emit a where-clause.
pub gens: &'a clean::Generics,
/// The number of spaces to indent each line with.
pub indent: usize,
- /// Whether the where clause needs to add a comma and newline after the last bound.
+ /// Whether the where-clause needs to add a comma and newline after the last bound.
pub end_newline: bool,
}
/// Called at the end of a span of highlighted text.
fn exit_span(&mut self) -> io::Result<()>;
- /// Called for a span of text. If the text should be highlighted differently from the
+ /// Called for a span of text. If the text should be highlighted differently from the
/// surrounding text, then the `Class` argument will be a value other than `None`.
///
/// The following sequences of callbacks are equivalent:
-//! Markdown formatting for rustdoc
+//! Markdown formatting for rustdoc.
//!
//! This module implements markdown formatting through the pulldown-cmark
//! rust-library. This module exposes all of the
-//! functionality through a unit-struct, `Markdown`, which has an implementation
+//! functionality through a unit struct, `Markdown`, which has an implementation
//! of `fmt::Display`. Example usage:
//!
//! ```
RefCell::new(None)
});
-/// Adds syntax highlighting and playground Run buttons to rust code blocks.
+/// Adds syntax highlighting and playground Run buttons to Rust code blocks.
struct CodeBlocks<'a, I: Iterator<Item = Event<'a>>> {
inner: I,
check_error_codes: ErrorCodes,
}
}
-/// Make headings links with anchor ids and build up TOC.
+/// Replaces links in the Markdown stream using the resolved `links` replacement pairs.
struct LinkReplacer<'a, 'b, I: Iterator<Item = Event<'a>>> {
inner: I,
links: &'b [(String, String)],
}
}
-/// Make headings links with anchor ids and build up TOC.
+/// Makes headings into links with anchor IDs and builds up the TOC.
struct HeadingLinks<'a, 'b, 'ids, I: Iterator<Item = Event<'a>>> {
inner: I,
toc: Option<&'b mut TocBuilder>,
-//! Rustdoc's HTML Rendering module
+//! Rustdoc's HTML rendering module.
//!
//! This modules contains the bulk of the logic necessary for rendering a
//! rustdoc `clean::Crate` instance to a set of static HTML pages. This
}
impl SharedContext {
- /// Returns whether the `collapse-docs` pass was run on this crate.
+ /// Returns `true` if the `collapse-docs` pass was run on this crate.
pub fn was_collapsed(&self) -> bool {
self.passes.contains("collapse-docs")
}
#[derive(Default)]
pub struct Cache {
/// Mapping of typaram ids to the name of the type parameter. This is used
- /// when pretty-printing a type (so pretty printing doesn't have to
+ /// when pretty-printing a type (so pretty-printing doesn't have to
/// painfully maintain a context like this)
pub typarams: FxHashMap<DefId, String>,
- /// Maps a type id to all known implementations for that type. This is only
+ /// Maps a type ID to all known implementations for that type. This is only
/// recognized for intra-crate `ResolvedPath` types, and is used to print
/// out extra documentation on the page of an enum/struct.
///
/// found on that implementation.
pub impls: FxHashMap<DefId, Vec<Impl>>,
- /// Maintains a mapping of local crate node ids to the fully qualified name
+ /// Maintains a mapping of local crate `NodeId`s to the fully qualified name
/// and "short type description" of that node. This is used when generating
/// URLs when a type is being linked to. External paths are not located in
/// this map because the `External` type itself has all the information
/// generating explicit hyperlinks to other crates.
pub external_paths: FxHashMap<DefId, (Vec<String>, ItemType)>,
- /// Maps local def ids of exported types to fully qualified paths.
+ /// Maps local `DefId`s of exported types to fully qualified paths.
/// Unlike 'paths', this mapping ignores any renames that occur
/// due to 'use' statements.
///
cx.krate(krate)
}
-/// Build the search index from the collected metadata
+/// Builds the search index from the collected metadata
fn build_index(krate: &clean::Crate, cache: &mut Cache) -> String {
let mut nodeid_to_pathid = FxHashMap::default();
let mut crate_items = Vec::with_capacity(cache.search_index.len());
}
impl<'a> Item<'a> {
- /// Generate a url appropriate for an `href` attribute back to the source of
+ /// Generates a url appropriate for an `href` attribute back to the source of
/// this item.
///
/// The url generated, when clicked, will redirect the browser back to the
// The trailing space after each tag is to space it properly against the rest of the docs.
if item.deprecation().is_some() {
- tags += &tag_html("deprecated", "Deprecated");
+ let mut message = "Deprecated";
+ if let Some(ref stab) = item.stability {
+ if let Some(ref depr) = stab.deprecation {
+ if let Some(ref since) = depr.since {
+ if !stability::deprecation_in_effect(&since) {
+ message = "Deprecation planned";
+ }
+ }
+ }
+ }
+ tags += &tag_html("deprecated", message);
}
if let Some(stab) = item
let mut stability = vec![];
let error_codes = ErrorCodes::from(UnstableFeatures::from_environment().is_nightly_build());
- if let Some(Deprecation { since, note }) = &item.deprecation() {
+ if let Some(Deprecation { note, since }) = &item.deprecation() {
+ // We display deprecation messages for #[deprecated] and #[rustc_deprecated]
+ // but only display the future-deprecation messages for #[rustc_deprecated].
let mut message = if let Some(since) = since {
- if stability::deprecation_in_effect(since) {
- format!("Deprecated since {}", Escape(since))
- } else {
- format!("Deprecating in {}", Escape(since))
- }
+ format!("Deprecated since {}", Escape(since))
} else {
String::from("Deprecated")
};
+ if let Some(ref stab) = item.stability {
+ if let Some(ref depr) = stab.deprecation {
+ if let Some(ref since) = depr.since {
+ if !stability::deprecation_in_effect(&since) {
+ message = format!("Deprecating in {}", Escape(&since));
+ }
+ }
+ }
+ }
if let Some(note) = note {
let mut ids = cx.id_map.borrow_mut();
}
- /// Convert into a true `Toc` struct.
+ /// Converts into a true `Toc` struct.
pub fn into_toc(mut self) -> Toc {
// we know all levels are >= 1.
self.fold_until(0);
}
}
-/// Run any tests/code examples in the markdown file `input`.
+/// Runs any tests/code examples in the markdown file `input`.
pub fn test(mut options: Options, diag: &errors::Handler) -> isize {
let input_str = match load_string(&options.input, diag) {
Ok(s) => s,
}
}
- /// Resolve a given string as a path, along with whether or not it is
+ /// Resolves a given string as a path, along with whether or not it is
/// in the value namespace. Also returns an optional URL fragment in the case
/// of variants and methods.
fn resolve(&self,
}
}
-/// Resolve a string as a macro.
+/// Resolves a string as a macro.
fn macro_resolve(cx: &DocContext, path_str: &str) -> Option<Def> {
use syntax::ext::base::{MacroKind, SyntaxExtension};
let segment = ast::PathSegment::from_ident(Ident::from_str(path_str));
}
}
-/// Return a span encompassing all the given attributes.
+/// Returns a span encompassing all the given attributes.
crate fn span_of_attrs(attrs: &clean::Attributes) -> Span {
if attrs.doc_strings.is_empty() {
return DUMMY_SP;
/// Cross-crate inlining occurs later on during crate cleaning
/// and follows different rules.
///
- /// Returns true if the target has been inlined.
+ /// Returns `true` if the target has been inlined.
fn maybe_inline_local(&mut self,
id: ast::NodeId,
def: Def,
}
for item in self.cx.tcx.item_children(def_id).iter() {
- if self.cx.tcx.def_key(item.def.def_id()).parent.map_or(false, |d| d == def_id.index) ||
- item.vis == Visibility::Public {
- self.visit_item(item.def);
+ if let Some(def_id) = item.def.opt_def_id() {
+ if self.cx.tcx.def_key(def_id).parent.map_or(false, |d| d == def_id.index) ||
+ item.vis == Visibility::Public {
+ self.visit_item(item.def);
+ }
}
}
}
impl FromHex for str {
- /// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`)
+ /// Converts any hexadecimal encoded string (literal, `@`, `&`, or `~`)
/// to the byte values it encodes.
///
/// You can use the `String::from_utf8` function to turn a
}
}
- /// Set the number of spaces to indent for each level.
+ /// Sets the number of spaces to indent for each level.
/// This is safe to set during encoding.
pub fn set_indent(&mut self, indent: usize) {
// self.indent very well could be 0 so we need to use checked division.
}
}
-/// Create an `AsJson` wrapper which can be used to print a value as JSON
+/// Creates an `AsJson` wrapper which can be used to print a value as JSON
/// on-the-fly via `write!`
pub fn as_json<T>(t: &T) -> AsJson<'_, T> {
AsJson { inner: t }
}
-/// Create an `AsPrettyJson` wrapper which can be used to print a value as JSON
+/// Creates an `AsPrettyJson` wrapper which can be used to print a value as JSON
/// on-the-fly via `write!`
pub fn as_pretty_json<T>(t: &T) -> AsPrettyJson<'_, T> {
AsPrettyJson { inner: t, indent: None }
}
/// Attempts to get a nested Json Object for each key in `keys`.
- /// If any key is found not to exist, find_path will return None.
+ /// If any key is found not to exist, `find_path` will return `None`.
/// Otherwise, it will return the Json value associated with the final key.
pub fn find_path<'a>(&'a self, keys: &[&str]) -> Option<&'a Json>{
let mut target = self;
/// If the Json value is an Object, performs a depth-first search until
/// a value associated with the provided key is found. If no value is found
- /// or the Json value is not an Object, returns None.
+ /// or the Json value is not an Object, returns `None`.
pub fn search<'a>(&'a self, key: &str) -> Option<&'a Json> {
match self {
&Json::Object(ref map) => {
}
}
- /// Returns true if the Json value is an Object. Returns false otherwise.
+ /// Returns `true` if the Json value is an `Object`.
pub fn is_object(&self) -> bool {
self.as_object().is_some()
}
- /// If the Json value is an Object, returns the associated BTreeMap.
- /// Returns None otherwise.
+ /// If the Json value is an `Object`, returns the associated `BTreeMap`;
+ /// returns `None` otherwise.
pub fn as_object(&self) -> Option<&Object> {
match *self {
Json::Object(ref map) => Some(map),
}
}
- /// Returns true if the Json value is an Array. Returns false otherwise.
+ /// Returns `true` if the Json value is an `Array`.
pub fn is_array(&self) -> bool {
self.as_array().is_some()
}
- /// If the Json value is an Array, returns the associated vector.
- /// Returns None otherwise.
+ /// If the Json value is an `Array`, returns the associated vector;
+ /// returns `None` otherwise.
pub fn as_array(&self) -> Option<&Array> {
match *self {
Json::Array(ref array) => Some(&*array),
}
}
- /// Returns true if the Json value is a String. Returns false otherwise.
+ /// Returns `true` if the Json value is a `String`.
pub fn is_string(&self) -> bool {
self.as_string().is_some()
}
- /// If the Json value is a String, returns the associated str.
- /// Returns None otherwise.
+ /// If the Json value is a `String`, returns the associated `str`;
+ /// returns `None` otherwise.
pub fn as_string(&self) -> Option<&str> {
match *self {
Json::String(ref s) => Some(&s[..]),
}
}
- /// Returns true if the Json value is a Number. Returns false otherwise.
+ /// Returns `true` if the Json value is a `Number`.
pub fn is_number(&self) -> bool {
match *self {
Json::I64(_) | Json::U64(_) | Json::F64(_) => true,
}
}
- /// Returns true if the Json value is a i64. Returns false otherwise.
+ /// Returns `true` if the Json value is an `i64`.
pub fn is_i64(&self) -> bool {
match *self {
Json::I64(_) => true,
}
}
- /// Returns true if the Json value is a u64. Returns false otherwise.
+ /// Returns `true` if the Json value is a `u64`.
pub fn is_u64(&self) -> bool {
match *self {
Json::U64(_) => true,
}
}
- /// Returns true if the Json value is a f64. Returns false otherwise.
+ /// Returns `true` if the Json value is an `f64`.
pub fn is_f64(&self) -> bool {
match *self {
Json::F64(_) => true,
}
}
- /// If the Json value is a number, return or cast it to a i64.
- /// Returns None otherwise.
+ /// If the Json value is a number, returns or casts it to an `i64`;
+ /// returns `None` otherwise.
pub fn as_i64(&self) -> Option<i64> {
match *self {
Json::I64(n) => Some(n),
}
}
- /// If the Json value is a number, return or cast it to a u64.
- /// Returns None otherwise.
+ /// If the Json value is a number, returns or casts it to a `u64`;
+ /// returns `None` otherwise.
pub fn as_u64(&self) -> Option<u64> {
match *self {
Json::I64(n) => Some(n as u64),
}
}
- /// If the Json value is a number, return or cast it to a f64.
- /// Returns None otherwise.
+ /// If the Json value is a number, returns or casts it to an `f64`;
+ /// returns `None` otherwise.
pub fn as_f64(&self) -> Option<f64> {
match *self {
Json::I64(n) => Some(n as f64),
}
}
- /// Returns true if the Json value is a Boolean. Returns false otherwise.
+ /// Returns `true` if the Json value is a `Boolean`.
pub fn is_boolean(&self) -> bool {
self.as_boolean().is_some()
}
- /// If the Json value is a Boolean, returns the associated bool.
- /// Returns None otherwise.
+ /// If the Json value is a `Boolean`, returns the associated `bool`;
+ /// returns `None` otherwise.
pub fn as_boolean(&self) -> Option<bool> {
match *self {
Json::Boolean(b) => Some(b),
}
}
- /// Returns true if the Json value is a Null. Returns false otherwise.
+ /// Returns `true` if the Json value is a `Null`.
pub fn is_null(&self) -> bool {
self.as_null().is_some()
}
- /// If the Json value is a Null, returns ().
- /// Returns None otherwise.
+ /// If the Json value is a `Null`, returns `()`;
+ /// returns `None` otherwise.
pub fn as_null(&self) -> Option<()> {
match *self {
Json::Null => Some(()),
/// Returns The number of elements in the Stack.
pub fn len(&self) -> usize { self.stack.len() }
- /// Returns true if the stack is empty.
+ /// Returns `true` if the stack is empty.
pub fn is_empty(&self) -> bool { self.stack.is_empty() }
/// Provides access to the StackElement at a given index.
true
}
- /// Returns true if the bottom-most elements of this stack are the same as
+ /// Returns `true` if the bottom-most elements of this stack are the same as
/// the ones passed as parameter.
pub fn starts_with(&self, rhs: &[StackElement<'_>]) -> bool {
if self.stack.len() < rhs.len() { return false; }
true
}
- /// Returns true if the top-most elements of this stack are the same as
+ /// Returns `true` if the top-most elements of this stack are the same as
/// the ones passed as parameter.
pub fn ends_with(&self, rhs: &[StackElement<'_>]) -> bool {
if self.stack.len() < rhs.len() { return false; }
}
impl<T: Iterator<Item=char>> Builder<T> {
- /// Create a JSON Builder.
+ /// Creates a JSON Builder.
pub fn new(src: T) -> Builder<T> {
Builder { parser: Parser::new(src), token: None, }
}
}
impl<'a, T> AsPrettyJson<'a, T> {
- /// Set the indentation level for the emitted JSON
+ /// Sets the indentation level for the emitted JSON
pub fn indent(mut self, indent: usize) -> AsPrettyJson<'a, T> {
self.indent = Some(indent);
self
/// Implement this trait on your `{Encodable,Decodable}::Error` types
/// to override the default panic behavior for missing specializations.
pub trait SpecializationError {
- /// Create an error for a missing method specialization.
+ /// Creates an error for a missing method specialization.
/// Defaults to panicking with type, trait & method names.
/// `S` is the encoder/decoder state type,
/// `T` is the type being encoded/decoded, and
/// }
///
/// impl Viking {
-/// /// Create a new Viking.
+/// /// Creates a new Viking.
/// fn new(name: &str, country: &str) -> Viking {
/// Viking { name: name.to_string(), country: country.to_string() }
/// }
(retkey, retval, gap.into_table())
}
-/// Perform robin hood bucket stealing at the given `bucket`. You must
+/// Performs robin hood bucket stealing at the given `bucket`. You must
/// also pass that bucket's displacement so we don't have to recalculate it.
///
/// `hash`, `key`, and `val` are the elements to "robin hood" into the hashtable.
self.table.size()
}
- /// Returns true if the map contains no elements.
+ /// Returns `true` if the map contains no elements.
///
/// # Examples
///
self.search(k).map(|bucket| bucket.into_refs())
}
- /// Returns true if the map contains a value for the specified key.
+ /// Returns `true` if the map contains a value for the specified key.
///
/// The key may be any borrowed form of the map's key type, but
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
where S: BuildHasher,
K: Eq + Hash,
{
- /// Create a `RawEntryMut` from the given key.
+ /// Creates a `RawEntryMut` from the given key.
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn from_key<Q: ?Sized>(self, k: &Q) -> RawEntryMut<'a, K, V, S>
where K: Borrow<Q>,
self.from_key_hashed_nocheck(hasher.finish(), k)
}
- /// Create a `RawEntryMut` from the given key and its hash.
+ /// Creates a `RawEntryMut` from the given key and its hash.
#[inline]
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn from_key_hashed_nocheck<Q: ?Sized>(self, hash: u64, k: &Q) -> RawEntryMut<'a, K, V, S>
}
}
}
- /// Create a `RawEntryMut` from the given hash.
+ /// Creates a `RawEntryMut` from the given hash.
#[inline]
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub fn from_hash<F>(self, hash: u64, is_match: F) -> RawEntryMut<'a, K, V, S>
self.map.len()
}
- /// Returns true if the set contains no elements.
+ /// Returns `true` if the set contains no elements.
///
/// # Examples
///
Recover::replace(&mut self.map, value)
}
- /// Removes a value from the set. Returns `true` if the value was
+ /// Removes a value from the set. Returns whether the value was
/// present in the set.
///
/// The value may be any borrowed form of the set's value type, but
pub fn into_table(self) -> M {
self.table
}
- /// Get the raw index.
+ /// Gets the raw index.
pub fn index(&self) -> usize {
self.raw.idx
}
- /// Get the raw bucket.
+ /// Gets the raw bucket.
pub fn raw(&self) -> RawBucket<K, V> {
self.raw
}
}
impl<K, V, M> Bucket<K, V, M> {
- /// Get the raw index.
+ /// Gets the raw index.
pub fn index(&self) -> usize {
self.raw.idx
}
}
}
- /// Get the distance between this bucket and the 'ideal' location
+ /// Gets the distance between this bucket and the 'ideal' location
/// as determined by the key's hash stored in it.
///
/// In the cited blog posts above, this is called the "distance to
}
}
- /// Set the table tag
+ /// Sets the table tag.
pub fn set_tag(&mut self, value: bool) {
self.hashes.set_tag(value)
}
- /// Get the table tag
+ /// Gets the table tag.
pub fn tag(&self) -> bool {
self.hashes.tag()
}
//! // A client of the bar. They have a blood alcohol level.
//! struct Person { blood_alcohol: f32 }
//!
-//! // All the orders made to the bar, by client id.
-//! let orders = vec![1,2,1,2,3,4,1,2,2,3,4,1,1,1];
+//! // All the orders made to the bar, by client ID.
+//! let orders = vec![1, 2, 1, 2, 3, 4, 1, 2, 2, 3, 4, 1, 1, 1];
//!
//! // Our clients.
//! let mut blood_alcohol = BTreeMap::new();
#[stable(feature = "error_source", since = "1.30.0")]
fn source(&self) -> Option<&(dyn Error + 'static)> { None }
- /// Get the `TypeId` of `self`
+ /// Gets the `TypeId` of `self`
#[doc(hidden)]
#[stable(feature = "error_type_id", since = "1.34.0")]
fn type_id(&self) -> TypeId where Self: 'static {
// copied from any.rs
impl dyn Error + 'static {
- /// Returns true if the boxed type is the same as `T`
+ /// Returns `true` if the boxed type is the same as `T`
#[stable(feature = "error_downcast", since = "1.3.0")]
#[inline]
pub fn is<T: Error + 'static>(&self) -> bool {
Err(self)
}
}
+
+ /// Returns an iterator starting with the current error and continuing with
+ /// recursively calling [`source`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(error_iter)]
+ /// use std::error::Error;
+ /// use std::fmt;
+ ///
+ /// #[derive(Debug)]
+ /// struct A;
+ ///
+ /// #[derive(Debug)]
+ /// struct B(Option<Box<dyn Error + 'static>>);
+ ///
+ /// impl fmt::Display for A {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "A")
+ /// }
+ /// }
+ ///
+ /// impl fmt::Display for B {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "B")
+ /// }
+ /// }
+ ///
+ /// impl Error for A {}
+ ///
+ /// impl Error for B {
+ /// fn source(&self) -> Option<&(dyn Error + 'static)> {
+ /// self.0.as_ref().map(|e| e.as_ref())
+ /// }
+ /// }
+ ///
+ /// let b = B(Some(Box::new(A)));
+ ///
+ /// // let err : Box<Error> = b.into(); // or
+ /// let err = &b as &(dyn Error);
+ ///
+ /// let mut iter = err.iter_chain();
+ ///
+ /// assert_eq!("B".to_string(), iter.next().unwrap().to_string());
+ /// assert_eq!("A".to_string(), iter.next().unwrap().to_string());
+ /// assert!(iter.next().is_none());
+ /// assert!(iter.next().is_none());
+ /// ```
+ ///
+ /// [`source`]: trait.Error.html#method.source
+ #[unstable(feature = "error_iter", issue = "58289")]
+ #[inline]
+ pub fn iter_chain(&self) -> ErrorIter {
+ ErrorIter {
+ current: Some(self),
+ }
+ }
+
+ /// Returns an iterator starting with the [`source`] of this error
+ /// and continuing with recursively calling [`source`].
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(error_iter)]
+ /// use std::error::Error;
+ /// use std::fmt;
+ ///
+ /// #[derive(Debug)]
+ /// struct A;
+ ///
+ /// #[derive(Debug)]
+ /// struct B(Option<Box<dyn Error + 'static>>);
+ ///
+ /// #[derive(Debug)]
+ /// struct C(Option<Box<dyn Error + 'static>>);
+ ///
+ /// impl fmt::Display for A {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "A")
+ /// }
+ /// }
+ ///
+ /// impl fmt::Display for B {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "B")
+ /// }
+ /// }
+ ///
+ /// impl fmt::Display for C {
+ /// fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
+ /// write!(f, "C")
+ /// }
+ /// }
+ ///
+ /// impl Error for A {}
+ ///
+ /// impl Error for B {
+ /// fn source(&self) -> Option<&(dyn Error + 'static)> {
+ /// self.0.as_ref().map(|e| e.as_ref())
+ /// }
+ /// }
+ ///
+ /// impl Error for C {
+ /// fn source(&self) -> Option<&(dyn Error + 'static)> {
+ /// self.0.as_ref().map(|e| e.as_ref())
+ /// }
+ /// }
+ ///
+ /// let b = B(Some(Box::new(A)));
+ /// let c = C(Some(Box::new(b)));
+ ///
+ /// // let err : Box<Error> = c.into(); // or
+ /// let err = &c as &(dyn Error);
+ ///
+ /// let mut iter = err.iter_sources();
+ ///
+ /// assert_eq!("B".to_string(), iter.next().unwrap().to_string());
+ /// assert_eq!("A".to_string(), iter.next().unwrap().to_string());
+ /// assert!(iter.next().is_none());
+ /// assert!(iter.next().is_none());
+ /// ```
+ ///
+ /// [`source`]: trait.Error.html#method.source
+ #[inline]
+ #[unstable(feature = "error_iter", issue = "58289")]
+ pub fn iter_sources(&self) -> ErrorIter {
+ ErrorIter {
+ current: self.source(),
+ }
+ }
+}
+
+/// An iterator over [`Error`].
+///
+/// [`Error`]: trait.Error.html
+#[unstable(feature = "error_iter", issue = "58289")]
+#[derive(Copy, Clone, Debug)]
+pub struct ErrorIter<'a> {
+ current: Option<&'a (dyn Error + 'static)>,
+}
+
+#[unstable(feature = "error_iter", issue = "58289")]
+impl<'a> Iterator for ErrorIter<'a> {
+ type Item = &'a (dyn Error + 'static);
+
+ fn next(&mut self) -> Option<Self::Item> {
+ let current = self.current;
+ self.current = self.current.and_then(Error::source);
+ current
+ }
}
impl dyn Error + Send {
/// Calculates the least nonnegative remainder of `self (mod rhs)`.
///
/// In particular, the return value `r` satisfies `0.0 <= r < rhs.abs()` in
- /// most cases. However, due to a floating point round-off error it can
+ /// most cases. However, due to a floating point round-off error it can
/// result in `r == rhs.abs()`, violating the mathematical definition, if
/// `self` is much smaller than `rhs.abs()` in magnitude and `self < 0.0`.
/// This result is not an element of the function's codomain, but it is the
///
/// # Examples
///
- /// Create a `CString`, pass ownership to an `extern` function (via raw pointer), then retake
+ /// Creates a `CString`, pass ownership to an `extern` function (via raw pointer), then retake
/// ownership with `from_raw`:
///
/// ```ignore (extern-declaration)
///
/// `OsString` and [`OsStr`] bridge this gap by simultaneously representing Rust
/// and platform-native string values, and in particular allowing a Rust string
-/// to be converted into an "OS" string with no cost if possible. A consequence
+/// to be converted into an "OS" string with no cost if possible. A consequence
/// of this is that `OsString` instances are *not* `NUL` terminated; in order
/// to pass to e.g., Unix system call, you should create a [`CStr`].
///
/// already sufficient.
///
/// Note that the allocator may give the collection more space than it
- /// requests. Therefore capacity can not be relied upon to be precisely
+ /// requests. Therefore, capacity can not be relied upon to be precisely
/// minimal. Prefer reserve if future insertions are expected.
///
/// # Examples
///
/// # Examples
///
-/// Create a new file and write bytes to it:
+/// Creates a new file and write bytes to it:
///
/// ```no_run
/// use std::fs::File;
/// Read the entire contents of a file into a bytes vector.
///
/// This is a convenience function for using [`File::open`] and [`read_to_end`]
-/// with fewer imports and without an intermediate variable. It pre-allocates a
+/// with fewer imports and without an intermediate variable. It pre-allocates a
/// buffer based on the file size when available, so it is generally faster than
/// reading into a vector created with `Vec::new()`.
///
/// Read the entire contents of a file into a string.
///
/// This is a convenience function for using [`File::open`] and [`read_to_string`]
-/// with fewer imports and without an intermediate variable. It pre-allocates a
+/// with fewer imports and without an intermediate variable. It pre-allocates a
/// buffer based on the file size when available, so it is generally faster than
/// reading into a string created with `String::new()`.
///
self.inner.file_attr().map(Metadata)
}
- /// Create a new `File` instance that shares the same underlying file handle
+ /// Creates a new `File` instance that shares the same underlying file handle
/// as the existing `File` instance. Reads, writes, and seeks will affect
/// both `File` instances simultaneously.
///
/// # Examples
///
- /// Create two handles for a file named `foo.txt`:
+ /// Creates two handles for a file named `foo.txt`:
///
/// ```no_run
/// use std::fs::File;
FileType(self.0.file_type())
}
- /// Returns whether this metadata is for a directory. The
+ /// Returns `true` if this metadata is for a directory. The
/// result is mutually exclusive to the result of
/// [`is_file`], and will be false for symlink metadata
/// obtained from [`symlink_metadata`].
#[stable(feature = "rust1", since = "1.0.0")]
pub fn is_dir(&self) -> bool { self.file_type().is_dir() }
- /// Returns whether this metadata is for a regular file. The
+ /// Returns `true` if this metadata is for a regular file. The
/// result is mutually exclusive to the result of
/// [`is_dir`], and will be false for symlink metadata
/// obtained from [`symlink_metadata`].
}
impl Permissions {
- /// Returns whether these permissions describe a readonly (unwritable) file.
+ /// Returns `true` if these permissions describe a readonly (unwritable) file.
///
/// # Examples
///
}
impl FileType {
- /// Test whether this file type represents a directory. The
+ /// Tests whether this file type represents a directory. The
/// result is mutually exclusive to the results of
/// [`is_file`] and [`is_symlink`]; only zero or one of these
/// tests may pass.
#[stable(feature = "file_type", since = "1.1.0")]
pub fn is_dir(&self) -> bool { self.0.is_dir() }
- /// Test whether this file type represents a regular file.
+ /// Tests whether this file type represents a regular file.
/// The result is mutually exclusive to the results of
/// [`is_dir`] and [`is_symlink`]; only zero or one of these
/// tests may pass.
#[stable(feature = "file_type", since = "1.1.0")]
pub fn is_file(&self) -> bool { self.0.is_file() }
- /// Test whether this file type represents a symbolic link.
+ /// Tests whether this file type represents a symbolic link.
/// The result is mutually exclusive to the results of
/// [`is_dir`] and [`is_file`]; only zero or one of these
/// tests may pass.
/// with the [`fs::symlink_metadata`] function and not the
/// [`fs::metadata`] function. The [`fs::metadata`] function
/// follows symbolic links, so [`is_symlink`] would always
- /// return false for the target file.
+ /// return `false` for the target file.
///
/// [`Metadata`]: struct.Metadata.html
/// [`fs::metadata`]: fn.metadata.html
#[stable(feature = "rust1", since = "1.0.0")]
pub fn path(&self) -> PathBuf { self.0.path() }
- /// Return the metadata for the file that this entry points at.
+ /// Returns the metadata for the file that this entry points at.
///
/// This function will not traverse symlinks if this entry points at a
/// symlink.
self.0.metadata().map(Metadata)
}
- /// Return the file type for the file that this entry points at.
+ /// Returns the file type for the file that this entry points at.
///
/// This function will not traverse symlinks if this entry points at a
/// symlink.
self
}
- /// Create the specified directory with the options configured in this
+ /// Creates the specified directory with the options configured in this
/// builder.
///
/// It is considered an error if the directory already exists unless
use core::pin::Pin;
use core::option::Option;
use core::ptr::NonNull;
-use core::task::{LocalWaker, Poll};
+use core::task::{Waker, Poll};
use core::ops::{Drop, Generator, GeneratorState};
#[doc(inline)]
#[unstable(feature = "gen_future", issue = "50547")]
impl<T: Generator<Yield = ()>> Future for GenFuture<T> {
type Output = T::Return;
- fn poll(self: Pin<&mut Self>, lw: &LocalWaker) -> Poll<Self::Output> {
+ fn poll(self: Pin<&mut Self>, waker: &Waker) -> Poll<Self::Output> {
// Safe because we're !Unpin + !Drop mapping to a ?Unpin value
let gen = unsafe { Pin::map_unchecked_mut(self, |s| &mut s.0) };
- set_task_waker(lw, || match gen.resume() {
+ set_task_waker(waker, || match gen.resume() {
GeneratorState::Yielded(()) => Poll::Pending,
GeneratorState::Complete(x) => Poll::Ready(x),
})
}
thread_local! {
- static TLS_WAKER: Cell<Option<NonNull<LocalWaker>>> = Cell::new(None);
+ static TLS_WAKER: Cell<Option<NonNull<Waker>>> = Cell::new(None);
}
-struct SetOnDrop(Option<NonNull<LocalWaker>>);
+struct SetOnDrop(Option<NonNull<Waker>>);
impl Drop for SetOnDrop {
fn drop(&mut self) {
#[unstable(feature = "gen_future", issue = "50547")]
/// Sets the thread-local task context used by async/await futures.
-pub fn set_task_waker<F, R>(lw: &LocalWaker, f: F) -> R
+pub fn set_task_waker<F, R>(waker: &Waker, f: F) -> R
where
F: FnOnce() -> R
{
let old_waker = TLS_WAKER.with(|tls_waker| {
- tls_waker.replace(Some(NonNull::from(lw)))
+ tls_waker.replace(Some(NonNull::from(waker)))
});
let _reset_waker = SetOnDrop(old_waker);
f()
/// retrieved by a surrounding call to get_task_waker.
pub fn get_task_waker<F, R>(f: F) -> R
where
- F: FnOnce(&LocalWaker) -> R
+ F: FnOnce(&Waker) -> R
{
let waker_ptr = TLS_WAKER.with(|tls_waker| {
// Clear the entry so that nested `get_task_waker` calls
let _reset_waker = SetOnDrop(waker_ptr);
let waker_ptr = waker_ptr.expect(
- "TLS LocalWaker not set. This is a rustc bug. \
+ "TLS Waker not set. This is a rustc bug. \
Please file an issue on https://github.com/rust-lang/rust.");
unsafe { f(waker_ptr.as_ref()) }
}
where
F: Future
{
- get_task_waker(|lw| F::poll(f, lw))
+ get_task_waker(|waker| F::poll(f, waker))
}
/// the underlying [`Read`] and maintains an in-memory buffer of the results.
///
/// `BufReader` can improve the speed of programs that make *small* and
-/// *repeated* read calls to the same file or network socket. It does not
+/// *repeated* read calls to the same file or network socket. It does not
/// help when reading very large amounts at once, or reading just one or a few
-/// times. It also provides no advantage when reading from a source that is
+/// times. It also provides no advantage when reading from a source that is
/// already in memory, like a `Vec<u8>`.
///
/// [`Read`]: ../../std/io/trait.Read.html
/// writer in large, infrequent batches.
///
/// `BufWriter` can improve the speed of programs that make *small* and
-/// *repeated* write calls to the same file or network socket. It does not
+/// *repeated* write calls to the same file or network socket. It does not
/// help when writing very large amounts at once, or writing just one or a few
-/// times. It also provides no advantage when writing to a destination that is
+/// times. It also provides no advantage when writing to a destination that is
/// in memory, like a `Vec<u8>`.
///
/// When the `BufWriter` is dropped, the contents of its buffer will be written
#[derive(Copy, PartialEq, Eq, Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub enum SeekFrom {
- /// Set the offset to the provided number of bytes.
+ /// Sets the offset to the provided number of bytes.
#[stable(feature = "rust1", since = "1.0.0")]
Start(#[stable(feature = "rust1", since = "1.0.0")] u64),
- /// Set the offset to the size of this object plus the specified number of
+ /// Sets the offset to the size of this object plus the specified number of
/// bytes.
///
/// It is possible to seek beyond the end of an object, but it's an error to
#[stable(feature = "rust1", since = "1.0.0")]
End(#[stable(feature = "rust1", since = "1.0.0")] i64),
- /// Set the offset to the current position plus the specified number of
+ /// Sets the offset to the current position plus the specified number of
/// bytes.
///
/// It is possible to seek beyond the end of an object, but it's an error to
/// }
///
/// fn generic_where<T>(x: T) -> T
-/// where T: std::ops::Add<Output=T> + Copy
+/// where T: std::ops::Add<Output = T> + Copy
/// {
/// x + x + x
/// }
/// `for` is primarily used in for-in-loops, but it has a few other pieces of syntactic uses such as
/// `impl Trait for Type` (see [`impl`] for more info on that). for-in-loops, or to be more
/// precise, iterator loops, are a simple syntactic sugar over an exceedingly common practice
-/// within Rust, which is to loop over an iterator until that iterator returns None (or `break`
+/// within Rust, which is to loop over an iterator until that iterator returns `None` (or `break`
/// is called).
///
/// ```rust
/// directly accessed and modified.
///
/// Tuple structs are similar to regular structs, but its fields have no names. They are used like
-/// tuples, with deconstruction possible via `let TupleStruct(x, y) = foo;` syntax. For accessing
+/// tuples, with deconstruction possible via `let TupleStruct(x, y) = foo;` syntax. For accessing
/// individual variables, the same syntax is used as with regular tuples, namely `foo.0`, `foo.1`,
/// etc, starting at zero.
///
//! primitives](#primitives), [standard macros](#macros), [I/O] and
//! [multithreading], among [many other things][other].
//!
-//! `std` is available to all Rust crates by default. Therefore the
+//! `std` is available to all Rust crates by default. Therefore, the
//! standard library can be accessed in [`use`] statements through the path
//! `std`, as in [`use std::env`].
//!
#[cfg(test)] extern crate std as realstd;
#[cfg(all(target_vendor = "fortanix", target_env = "sgx"))]
-#[macro_use]
-#[allow(unused_imports)] // FIXME: without `#[macro_use]`, get error: “cannot
- // determine resolution for the macro `usercalls_asm`”
extern crate fortanix_sgx_abi;
// The standard macros that are not built-in to the compiler.
//! Types and Traits for working with asynchronous tasks.
#[doc(inline)]
pub use core::task::*;
- #[doc(inline)]
- pub use alloc_crate::task::*;
}
#[unstable(feature = "futures_api",
///
/// This allows a program to terminate immediately and provide feedback
/// to the caller of the program. `panic!` should be used when a program reaches
-/// an unrecoverable problem.
+/// an unrecoverable state.
///
/// This macro is the perfect way to assert conditions in example code and in
-/// tests. `panic!` is closely tied with the `unwrap` method of both [`Option`]
-/// and [`Result`][runwrap] enums. Both implementations call `panic!` when they are set
+/// tests. `panic!` is closely tied with the `unwrap` method of both [`Option`]
+/// and [`Result`][runwrap] enums. Both implementations call `panic!` when they are set
/// to None or Err variants.
///
/// This macro is used to inject panic into a Rust thread, causing the thread to
/// is transmitted.
///
/// [`Result`] enum is often a better solution for recovering from errors than
-/// using the `panic!` macro. This macro should be used to avoid proceeding using
+/// using the `panic!` macro. This macro should be used to avoid proceeding using
/// incorrect values, such as from external sources. Detailed information about
/// error handling is found in the [book].
///
/// necessary to use [`io::stdout().flush()`][flush] to ensure the output is emitted
/// immediately.
///
-/// Use `print!` only for the primary output of your program. Use
+/// Use `print!` only for the primary output of your program. Use
/// [`eprint!`] instead to print error and progress messages.
///
/// [`println!`]: ../std/macro.println.html
/// Use the [`format!`] syntax to write data to the standard output.
/// See [`std::fmt`] for more information.
///
-/// Use `println!` only for the primary output of your program. Use
+/// Use `println!` only for the primary output of your program. Use
/// [`eprintln!`] instead to print error and progress messages.
///
/// [`format!`]: ../std/macro.format.html
/// Macro for printing to the standard error.
///
/// Equivalent to the [`print!`] macro, except that output goes to
-/// [`io::stderr`] instead of `io::stdout`. See [`print!`] for
+/// [`io::stderr`] instead of `io::stdout`. See [`print!`] for
/// example usage.
///
-/// Use `eprint!` only for error and progress messages. Use `print!`
+/// Use `eprint!` only for error and progress messages. Use `print!`
/// instead for the primary output of your program.
///
/// [`io::stderr`]: ../std/io/struct.Stderr.html
/// Macro for printing to the standard error, with a newline.
///
/// Equivalent to the [`println!`] macro, except that output goes to
-/// [`io::stderr`] instead of `io::stdout`. See [`println!`] for
+/// [`io::stderr`] instead of `io::stdout`. See [`println!`] for
/// example usage.
///
-/// Use `eprintln!` only for error and progress messages. Use `println!`
+/// Use `eprintln!` only for error and progress messages. Use `println!`
/// instead for the primary output of your program.
///
/// [`io::stderr`]: ../std/io/struct.Stderr.html
/// The core macro for formatted string creation & output.
///
/// This macro functions by taking a formatting string literal containing
- /// `{}` for each additional argument passed. `format_args!` prepares the
+ /// `{}` for each additional argument passed. `format_args!` prepares the
/// additional parameters to ensure the output can be interpreted as a string
- /// and canonicalizes the arguments into a single type. Any value that implements
+ /// and canonicalizes the arguments into a single type. Any value that implements
/// the [`Display`] trait can be passed to `format_args!`, as can any
/// [`Debug`] implementation be passed to a `{:?}` within the formatting string.
///
/// This macro produces a value of type [`fmt::Arguments`]. This value can be
/// passed to the macros within [`std::fmt`] for performing useful redirection.
/// All other formatting macros ([`format!`], [`write!`], [`println!`], etc) are
- /// proxied through this one. `format_args!`, unlike its derived macros, avoids
+ /// proxied through this one. `format_args!`, unlike its derived macros, avoids
/// heap allocations.
///
/// You can use the [`fmt::Arguments`] value that `format_args!` returns
/// If the named environment variable is present at compile time, this will
/// expand into an expression of type `Option<&'static str>` whose value is
/// `Some` of the value of the environment variable. If the environment
- /// variable is not present, then this will expand to `None`. See
+ /// variable is not present, then this will expand to `None`. See
/// [`Option<T>`][option] for more information on this type.
///
/// A compile time error is never emitted when using this macro regardless
/// # Custom Messages
///
/// This macro has a second form, where a custom panic message can
- /// be provided with or without arguments for formatting. See [`std::fmt`]
+ /// be provided with or without arguments for formatting. See [`std::fmt`]
/// for syntax for this form.
///
/// [`panic!`]: macro.panic.html
self.inner.sin6_scope_id
}
- /// Change the scope ID associated with this socket address.
+ /// Changes the scope ID associated with this socket address.
///
/// See the [`scope_id`] method's documentation for more details.
///
/// [`SocketAddr`] values.
///
/// This trait is used for generic address resolution when constructing network
-/// objects. By default it is implemented for the following types:
+/// objects. By default it is implemented for the following types:
///
/// * [`SocketAddr`]: [`to_socket_addrs`] is the identity function.
///
#[stable(feature = "ip_u32", since = "1.1.0")]
impl From<Ipv4Addr> for u32 {
- /// Convert an `Ipv4Addr` into a host byte order `u32`.
+ /// Converts an `Ipv4Addr` into a host byte order `u32`.
///
/// # Examples
///
#[stable(feature = "ip_u32", since = "1.1.0")]
impl From<u32> for Ipv4Addr {
- /// Convert a host byte order `u32` into an `Ipv4Addr`.
+ /// Converts a host byte order `u32` into an `Ipv4Addr`.
///
/// # Examples
///
#[stable(feature = "ip_from_slice", since = "1.17.0")]
impl From<[u8; 4]> for IpAddr {
- /// Create an `IpAddr::V4` from a four element byte array.
+ /// Creates an `IpAddr::V4` from a four element byte array.
///
/// # Examples
///
#[stable(feature = "ip_from_slice", since = "1.17.0")]
impl From<[u8; 16]> for IpAddr {
- /// Create an `IpAddr::V6` from a sixteen element byte array.
+ /// Creates an `IpAddr::V6` from a sixteen element byte array.
///
/// # Examples
///
#[stable(feature = "ip_from_slice", since = "1.17.0")]
impl From<[u16; 8]> for IpAddr {
- /// Create an `IpAddr::V6` from an eight element 16-bit array.
+ /// Creates an `IpAddr::V6` from an eight element 16-bit array.
///
/// # Examples
///
self.0.ttl()
}
- /// Get the value of the `SO_ERROR` option on this socket.
+ /// Gets the value of the `SO_ERROR` option on this socket.
///
/// This will retrieve the stored error in the underlying socket, clearing
/// the field in the process. This can be useful for checking errors between
///
/// # Examples
///
- /// Create a TCP listener bound to `127.0.0.1:80`:
+ /// Creates a TCP listener bound to `127.0.0.1:80`:
///
/// ```no_run
/// use std::net::TcpListener;
/// let listener = TcpListener::bind("127.0.0.1:80").unwrap();
/// ```
///
- /// Create a TCP listener bound to `127.0.0.1:80`. If that fails, create a
+ /// Creates a TCP listener bound to `127.0.0.1:80`. If that fails, create a
/// TCP listener bound to `127.0.0.1:443`:
///
/// ```no_run
self.0.only_v6()
}
- /// Get the value of the `SO_ERROR` option on this socket.
+ /// Gets the value of the `SO_ERROR` option on this socket.
///
/// This will retrieve the stored error in the underlying socket, clearing
/// the field in the process. This can be useful for checking errors between
///
/// # Examples
///
- /// Create a UDP socket bound to `127.0.0.1:3400`:
+ /// Creates a UDP socket bound to `127.0.0.1:3400`:
///
/// ```no_run
/// use std::net::UdpSocket;
/// let socket = UdpSocket::bind("127.0.0.1:3400").expect("couldn't bind to address");
/// ```
///
- /// Create a UDP socket bound to `127.0.0.1:3400`. If the socket cannot be
+ /// Creates a UDP socket bound to `127.0.0.1:3400`. If the socket cannot be
/// bound to that address, create a UDP socket bound to `127.0.0.1:3401`:
///
/// ```no_run
/// This will return an error when the IP version of the local socket
/// does not match that returned from [`ToSocketAddrs`].
///
- /// See <https://github.com/rust-lang/rust/issues/34202> for more details.
+ /// See issue #34202 for more details.
///
/// [`ToSocketAddrs`]: ../../std/net/trait.ToSocketAddrs.html
///
self.0.leave_multicast_v6(multiaddr, interface)
}
- /// Get the value of the `SO_ERROR` option on this socket.
+ /// Gets the value of the `SO_ERROR` option on this socket.
///
/// This will retrieve the stored error in the underlying socket, clearing
/// the field in the process. This can be useful for checking errors between
///
/// # Examples
///
- /// Create a UDP socket bound to `127.0.0.1:3400` and connect the socket to
+ /// Creates a UDP socket bound to `127.0.0.1:3400` and connect the socket to
/// `127.0.0.1:8080`:
///
/// ```no_run
///
/// # Examples
///
- /// Create a UDP socket bound to `127.0.0.1:7878` and read bytes in
+ /// Creates a UDP socket bound to `127.0.0.1:7878` and read bytes in
/// nonblocking mode:
///
/// ```no_run
use ptr::{Unique, NonNull};
use rc::Rc;
use sync::{Arc, Mutex, RwLock, atomic};
-use task::{LocalWaker, Poll};
+use task::{Waker, Poll};
use thread::Result;
#[stable(feature = "panic_hooks", since = "1.10.0")]
impl<'a, F: Future> Future for AssertUnwindSafe<F> {
type Output = F::Output;
- fn poll(self: Pin<&mut Self>, lw: &LocalWaker) -> Poll<Self::Output> {
+ fn poll(self: Pin<&mut Self>, waker: &Waker) -> Poll<Self::Output> {
let pinned_field = unsafe { Pin::map_unchecked_mut(self, |x| &mut x.0) };
- F::poll(pinned_field, lw)
+ F::poll(pinned_field, waker)
}
}
fs::read_dir(self)
}
- /// Returns whether the path points at an existing entity.
+ /// Returns `true` if the path points at an existing entity.
///
/// This function will traverse symbolic links to query information about the
/// destination file. In case of broken symbolic links this will return `false`.
fs::metadata(self).is_ok()
}
- /// Returns whether the path exists on disk and is pointing at a regular file.
+ /// Returns `true` if the path exists on disk and is pointing at a regular file.
///
/// This function will traverse symbolic links to query information about the
/// destination file. In case of broken symbolic links this will return `false`.
fs::metadata(self).map(|m| m.is_file()).unwrap_or(false)
}
- /// Returns whether the path exists on disk and is pointing at a directory.
+ /// Returns `true` if the path exists on disk and is pointing at a directory.
///
/// This function will traverse symbolic links to query information about the
/// destination file. In case of broken symbolic links this will return `false`.
//! [`std::string`]: ../string/index.html
//! [`std::vec`]: ../vec/index.html
//! [`to_owned`]: ../borrow/trait.ToOwned.html#tymethod.to_owned
-//! [book-closures]: ../../book/first-edition/closures.html
-//! [book-dtor]: ../../book/first-edition/drop.html
-//! [book-enums]: ../../book/first-edition/enums.html
-//! [book-iter]: ../../book/first-edition/iterators.html
+//! [book-closures]: ../../book/ch13-01-closures.html
+//! [book-dtor]: ../../book/ch15-03-drop.html
+//! [book-enums]: ../../book/ch06-01-defining-an-enum.html
+//! [book-iter]: ../../book/ch13-02-iterators.html
#![stable(feature = "rust1", since = "1.0.0")]
///
/// ```ignore (string-from-str-error-type-is-not-never-yet)
/// #[feature(exhaustive_patterns)]
-/// // NOTE: This does not work today!
+/// // NOTE: this does not work today!
/// let Ok(s) = String::from_str("hello");
/// ```
///
/// On top of that, function pointers can vary based on what ABI they use. This is achieved by
/// adding the `extern` keyword to the type name, followed by the ABI in question. For example,
/// `fn()` is different from `extern "C" fn()`, which itself is different from `extern "stdcall"
-/// fn()`, and so on for the various ABIs that Rust supports. Non-`extern` functions have an ABI
+/// fn()`, and so on for the various ABIs that Rust supports. Non-`extern` functions have an ABI
/// of `"Rust"`, and `extern` functions without an explicit ABI have an ABI of `"C"`. For more
/// information, see [the nomicon's section on foreign calling conventions][nomicon-abi].
///
/// The search path to be used may be controlled by setting the
/// `PATH` environment variable on the Command,
/// but this has some implementation limitations on Windows
- /// (see <https://github.com/rust-lang/rust/issues/37519>).
+ /// (see issue #37519).
///
/// # Examples
///
Command { inner: imp::Command::new(program.as_ref()) }
}
- /// Add an argument to pass to the program.
+ /// Adds an argument to pass to the program.
///
/// Only one argument can be passed per use. So instead of:
///
self
}
- /// Add multiple arguments to pass to the program.
+ /// Adds multiple arguments to pass to the program.
///
/// To pass a single argument see [`arg`].
///
self
}
- /// Add or update multiple environment variable mappings.
+ /// Adds or updates multiple environment variable mappings.
///
/// # Examples
///
///
/// let mut command = Command::new("ls");
/// if let Ok(child) = command.spawn() {
- /// println!("Child's id is {}", child.id());
+ /// println!("Child's ID is {}", child.id());
/// } else {
/// println!("ls command didn't start");
/// }
///
/// This function will not block the calling thread and will only
/// check to see if the child process has exited or not. If the child has
- /// exited then on Unix the process id is reaped. This function is
+ /// exited then on Unix the process ID is reaped. This function is
/// guaranteed to repeatedly return a successful exit status so long as the
/// child has already exited.
///
}
}
- /// Test that process creation flags work by debugging a process.
+ /// Tests that process creation flags work by debugging a process.
/// Other creation flags make it hard or impossible to detect
/// behavioral changes in the process.
#[test]
}
impl BarrierWaitResult {
- /// Returns whether this thread from [`wait`] is the "leader thread".
+ /// Returns `true` if this thread from [`wait`] is the "leader thread".
///
/// Only one thread will have `true` returned from their result, all other
/// threads will have `false` returned.
pub struct WaitTimeoutResult(bool);
impl WaitTimeoutResult {
- /// Returns whether the wait was known to have timed out.
+ /// Returns `true` if the wait was known to have timed out.
///
/// # Examples
///
///
/// Note that the best effort is made to ensure that the time waited is
/// measured with a monotonic clock, and not affected by the changes made to
- /// the system time. This function is susceptible to spurious wakeups.
+ /// the system time. This function is susceptible to spurious wakeups.
/// Condition variables normally have a boolean predicate associated with
/// them, and the predicate must always be checked each time this function
- /// returns to protect against spurious wakeups. Additionally, it is
+ /// returns to protect against spurious wakeups. Additionally, it is
/// typically desirable for the time-out to not exceed some duration in
/// spite of spurious wakes, thus the sleep-duration is decremented by the
- /// amount slept. Alternatively, use the `wait_timeout_until` method
+ /// amount slept. Alternatively, use the `wait_timeout_until` method
/// to wait until a condition is met with a total time-out regardless
/// of spurious wakes.
///
}
/// Waits on this condition variable for a notification, timing out after a
- /// specified duration. Spurious wakes will not cause this function to
+ /// specified duration. Spurious wakes will not cause this function to
/// return.
///
/// The semantics of this function are equivalent to [`wait_until`] except
wake
}
- /// Convert to an unsafe usize value. Useful for storing in a pipe's state
+ /// Converts to an unsafe usize value. Useful for storing in a pipe's state
/// flag.
#[inline]
pub unsafe fn cast_to_usize(self) -> usize {
mem::transmute(self.inner)
}
- /// Convert from an unsafe usize value. Useful for retrieving a pipe's state
+ /// Converts from an unsafe usize value. Useful for retrieving a pipe's state
/// flag.
#[inline]
pub unsafe fn cast_from_usize(signal_ptr: usize) -> SignalToken {
}
}
- /// Returns true if we wake up normally, false otherwise.
+ /// Returns `true` if we wake up normally.
pub fn wait_max_until(self, end: Instant) -> bool {
while !self.inner.woken.load(Ordering::SeqCst) {
let now = Instant::now();
/// where the corresponding receiver has already been deallocated. Note
/// that a return value of [`Err`] means that the data will never be
/// received, but a return value of [`Ok`] does *not* mean that the data
- /// will be received. It is possible for the corresponding receiver to
+ /// will be received. It is possible for the corresponding receiver to
/// hang up immediately after this function returns [`Ok`].
///
/// [`Err`]: ../../../std/result/enum.Result.html#variant.Err
impl !marker::Send for Select {}
/// A handle to a receiver which is currently a member of a `Select` set of
-/// receivers. This handle is used to keep the receiver in the set as well as
+/// receivers. This handle is used to keep the receiver in the set as well as
/// interact with the underlying receiver.
pub struct Handle<'rx, T:Send+'rx> {
/// The ID of this handle, used to compare against the return value of
- /// `Select::wait()`
+ /// `Select::wait()`.
id: usize,
selector: *mut SelectInner,
next: *mut Handle<'static, ()>,
}
/// Waits for an event on this receiver set. The returned value is *not* an
- /// index, but rather an id. This id can be queried against any active
+ /// index, but rather an ID. This ID can be queried against any active
/// `Handle` structures (each one has an `id` method). The handle with
/// the matching `id` will have some sort of event available on it. The
/// event could either be that data is available or the corresponding
}
impl<'rx, T: Send> Handle<'rx, T> {
- /// Retrieves the id of this handle.
+ /// Retrieves the ID of this handle.
#[inline]
pub fn id(&self) -> usize { self.id }
-/// Shared channels
+/// Shared channels.
///
/// This is the flavor of channels which are not necessarily optimized for any
/// particular use case, but are the most general in how they are used. Shared
/// Returns a mutable reference to the underlying data.
///
/// Since this call borrows the `Mutex` mutably, no actual locking needs to
- /// take place---the mutable borrow statically guarantees no locks exist.
+ /// take place -- the mutable borrow statically guarantees no locks exist.
///
/// # Errors
///
/// result in an immediate panic. If `f` panics, the `Once` will remain
/// in a poison state. If `f` does _not_ panic, the `Once` will no
/// longer be in a poison state and all future calls to `call_once` or
- /// `call_one_force` will no-op.
+ /// `call_once_force` will be no-ops.
///
/// The closure `f` is yielded a [`OnceState`] structure which can be used
/// to query the poison status of the `Once`.
});
}
- /// Returns true if some `call_once` call has completed
+ /// Returns `true` if some `call_once` call has completed
/// successfully. Specifically, `is_completed` will return false in
/// the following situations:
/// * `call_once` was not called at all,
}
impl OnceState {
- /// Returns whether the associated [`Once`] was poisoned prior to the
+ /// Returns `true` if the associated [`Once`] was poisoned prior to the
/// invocation of the closure passed to [`call_once_force`].
///
/// [`call_once_force`]: struct.Once.html#method.call_once_force
/// Determines whether the lock is poisoned.
///
/// If another thread is active, the lock can still become poisoned at any
- /// time. You should not trust a `false` value for program correctness
+ /// time. You should not trust a `false` value for program correctness
/// without additional synchronization.
///
/// # Examples
/// Returns a mutable reference to the underlying data.
///
/// Since this call borrows the `RwLock` mutably, no actual locking needs to
- /// take place---the mutable borrow statically guarantees no locks exist.
+ /// take place -- the mutable borrow statically guarantees no locks exist.
///
/// # Errors
///
/// Methods of synchronizing memory with physical storage.
#[repr(C)]
pub struct msflags: u8 {
- /// Perform asynchronous writes.
+ /// Performs asynchronous writes.
const ASYNC = 0x01;
- /// Invalidate cached data.
+ /// Invalidates cached data.
const INVALIDATE = 0x02;
- /// Perform synchronous writes.
+ /// Performs synchronous writes.
const SYNC = 0x04;
}
}
/// Entry point for additionally created threads.
///
-/// **tid**:
-/// Thread ID of the current thread.
+/// `tid`: thread ID of the current thread.
///
-/// **aux**:
-/// Copy of the value stored in
+/// `aux`: copy of the value stored in
/// [`threadattr.argument`](struct.threadattr.html#structfield.argument).
pub type threadentry = unsafe extern "C" fn(
tid: tid,
cloudabi_sys_mem_map(addr_, len_, prot_, flags_, fd_, off_, mem_)
}
-/// Change the protection of a memory mapping.
+/// Changes the protection of a memory mapping.
///
/// ## Parameters
///
cloudabi_sys_mem_protect(mapping_.as_mut_ptr() as *mut (), mapping_.len(), prot_)
}
-/// Synchronize a region of memory with its physical storage.
+/// Synchronizes a region of memory with its physical storage.
///
/// ## Parameters
///
-//! Platform-dependent platform abstraction
+//! Platform-dependent platform abstraction.
//!
//! The `std::sys` module is the abstracted interface through which
//! `std` talks to the underlying operating system. It has different
#[stable(feature = "fs_ext", since = "1.1.0")]
fn mode(&mut self, mode: u32) -> &mut Self;
- /// Pass custom flags to the `flags` argument of `open`.
+ /// Passes custom flags to the `flags` argument of `open`.
///
/// The bits that define the access mode are masked out with `O_ACCMODE`, to
/// ensure they do not interfere with the access mode set by Rusts options.
/// # Note
///
/// On Windows, you must specify whether a symbolic link points to a file
-/// or directory. Use `os::windows::fs::symlink_file` to create a
+/// or directory. Use `os::windows::fs::symlink_file` to create a
/// symbolic link to a file, or `os::windows::fs::symlink_dir` to create a
-/// symbolic link to a directory. Additionally, the process must have
+/// symbolic link to a directory. Additionally, the process must have
/// `SeCreateSymbolicLinkPrivilege` in order to be able to create a
/// symbolic link.
///
None
}
- /// Returns true if and only if the address is unnamed.
+ /// Returns `true` if the address is unnamed.
///
/// # Examples
///
/// ```
///
/// # Platform specific
- /// On Redox this always returns None.
+ /// On Redox this always returns `None`.
#[stable(feature = "unix_socket_redox", since = "1.29")]
pub fn take_error(&self) -> io::Result<Option<io::Error>> {
Ok(None)
/// ```
///
/// # Platform specific
- /// On Redox this always returns None.
+ /// On Redox this always returns `None`.
#[stable(feature = "unix_socket_redox", since = "1.29")]
pub fn take_error(&self) -> io::Result<Option<io::Error>> {
Ok(None)
/// [`process::Command`]: ../../../../std/process/struct.Command.html
#[stable(feature = "rust1", since = "1.0.0")]
pub trait CommandExt {
- /// Sets the child process's user id. This translates to a
+ /// Sets the child process's user ID. This translates to a
/// `setuid` call in the child process. Failure in the `setuid`
/// call will cause the spawn to fail.
#[stable(feature = "rust1", since = "1.0.0")]
fn uid(&mut self, id: u32) -> &mut process::Command;
- /// Similar to `uid`, but sets the group id of the child process. This has
+ /// Similar to `uid`, but sets the group ID of the child process. This has
/// the same semantics as the `uid` field.
#[stable(feature = "rust1", since = "1.0.0")]
fn gid(&mut self, id: u32) -> &mut process::Command;
}
impl Mutex {
- /// Create a new mutex.
+ /// Creates a new mutex.
pub const fn new() -> Self {
Mutex {
lock: UnsafeCell::new(0),
}
}
-/// The unique id of the process (this should never be negative).
+/// The unique ID of the process (this should never be negative).
pub struct Process {
pid: usize,
status: Option<ExitStatus>,
syscall1(SYS_BRK, addr)
}
-/// Change the process's working directory
+/// Changes the process's working directory.
///
/// This function will attempt to set the process's working directory to `path`, which can be
/// either a relative, scheme relative, or absolute path.
unsafe { syscall3(SYS_CHMOD, path.as_ref().as_ptr() as usize, path.as_ref().len(), mode) }
}
-/// Produce a fork of the current process, or a new process thread
+/// Produces a fork of the current process, or a new process thread.
pub unsafe fn clone(flags: usize) -> Result<usize> {
syscall1_clobber(SYS_CLONE, flags)
}
-/// Close a file
+/// Closes a file.
pub fn close(fd: usize) -> Result<usize> {
unsafe { syscall1(SYS_CLOSE, fd) }
}
-/// Get the current system time
+/// Gets the current system time.
pub fn clock_gettime(clock: usize, tp: &mut TimeSpec) -> Result<usize> {
unsafe { syscall2(SYS_CLOCK_GETTIME, clock, tp as *mut TimeSpec as usize) }
}
-/// Copy and transform a file descriptor
+/// Copies and transforms a file descriptor.
pub fn dup(fd: usize, buf: &[u8]) -> Result<usize> {
unsafe { syscall3(SYS_DUP, fd, buf.as_ptr() as usize, buf.len()) }
}
-/// Copy and transform a file descriptor
+/// Copies and transforms a file descriptor.
pub fn dup2(fd: usize, newfd: usize, buf: &[u8]) -> Result<usize> {
unsafe { syscall4(SYS_DUP2, fd, newfd, buf.as_ptr() as usize, buf.len()) }
}
-/// Exit the current process
+/// Exits the current process.
pub fn exit(status: usize) -> Result<usize> {
unsafe { syscall1(SYS_EXIT, status) }
}
-/// Change file permissions
+/// Changes file permissions.
pub fn fchmod(fd: usize, mode: u16) -> Result<usize> {
unsafe { syscall2(SYS_FCHMOD, fd, mode as usize) }
}
-/// Change file ownership
+/// Changes file ownership.
pub fn fchown(fd: usize, uid: u32, gid: u32) -> Result<usize> {
unsafe { syscall3(SYS_FCHOWN, fd, uid as usize, gid as usize) }
}
-/// Change file descriptor flags
+/// Changes file descriptor flags.
pub fn fcntl(fd: usize, cmd: usize, arg: usize) -> Result<usize> {
unsafe { syscall3(SYS_FCNTL, fd, cmd, arg) }
}
-/// Replace the current process with a new executable
+/// Replaces the current process with a new executable.
pub fn fexec(fd: usize, args: &[[usize; 2]], vars: &[[usize; 2]]) -> Result<usize> {
unsafe { syscall5(SYS_FEXEC, fd, args.as_ptr() as usize, args.len(),
vars.as_ptr() as usize, vars.len()) }
}
-/// Map a file into memory
+/// Maps a file into memory.
pub unsafe fn fmap(fd: usize, offset: usize, size: usize) -> Result<usize> {
syscall3(SYS_FMAP, fd, offset, size)
}
-/// Unmap a memory-mapped file
+/// Unmaps a memory-mapped file.
pub unsafe fn funmap(addr: usize) -> Result<usize> {
syscall1(SYS_FUNMAP, addr)
}
-/// Retrieve the canonical path of a file
+/// Retrieves the canonical path of a file.
pub fn fpath(fd: usize, buf: &mut [u8]) -> Result<usize> {
unsafe { syscall3(SYS_FPATH, fd, buf.as_mut_ptr() as usize, buf.len()) }
}
-/// Rename a file
+/// Renames a file.
pub fn frename<T: AsRef<[u8]>>(fd: usize, path: T) -> Result<usize> {
unsafe { syscall3(SYS_FRENAME, fd, path.as_ref().as_ptr() as usize, path.as_ref().len()) }
}
-/// Get metadata about a file
+/// Gets metadata about a file.
pub fn fstat(fd: usize, stat: &mut Stat) -> Result<usize> {
unsafe { syscall3(SYS_FSTAT, fd, stat as *mut Stat as usize, mem::size_of::<Stat>()) }
}
-/// Get metadata about a filesystem
+/// Gets metadata about a filesystem.
pub fn fstatvfs(fd: usize, stat: &mut StatVfs) -> Result<usize> {
unsafe { syscall3(SYS_FSTATVFS, fd, stat as *mut StatVfs as usize, mem::size_of::<StatVfs>()) }
}
-/// Sync a file descriptor to its underlying medium
+/// Syncs a file descriptor to its underlying medium.
pub fn fsync(fd: usize) -> Result<usize> {
unsafe { syscall1(SYS_FSYNC, fd) }
}
syscall5(SYS_FUTEX, addr as usize, op, (val as isize) as usize, val2, addr2 as usize)
}
-/// Get the current working directory
+/// Gets the current working directory.
pub fn getcwd(buf: &mut [u8]) -> Result<usize> {
unsafe { syscall2(SYS_GETCWD, buf.as_mut_ptr() as usize, buf.len()) }
}
-/// Get the effective group ID
+/// Gets the effective group ID.
pub fn getegid() -> Result<usize> {
unsafe { syscall0(SYS_GETEGID) }
}
-/// Get the effective namespace
+/// Gets the effective namespace.
pub fn getens() -> Result<usize> {
unsafe { syscall0(SYS_GETENS) }
}
-/// Get the effective user ID
+/// Gets the effective user ID.
pub fn geteuid() -> Result<usize> {
unsafe { syscall0(SYS_GETEUID) }
}
-/// Get the current group ID
+/// Gets the current group ID.
pub fn getgid() -> Result<usize> {
unsafe { syscall0(SYS_GETGID) }
}
-/// Get the current namespace
+/// Gets the current namespace.
pub fn getns() -> Result<usize> {
unsafe { syscall0(SYS_GETNS) }
}
-/// Get the current process ID
+/// Gets the current process ID.
pub fn getpid() -> Result<usize> {
unsafe { syscall0(SYS_GETPID) }
}
-/// Get the process group ID
+/// Gets the process group ID.
pub fn getpgid(pid: usize) -> Result<usize> {
unsafe { syscall1(SYS_GETPGID, pid) }
}
-/// Get the parent process ID
+/// Gets the parent process ID.
pub fn getppid() -> Result<usize> {
unsafe { syscall0(SYS_GETPPID) }
}
-/// Get the current user ID
+/// Gets the current user ID.
pub fn getuid() -> Result<usize> {
unsafe { syscall0(SYS_GETUID) }
}
-/// Set the I/O privilege level
+/// Sets the I/O privilege level.
pub unsafe fn iopl(level: usize) -> Result<usize> {
syscall1(SYS_IOPL, level)
}
-/// Send a signal `sig` to the process identified by `pid`
+/// Sends a signal `sig` to the process identified by `pid`.
pub fn kill(pid: usize, sig: usize) -> Result<usize> {
unsafe { syscall2(SYS_KILL, pid, sig) }
}
-/// Create a link to a file
+/// Creates a link to a file.
pub unsafe fn link(old: *const u8, new: *const u8) -> Result<usize> {
syscall2(SYS_LINK, old as usize, new as usize)
}
-/// Seek to `offset` bytes in a file descriptor
+/// Seeks to `offset` bytes in a file descriptor.
pub fn lseek(fd: usize, offset: isize, whence: usize) -> Result<usize> {
unsafe { syscall3(SYS_LSEEK, fd, offset as usize, whence) }
}
-/// Make a new scheme namespace
+/// Makes a new scheme namespace.
pub fn mkns(schemes: &[[usize; 2]]) -> Result<usize> {
unsafe { syscall2(SYS_MKNS, schemes.as_ptr() as usize, schemes.len()) }
}
-/// Sleep for the time specified in `req`
+/// Sleeps for the time specified in `req`.
pub fn nanosleep(req: &TimeSpec, rem: &mut TimeSpec) -> Result<usize> {
unsafe { syscall2(SYS_NANOSLEEP, req as *const TimeSpec as usize,
rem as *mut TimeSpec as usize) }
}
-/// Open a file
+/// Opens a file.
pub fn open<T: AsRef<[u8]>>(path: T, flags: usize) -> Result<usize> {
unsafe { syscall3(SYS_OPEN, path.as_ref().as_ptr() as usize, path.as_ref().len(), flags) }
}
-/// Allocate pages, linearly in physical memory
+/// Allocates pages, linearly in physical memory.
pub unsafe fn physalloc(size: usize) -> Result<usize> {
syscall1(SYS_PHYSALLOC, size)
}
-/// Free physically allocated pages
+/// Frees physically allocated pages.
pub unsafe fn physfree(physical_address: usize, size: usize) -> Result<usize> {
syscall2(SYS_PHYSFREE, physical_address, size)
}
-/// Map physical memory to virtual memory
+/// Maps physical memory to virtual memory.
pub unsafe fn physmap(physical_address: usize, size: usize, flags: usize) -> Result<usize> {
syscall3(SYS_PHYSMAP, physical_address, size, flags)
}
-/// Unmap previously mapped physical memory
+/// Unmaps previously mapped physical memory.
pub unsafe fn physunmap(virtual_address: usize) -> Result<usize> {
syscall1(SYS_PHYSUNMAP, virtual_address)
}
-/// Create a pair of file descriptors referencing the read and write ends of a pipe
+/// Creates a pair of file descriptors referencing the read and write ends of a pipe.
pub fn pipe2(fds: &mut [usize; 2], flags: usize) -> Result<usize> {
unsafe { syscall2(SYS_PIPE2, fds.as_ptr() as usize, flags) }
}
unsafe { syscall3(SYS_READ, fd, buf.as_mut_ptr() as usize, buf.len()) }
}
-/// Remove a directory
+/// Removes a directory.
pub fn rmdir<T: AsRef<[u8]>>(path: T) -> Result<usize> {
unsafe { syscall2(SYS_RMDIR, path.as_ref().as_ptr() as usize, path.as_ref().len()) }
}
-/// Set the process group ID
+/// Sets the process group ID.
pub fn setpgid(pid: usize, pgid: usize) -> Result<usize> {
unsafe { syscall2(SYS_SETPGID, pid, pgid) }
}
-/// Set the current process group IDs
+/// Sets the current process group IDs.
pub fn setregid(rgid: usize, egid: usize) -> Result<usize> {
unsafe { syscall2(SYS_SETREGID, rgid, egid) }
}
-/// Make a new scheme namespace
+/// Makes a new scheme namespace.
pub fn setrens(rns: usize, ens: usize) -> Result<usize> {
unsafe { syscall2(SYS_SETRENS, rns, ens) }
}
-/// Set the current process user IDs
+/// Sets the current process user IDs.
pub fn setreuid(ruid: usize, euid: usize) -> Result<usize> {
unsafe { syscall2(SYS_SETREUID, ruid, euid) }
}
-/// Set up a signal handler
+/// Sets up a signal handler.
pub fn sigaction(sig: usize, act: Option<&SigAction>, oldact: Option<&mut SigAction>)
-> Result<usize> {
unsafe { syscall4(SYS_SIGACTION, sig,
restorer as usize) }
}
-// Return from signal handler
+/// Returns from the signal handler.
pub fn sigreturn() -> Result<usize> {
unsafe { syscall0(SYS_SIGRETURN) }
}
-/// Remove a file
+/// Removes a file.
pub fn unlink<T: AsRef<[u8]>>(path: T) -> Result<usize> {
unsafe { syscall2(SYS_UNLINK, path.as_ref().as_ptr() as usize, path.as_ref().len()) }
}
-/// Convert a virtual address to a physical one
+/// Converts a virtual address to a physical one.
pub unsafe fn virttophys(virtual_address: usize) -> Result<usize> {
syscall1(SYS_VIRTTOPHYS, virtual_address)
}
-/// Check if a child process has exited or received a signal
+/// Checks if a child process has exited or received a signal.
pub fn waitpid(pid: usize, status: &mut usize, options: usize) -> Result<usize> {
unsafe { syscall3(SYS_WAITPID, pid, status as *mut usize as usize, options) }
}
-/// Write a buffer to a file descriptor
+/// Writes a buffer to a file descriptor.
///
/// The kernel will attempt to write the bytes in `buf` to the file descriptor `fd`, returning
/// either an `Err`, explained below, or `Ok(count)` where `count` is the number of bytes which
unsafe { syscall3(SYS_WRITE, fd, buf.as_ptr() as usize, buf.len()) }
}
-/// Yield the process's time slice to the kernel
+/// Yields the process's time slice to the kernel.
///
/// This function will return Ok(0) on success
pub fn sched_yield() -> Result<usize> {
pub const WUNTRACED: usize = 0x02;
pub const WCONTINUED: usize = 0x08;
-/// True if status indicates the child is stopped.
+/// Returns `true` if status indicates the child is stopped.
pub fn wifstopped(status: usize) -> bool {
(status & 0xff) == 0x7f
}
-/// If wifstopped(status), the signal that stopped the child.
+/// If wifstopped(status), returns the signal that stopped the child.
pub fn wstopsig(status: usize) -> usize {
(status >> 8) & 0xff
}
-/// True if status indicates the child continued after a stop.
+/// Returns `true` if status indicates the child continued after a stop.
pub fn wifcontinued(status: usize) -> bool {
status == 0xffff
}
-/// True if STATUS indicates termination by a signal.
+/// Returns `true` if status indicates termination by a signal.
pub fn wifsignaled(status: usize) -> bool {
((status & 0x7f) + 1) as i8 >= 2
}
-/// If wifsignaled(status), the terminating signal.
+/// If wifsignaled(status), returns the terminating signal.
pub fn wtermsig(status: usize) -> usize {
status & 0x7f
}
-/// True if status indicates normal termination.
+/// Returns `true` if status indicates normal termination.
pub fn wifexited(status: usize) -> bool {
wtermsig(status) == 0
}
-/// If wifexited(status), the exit status.
+/// If wifexited(status), returns the exit status.
pub fn wexitstatus(status: usize) -> usize {
(status >> 8) & 0xff
}
-/// True if status indicates a core dump was created.
+/// Returns `true` if status indicates a core dump was created.
pub fn wcoredump(status: usize) -> bool {
(status & 0x80) != 0
}
.asciz "Re-entered aborted enclave!"
.Lreentry_panic_msg_end:
-.Lusercall_panic_msg:
- .asciz "Invalid usercall#!"
-.Lusercall_panic_msg_end:
-
.org .Lxsave_clear+512
.Lxsave_header:
.int 0, 0 /* XSTATE_BV */
orq $8,%rsp
jmp panic_msg
-.Lusercall_panic:
- lea .Lusercall_panic_msg(%rip),%rdi
- mov $.Lusercall_panic_msg_end-.Lusercall_panic_msg,%esi
- orq $8,%rsp
- jmp panic_msg
-
-.macro push_callee_saved_registers
+/* This *MUST* be called with 6 parameters, otherwise register information */
+/* might leak! */
+.global usercall
+usercall:
+ test %rcx,%rcx /* check `abort` function argument */
+ jnz .Lusercall_abort /* abort is set, jump to abort code (unlikely forward conditional) */
+ jmp .Lusercall_save_state /* non-aborting usercall */
+.Lusercall_abort:
+/* set aborted bit */
+ movb $1,.Laborted(%rip)
+/* save registers in DEBUG mode, so that debugger can reconstruct the stack */
+ testb $0xff,DEBUG(%rip)
+ jz .Lusercall_noreturn
+.Lusercall_save_state:
+/* save callee-saved state */
push %r15
push %r14
push %r13
sub $8, %rsp
fstcw 4(%rsp)
stmxcsr (%rsp)
-.endm
-
-.global usercall_exit
-usercall_exit:
-/* save registers in DEBUG mode, so that debugger can reconstruct the stack */
- testb $0xff,DEBUG(%rip)
- jz .Lskip_save_registers
- push_callee_saved_registers
- movq %rsp,%gs:tcsls_panic_last_rsp
-.Lskip_save_registers:
-/* set aborted bit */
- movb $1,.Laborted(%rip)
-/* call usercall exit(true) */
- /* NOP: mov %rsi,%rsi */ /* RSI = usercall() argument: panic */
- xor %rdx,%rdx /* RDX cleared */
- movq $usercall_nr_exit,%rdi /* RDI = usercall exit */
- jmp .Lexit
-
-/* This *MUST* be called with 6 parameters, otherwise register information */
-/* might leak! */
-.global usercall
-usercall:
- test %rdi,%rdi
- jle .Lusercall_panic
-/* save callee-saved state */
- push_callee_saved_registers
movq %rsp,%gs:tcsls_last_rsp
+.Lusercall_noreturn:
/* clear general purpose register state */
/* RAX overwritten by ENCLU */
/* RBX set by sgx_exit */
jmp .Lsgx_exit
.Lusercall_ret:
movq $0,%gs:tcsls_last_rsp
-/* restore callee-saved state, cf. push_callee_saved_registers */
+/* restore callee-saved state, cf. "save" above */
mov %r11,%rsp
ldmxcsr (%rsp)
fldcw 4(%rsp)
#[macro_use]
pub mod usercalls;
-global_asm!(concat!(usercalls_asm!(), include_str!("entry.S")));
+global_asm!(include_str!("entry.S"));
#[no_mangle]
unsafe extern "C" fn tcs_init(secondary: bool) {
-use super::usercalls::alloc::UserRef;
+use super::usercalls::{alloc::UserRef, self};
use cmp;
use io::{self, Write};
use mem;
#[no_mangle]
pub extern "C" fn panic_msg(msg: &str) -> ! {
let _ = SgxPanicOutput::new().map(|mut out| out.write(msg.as_bytes()));
- unsafe { usercall_exit(true); }
+ usercalls::exit(true)
}
-
-extern "C" { pub fn usercall_exit(panic: bool) -> !; }
use fortanix_sgx_abi::Tcs;
-/// Get the ID for the current thread. The ID is guaranteed to be unique among
+/// Gets the ID for the current thread. The ID is guaranteed to be unique among
/// all currently running threads in the enclave, and it is guaranteed to be
/// constant for the lifetime of the thread. More specifically for SGX, there
/// is a one-to-one correspondence of the ID to the address of the TCS.
self.0[hi].fetch_and(!lo, Ordering::Relaxed);
}
- /// Set any unset bit. Not atomic. Returns `None` if all bits were
+ /// Sets any unset bit. Not atomic. Returns `None` if all bits were
/// observed to be set.
pub fn set(&self) -> Option<usize> {
'elems: for (idx, elem) in self.0.iter().enumerate() {
/// Construct a pointer to `Self` given a memory range in user space.
///
- /// NB. This takes a size, not a length!
+ /// N.B., this takes a size, not a length!
///
/// # Safety
+ ///
/// The caller must ensure the memory range is in user memory, is the
/// correct size and is correctly aligned and points to the right type.
unsafe fn from_raw_sized_unchecked(ptr: *mut u8, size: usize) -> *mut Self;
/// Construct a pointer to `Self` given a memory range.
///
- /// NB. This takes a size, not a length!
+ /// N.B., this takes a size, not a length!
///
/// # Safety
+ ///
/// The caller must ensure the memory range points to the correct type.
///
/// # Panics
+ ///
/// This function panics if:
///
- /// * The pointer is not aligned
- /// * The pointer is null
- /// * The pointed-to range is not in user memory
+ /// * the pointer is not aligned.
+ /// * the pointer is null.
+ /// * the pointed-to range is not in user memory.
unsafe fn from_raw_sized(ptr: *mut u8, size: usize) -> NonNull<Self> {
let ret = Self::from_raw_sized_unchecked(ptr, size);
Self::check_ptr(ret);
NonNull::new_unchecked(ret as _)
}
- /// Check if a pointer may point to Self in user memory.
+ /// Checks if a pointer may point to `Self` in user memory.
///
/// # Safety
+ ///
/// The caller must ensure the memory range points to the correct type and
/// length (if this is a slice).
///
/// # Panics
+ ///
/// This function panics if:
///
- /// * The pointer is not aligned
- /// * The pointer is null
- /// * The pointed-to range is not in user memory
+ /// * the pointer is not aligned.
+ /// * the pointer is null.
+ /// * the pointed-to range is not in user memory.
unsafe fn check_ptr(ptr: *const Self) {
let is_aligned = |p| -> bool {
0 == (p as usize) & (Self::align_of() - 1)
}
}
- /// Copy `val` into freshly allocated space in user memory.
+ /// Copies `val` into freshly allocated space in user memory.
pub fn new_from_enclave(val: &T) -> Self {
unsafe {
let ret = Self::new_uninit_bytes(mem::size_of_val(val));
}
}
- /// Create an owned `User<T>` from a raw pointer.
+ /// Creates an owned `User<T>` from a raw pointer.
///
/// # Safety
/// The caller must ensure `ptr` points to `T`, is freeable with the `free`
User(NonNull::new_userref(ptr))
}
- /// Convert this value into a raw pointer. The value will no longer be
+ /// Converts this value into a raw pointer. The value will no longer be
/// automatically freed.
pub fn into_raw(self) -> *mut T {
let ret = self.0;
Self::new_uninit_bytes(n * mem::size_of::<T>())
}
- /// Create an owned `User<[T]>` from a raw thin pointer and a slice length.
+ /// Creates an owned `User<[T]>` from a raw thin pointer and a slice length.
///
/// # Safety
/// The caller must ensure `ptr` points to `len` elements of `T`, is
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T: ?Sized> UserRef<T> where T: UserSafe {
- /// Create a `&UserRef<[T]>` from a raw pointer.
+ /// Creates a `&UserRef<[T]>` from a raw pointer.
///
/// # Safety
/// The caller must ensure `ptr` points to `T`.
&*(ptr as *const Self)
}
- /// Create a `&mut UserRef<[T]>` from a raw pointer. See the struct
+ /// Creates a `&mut UserRef<[T]>` from a raw pointer. See the struct
/// documentation for the nuances regarding a `&mut UserRef<T>`.
///
/// # Safety
&mut*(ptr as *mut Self)
}
- /// Copy `val` into user memory.
+ /// Copies `val` into user memory.
///
/// # Panics
/// This function panics if the destination doesn't have the same size as
}
}
- /// Copy the value from user memory and place it into `dest`.
+ /// Copies the value from user memory and places it into `dest`.
///
/// # Panics
/// This function panics if the destination doesn't have the same size as
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T> UserRef<T> where T: UserSafe {
- /// Copy the value from user memory into enclave memory.
+ /// Copies the value from user memory into enclave memory.
pub fn to_enclave(&self) -> T {
unsafe { ptr::read(self.0.get()) }
}
#[unstable(feature = "sgx_platform", issue = "56975")]
impl<T> UserRef<[T]> where [T]: UserSafe {
- /// Create a `&UserRef<[T]>` from a raw thin pointer and a slice length.
+ /// Creates a `&UserRef<[T]>` from a raw thin pointer and a slice length.
///
/// # Safety
/// The caller must ensure `ptr` points to `n` elements of `T`.
&*(<[T]>::from_raw_sized(ptr as _, len * mem::size_of::<T>()).as_ptr() as *const Self)
}
- /// Create a `&mut UserRef<[T]>` from a raw thin pointer and a slice length.
+ /// Creates a `&mut UserRef<[T]>` from a raw thin pointer and a slice length.
/// See the struct documentation for the nuances regarding a
/// `&mut UserRef<T>`.
///
unsafe { (*self.0.get()).len() }
}
- /// Copy the value from user memory and place it into `dest`. Afterwards,
+ /// Copies the value from user memory and places it into `dest`. Afterwards,
/// `dest` will contain exactly `self.len()` elements.
///
/// # Panics
}
}
- /// Copy the value from user memory into a vector in enclave memory.
+ /// Copies the value from user memory into a vector in enclave memory.
pub fn to_enclave(&self) -> Vec<T> {
let mut ret = Vec::with_capacity(self.len());
self.copy_to_enclave_vec(&mut ret);
#[unstable(feature = "sgx_platform", issue = "56975")]
impl UserRef<super::raw::ByteBuffer> {
- /// Copy the user memory range pointed to by the user `ByteBuffer` to
+ /// Copies the user memory range pointed to by the user `ByteBuffer` to
/// enclave memory.
///
/// # Panics
/// Usercall `exit`. See the ABI documentation for more information.
#[unstable(feature = "sgx_platform", issue = "56975")]
pub fn exit(panic: bool) -> ! {
- unsafe { super::panic::usercall_exit(panic) }
+ unsafe { raw::exit(panic) }
}
/// Usercall `wait`. See the ABI documentation for more information.
pub use fortanix_sgx_abi::*;
use ptr::NonNull;
+use num::NonZeroU64;
#[repr(C)]
struct UsercallReturn(u64, u64);
extern "C" {
- fn usercall(nr: u64, p1: u64, p2: u64, _ignore: u64, p3: u64, p4: u64) -> UsercallReturn;
+ fn usercall(nr: NonZeroU64, p1: u64, p2: u64, abort: u64, p3: u64, p4: u64) -> UsercallReturn;
}
-/// Perform the raw usercall operation as defined in the ABI calling convention.
+/// Performs the raw usercall operation as defined in the ABI calling convention.
///
/// # Safety
+///
/// The caller must ensure to pass parameters appropriate for the usercall `nr`
/// and to observe all requirements specified in the ABI.
///
/// # Panics
-/// Panics if `nr` is 0.
+///
+/// Panics if `nr` is `0`.
#[unstable(feature = "sgx_platform", issue = "56975")]
-pub unsafe fn do_usercall(nr: u64, p1: u64, p2: u64, p3: u64, p4: u64) -> (u64, u64) {
- if nr==0 { panic!("Invalid usercall number {}",nr) }
- let UsercallReturn(a, b) = usercall(nr,p1,p2,0,p3,p4);
+#[inline]
+pub unsafe fn do_usercall(nr: NonZeroU64, p1: u64, p2: u64, p3: u64, p4: u64, abort: bool)
+ -> (u64, u64)
+{
+ let UsercallReturn(a, b) = usercall(nr, p1, p2, abort as _, p3, p4);
(a, b)
}
}
macro_rules! define_usercalls {
- // Using `$r:tt` because `$r:ty` doesn't match ! in `clobber_diverging`
($(fn $f:ident($($n:ident: $t:ty),*) $(-> $r:tt)*; )*) => {
/// Usercall numbers as per the ABI.
#[repr(u64)]
};
}
-macro_rules! define_usercalls_asm {
- ($(fn $f:ident($($n:ident: $t:ty),*) $(-> $r:ty)*; )*) => {
- macro_rules! usercalls_asm {
- () => {
- concat!(
- ".equ usercall_nr_LAST, 0\n",
- $(
- ".equ usercall_nr_", stringify!($f), ", usercall_nr_LAST+1\n",
- ".equ usercall_nr_LAST, usercall_nr_", stringify!($f), "\n"
- ),*
- )
- }
- }
- };
-}
-
macro_rules! define_ra {
(< $i:ident > $t:ty) => {
impl<$i> RegisterArgument for $t {
}
}
+macro_rules! return_type_is_abort {
+ (!) => { true };
+ ($r:ty) => { false };
+}
+
+// In this macro: using `$r:tt` because `$r:ty` doesn't match ! in `return_type_is_abort`
macro_rules! enclave_usercalls_internal_define_usercalls {
(def fn $f:ident($n1:ident: $t1:ty, $n2:ident: $t2:ty,
- $n3:ident: $t3:ty, $n4:ident: $t4:ty) -> $r:ty) => (
+ $n3:ident: $t3:ty, $n4:ident: $t4:ty) -> $r:tt) => (
/// This is the raw function definition, see the ABI documentation for
/// more information.
#[unstable(feature = "sgx_platform", issue = "56975")]
#[inline(always)]
pub unsafe fn $f($n1: $t1, $n2: $t2, $n3: $t3, $n4: $t4) -> $r {
ReturnValue::from_registers(stringify!($f), do_usercall(
- Usercalls::$f as Register,
+ NonZeroU64::new(Usercalls::$f as Register)
+ .expect("Usercall number must be non-zero"),
RegisterArgument::into_register($n1),
RegisterArgument::into_register($n2),
RegisterArgument::into_register($n3),
RegisterArgument::into_register($n4),
+ return_type_is_abort!($r)
))
}
);
- (def fn $f:ident($n1:ident: $t1:ty, $n2:ident: $t2:ty, $n3:ident: $t3:ty) -> $r:ty) => (
+ (def fn $f:ident($n1:ident: $t1:ty, $n2:ident: $t2:ty, $n3:ident: $t3:ty) -> $r:tt) => (
/// This is the raw function definition, see the ABI documentation for
/// more information.
#[unstable(feature = "sgx_platform", issue = "56975")]
#[inline(always)]
pub unsafe fn $f($n1: $t1, $n2: $t2, $n3: $t3) -> $r {
ReturnValue::from_registers(stringify!($f), do_usercall(
- Usercalls::$f as Register,
+ NonZeroU64::new(Usercalls::$f as Register)
+ .expect("Usercall number must be non-zero"),
RegisterArgument::into_register($n1),
RegisterArgument::into_register($n2),
RegisterArgument::into_register($n3),
- 0
+ 0,
+ return_type_is_abort!($r)
))
}
);
- (def fn $f:ident($n1:ident: $t1:ty, $n2:ident: $t2:ty) -> $r:ty) => (
+ (def fn $f:ident($n1:ident: $t1:ty, $n2:ident: $t2:ty) -> $r:tt) => (
/// This is the raw function definition, see the ABI documentation for
/// more information.
#[unstable(feature = "sgx_platform", issue = "56975")]
#[inline(always)]
pub unsafe fn $f($n1: $t1, $n2: $t2) -> $r {
ReturnValue::from_registers(stringify!($f), do_usercall(
- Usercalls::$f as Register,
+ NonZeroU64::new(Usercalls::$f as Register)
+ .expect("Usercall number must be non-zero"),
RegisterArgument::into_register($n1),
RegisterArgument::into_register($n2),
- 0,0
+ 0,0,
+ return_type_is_abort!($r)
))
}
);
- (def fn $f:ident($n1:ident: $t1:ty) -> $r:ty) => (
+ (def fn $f:ident($n1:ident: $t1:ty) -> $r:tt) => (
/// This is the raw function definition, see the ABI documentation for
/// more information.
#[unstable(feature = "sgx_platform", issue = "56975")]
#[inline(always)]
pub unsafe fn $f($n1: $t1) -> $r {
ReturnValue::from_registers(stringify!($f), do_usercall(
- Usercalls::$f as Register,
+ NonZeroU64::new(Usercalls::$f as Register)
+ .expect("Usercall number must be non-zero"),
RegisterArgument::into_register($n1),
- 0,0,0
+ 0,0,0,
+ return_type_is_abort!($r)
))
}
);
- (def fn $f:ident() -> $r:ty) => (
+ (def fn $f:ident() -> $r:tt) => (
/// This is the raw function definition, see the ABI documentation for
/// more information.
#[unstable(feature = "sgx_platform", issue = "56975")]
#[inline(always)]
pub unsafe fn $f() -> $r {
ReturnValue::from_registers(stringify!($f), do_usercall(
- Usercalls::$f as Register,
- 0,0,0,0
+ NonZeroU64::new(Usercalls::$f as Register)
+ .expect("Usercall number must be non-zero"),
+ 0,0,0,0,
+ return_type_is_abort!($r)
))
}
);
}
invoke_with_usercalls!(define_usercalls);
-invoke_with_usercalls!(define_usercalls_asm);
}
pub unsafe fn abort_internal() -> ! {
- abi::panic::usercall_exit(true)
+ abi::usercalls::exit(true)
}
pub fn hashmap_random_keys() -> (u64, u64) {
/// This queue is used to implement condition variable and mutexes.
///
/// Users of this API are expected to use the `WaitVariable<T>` type. Since
-/// that type is not `Sync`, it needs to be protected by e.g. a `SpinMutex` to
+/// that type is not `Sync`, it needs to be protected by e.g., a `SpinMutex` to
/// allow shared access.
///
/// Since userspace may send spurious wake-ups, the wakeup event state is
self.inner.is_empty()
}
- /// Add the calling thread to the WaitVariable's wait queue, then wait
+ /// Adds the calling thread to the `WaitVariable`'s wait queue, then waits
/// until a wakeup event.
///
/// This function does not return until this thread has been awoken.
/// [`FileType`]: ../../../../std/fs/struct.FileType.html
#[stable(feature = "file_type_ext", since = "1.5.0")]
pub trait FileTypeExt {
- /// Returns whether this file type is a block device.
+ /// Returns `true` if this file type is a block device.
///
/// # Examples
///
/// ```
#[stable(feature = "file_type_ext", since = "1.5.0")]
fn is_block_device(&self) -> bool;
- /// Returns whether this file type is a char device.
+ /// Returns `true` if this file type is a char device.
///
/// # Examples
///
/// ```
#[stable(feature = "file_type_ext", since = "1.5.0")]
fn is_char_device(&self) -> bool;
- /// Returns whether this file type is a fifo.
+ /// Returns `true` if this file type is a fifo.
///
/// # Examples
///
/// ```
#[stable(feature = "file_type_ext", since = "1.5.0")]
fn is_fifo(&self) -> bool;
- /// Returns whether this file type is a socket.
+ /// Returns `true` if this file type is a socket.
///
/// # Examples
///
/// # Note
///
/// On Windows, you must specify whether a symbolic link points to a file
-/// or directory. Use `os::windows::fs::symlink_file` to create a
+/// or directory. Use `os::windows::fs::symlink_file` to create a
/// symbolic link to a file, or `os::windows::fs::symlink_dir` to create a
-/// symbolic link to a directory. Additionally, the process must have
+/// symbolic link to a directory. Additionally, the process must have
/// `SeCreateSymbolicLinkPrivilege` in order to be able to create a
/// symbolic link.
///
})
}
- /// Returns true if and only if the address is unnamed.
+ /// Returns `true` if the address is unnamed.
///
/// # Examples
///
/// ```
///
/// # Platform specific
- /// On Redox this always returns None.
+ /// On Redox this always returns `None`.
#[stable(feature = "unix_socket", since = "1.10.0")]
pub fn take_error(&self) -> io::Result<Option<io::Error>> {
self.0.take_error()
/// ```
///
/// # Platform specific
- /// On Redox this always returns None.
+ /// On Redox this always returns `None`.
#[stable(feature = "unix_socket", since = "1.10.0")]
pub fn take_error(&self) -> io::Result<Option<io::Error>> {
self.0.take_error()
Ok(UnixDatagram(inner))
}
- /// Create an unnamed pair of connected sockets.
+ /// Creates an unnamed pair of connected sockets.
///
/// Returns two `UnixDatagrams`s which are connected to each other.
///
/// [`process::Command`]: ../../../../std/process/struct.Command.html
#[stable(feature = "rust1", since = "1.0.0")]
pub trait CommandExt {
- /// Sets the child process's user id. This translates to a
+ /// Sets the child process's user ID. This translates to a
/// `setuid` call in the child process. Failure in the `setuid`
/// call will cause the spawn to fail.
#[stable(feature = "rust1", since = "1.0.0")]
fn uid(&mut self, id: u32) -> &mut process::Command;
- /// Similar to `uid`, but sets the group id of the child process. This has
+ /// Similar to `uid`, but sets the group ID of the child process. This has
/// the same semantics as the `uid` field.
#[stable(feature = "rust1", since = "1.0.0")]
fn gid(&mut self, id: u32) -> &mut process::Command;
// Processes
////////////////////////////////////////////////////////////////////////////////
-/// The unique id of the process (this should never be negative).
+/// The unique ID of the process (this should never be negative).
pub struct Process {
pid: pid_t,
status: Option<ExitStatus>,
//! # Overview
//!
//! For historical reasons, the Windows API uses a form of potentially
-//! ill-formed UTF-16 encoding for strings. Specifically, the 16-bit
+//! ill-formed UTF-16 encoding for strings. Specifically, the 16-bit
//! code units in Windows strings may contain [isolated surrogate code
-//! points which are not paired together][ill-formed-utf-16]. The
+//! points which are not paired together][ill-formed-utf-16]. The
//! Unicode standard requires that surrogate code points (those in the
//! range U+D800 to U+DFFF) always be *paired*, because in the UTF-16
//! encoding a *surrogate code unit pair* is used to encode a single
-//! character. For compatibility with code that does not enforce
+//! character. For compatibility with code that does not enforce
//! these pairings, Windows does not enforce them, either.
//!
//! While it is not always possible to convert such a string losslessly into
//! a valid UTF-16 string (or even UTF-8), it is often desirable to be
//! able to round-trip such a string from and to Windows APIs
-//! losslessly. For example, some Rust code may be "bridging" some
+//! losslessly. For example, some Rust code may be "bridging" some
//! Windows APIs together, just passing `WCHAR` strings among those
//! APIs without ever really looking into the strings.
//!
//! # `OsStringExt` and `OsStrExt`
//!
//! [`OsString`] is the Rust wrapper for owned strings in the
-//! preferred representation of the operating system. On Windows,
+//! preferred representation of the operating system. On Windows,
//! this struct gets augmented with an implementation of the
-//! [`OsStringExt`] trait, which has a [`from_wide`] method. This
+//! [`OsStringExt`] trait, which has a [`from_wide`] method. This
//! lets you create an [`OsString`] from a `&[u16]` slice; presumably
//! you get such a slice out of a `WCHAR` Windows API.
//!
//! Similarly, [`OsStr`] is the Rust wrapper for borrowed strings from
-//! preferred representation of the operating system. On Windows, the
+//! preferred representation of the operating system. On Windows, the
//! [`OsStrExt`] trait provides the [`encode_wide`] method, which
-//! outputs an [`EncodeWide`] iterator. You can [`collect`] this
+//! outputs an [`EncodeWide`] iterator. You can [`collect`] this
//! iterator, for example, to obtain a `Vec<u16>`; you can later get a
//! pointer to this vector's contents and feed it to Windows APIs.
//!
/// [`FileType`]: ../../../../std/fs/struct.FileType.html
#[unstable(feature = "windows_file_type_ext", issue = "0")]
pub trait FileTypeExt {
- /// Returns whether this file type is a symbolic link that is also a directory.
+ /// Returns `true` if this file type is a symbolic link that is also a directory.
#[unstable(feature = "windows_file_type_ext", issue = "0")]
fn is_symlink_dir(&self) -> bool;
- /// Returns whether this file type is a symbolic link that is also a file.
+ /// Returns `true` if this file type is a symbolic link that is also a file.
#[unstable(feature = "windows_file_type_ext", issue = "0")]
fn is_symlink_file(&self) -> bool;
}
#[stable(feature = "rust1", since = "1.0.0")]
pub type RawSocket = raw::SOCKET;
-/// Extract raw handles.
+/// Extracts raw handles.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsRawHandle {
/// Extracts the raw handle, without taking any ownership.
}
}
-/// Extract raw sockets.
+/// Extracts raw sockets.
#[stable(feature = "rust1", since = "1.0.0")]
pub trait AsRawSocket {
/// Extracts the underlying raw socket from this object.
fn as_raw_socket(&self) -> RawSocket;
}
-/// Create I/O objects from raw sockets.
+/// Creates I/O objects from raw sockets.
#[stable(feature = "from_raw_os", since = "1.1.0")]
pub trait FromRawSocket {
/// Creates a new I/O object from the given raw socket.
-//! Implementation of `std::os` functionality for Windows
+//! Implementation of `std::os` functionality for Windows.
#![allow(nonstandard_style)]
/// Takes a parameter `wait` which indicates if this pipe is currently being
/// read whether the function should block waiting for the read to complete.
///
- /// Return values:
+ /// Possible return values:
///
/// * `true` - finished any pending read and the pipe is not at EOF (keep
/// going)
val
}
-/// Print the symbol of the backtrace frame.
+/// Prints the symbol of the backtrace frame.
///
/// These output functions should now be used everywhere to ensure consistency.
/// You may want to also use `output_fileline`.
w.write_all(b"\n")
}
-/// Print the filename and line number of the backtrace frame.
+/// Prints the filename and line number of the backtrace frame.
///
/// See also `output`.
#[allow(dead_code)]
/// A Unicode code point: from U+0000 to U+10FFFF.
///
-/// Compare with the `char` type,
+/// This is in contrast to the `char` type,
/// which represents a Unicode scalar value:
/// a code point that is not a surrogate (U+D800 to U+DFFF).
#[derive(Eq, PartialEq, Ord, PartialOrd, Clone, Copy)]
}
}
-/// Create a new WTF-8 string from an iterator of code points.
+/// Creates a new WTF-8 string from an iterator of code points.
///
/// This replaces surrogate code point pairs with supplementary code points,
/// like concatenating ill-formed UTF-16 strings effectively would.
}
-/// Return a slice of the given string for the byte range [`begin`..`end`).
+/// Returns a slice of the given string for the byte range [`begin`..`end`).
///
/// # Panics
///
}
}
-/// Return a slice of the given string from byte `begin` to its end.
+/// Returns a slice of the given string from byte `begin` to its end.
///
/// # Panics
///
}
}
-/// Return a slice of the given string from its beginning to byte `end`.
+/// Returns a slice of the given string from its beginning to byte `end`.
///
/// # Panics
///
/// let flag2 = Arc::clone(&flag);
///
/// let parked_thread = thread::spawn(move || {
-/// // We want to wait until the flag is set. We *could* just spin, but using
+/// // We want to wait until the flag is set. We *could* just spin, but using
/// // park/unpark is more efficient.
/// while !flag2.load(Ordering::Acquire) {
/// println!("Parking thread");
/// instant when created, and are often useful for tasks such as measuring
/// benchmarks or timing how long an operation takes.
///
-/// Note, however, that instants are not guaranteed to be **steady**. In other
+/// Note, however, that instants are not guaranteed to be **steady**. In other
/// words, each tick of the underlying clock may not be the same length (e.g.
/// some seconds may be longer than others). An instant may jump forwards or
/// experience time dilation (slow down or speed up), but it will never go
/// Returns `Some(t)` where `t` is the time `self + duration` if `t` can be represented as
/// `Instant` (which means it's inside the bounds of the underlying data structure), `None`
/// otherwise.
- #[unstable(feature = "time_checked_add", issue = "55940")]
+ #[stable(feature = "time_checked_add", since = "1.34.0")]
pub fn checked_add(&self, duration: Duration) -> Option<Instant> {
- self.0.checked_add_duration(&duration).map(|t| Instant(t))
+ self.0.checked_add_duration(&duration).map(Instant)
}
/// Returns `Some(t)` where `t` is the time `self - duration` if `t` can be represented as
/// `Instant` (which means it's inside the bounds of the underlying data structure), `None`
/// otherwise.
- #[unstable(feature = "time_checked_add", issue = "55940")]
+ #[stable(feature = "time_checked_add", since = "1.34.0")]
pub fn checked_sub(&self, duration: Duration) -> Option<Instant> {
- self.0.checked_sub_duration(&duration).map(|t| Instant(t))
+ self.0.checked_sub_duration(&duration).map(Instant)
}
}
/// Returns `Some(t)` where `t` is the time `self + duration` if `t` can be represented as
/// `SystemTime` (which means it's inside the bounds of the underlying data structure), `None`
/// otherwise.
- #[unstable(feature = "time_checked_add", issue = "55940")]
+ #[stable(feature = "time_checked_add", since = "1.34.0")]
pub fn checked_add(&self, duration: Duration) -> Option<SystemTime> {
- self.0.checked_add_duration(&duration).map(|t| SystemTime(t))
+ self.0.checked_add_duration(&duration).map(SystemTime)
}
/// Returns `Some(t)` where `t` is the time `self - duration` if `t` can be represented as
/// `SystemTime` (which means it's inside the bounds of the underlying data structure), `None`
/// otherwise.
- #[unstable(feature = "time_checked_add", issue = "55940")]
+ #[stable(feature = "time_checked_add", since = "1.34.0")]
pub fn checked_sub(&self, duration: Duration) -> Option<SystemTime> {
- self.0.checked_sub_duration(&duration).map(|t| SystemTime(t))
+ self.0.checked_sub_duration(&duration).map(SystemTime)
}
}
log = "0.4"
scoped-tls = "0.1"
syntax_pos = { path = "../libsyntax_pos" }
-rustc_errors = { path = "../librustc_errors" }
+errors = { path = "../librustc_errors", package = "rustc_errors" }
rustc_data_structures = { path = "../librustc_data_structures" }
rustc_target = { path = "../librustc_target" }
smallvec = { version = "0.6.7", features = ["union", "may_dangle"] }
}
}
-/// Arguments of a path segment.
+/// The arguments of a path segment.
///
/// E.g., `<A, B>` as in `Foo<A, B>` or `(A, B)` as in `Foo(A, B)`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum GenericArgs {
- /// The `<'a, A,B,C>` in `foo::bar::baz::<'a, A,B,C>`
+ /// The `<'a, A, B, C>` in `foo::bar::baz::<'a, A, B, C>`.
AngleBracketed(AngleBracketedArgs),
- /// The `(A,B)` and `C` in `Foo(A,B) -> C`
+ /// The `(A, B)` and `C` in `Foo(A, B) -> C`.
Parenthesized(ParenthesizedArgs),
}
}
}
-/// A path like `Foo<'a, T>`
+/// A path like `Foo<'a, T>`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, Default)]
pub struct AngleBracketedArgs {
- /// Overall span
+ /// The overall span.
pub span: Span,
/// The arguments for this path segment.
pub args: Vec<GenericArg>,
/// Bindings (equality constraints) on associated types, if present.
- ///
- /// E.g., `Foo<A=Bar>`.
+ /// E.g., `Foo<A = Bar>`.
pub bindings: Vec<TypeBinding>,
}
}
}
-/// A path like `Foo(A,B) -> C`
+/// A path like `Foo(A, B) -> C`.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct ParenthesizedArgs {
/// Overall span
}
}
-/// Node id used to represent the root of the crate.
+/// `NodeId` used to represent the root of the crate.
pub const CRATE_NODE_ID: NodeId = NodeId::from_u32_const(0);
/// When parsing and doing expansions, we initially give all AST nodes this AST
}
}
-/// A `where` clause in a definition
+/// A where-clause in a definition.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct WhereClause {
pub id: NodeId,
pub span: Span,
}
-/// A single predicate in a `where` clause
+/// A single predicate in a where-clause.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum WherePredicate {
/// A type binding (e.g., `for<'c> Foo: Send + Clone + 'c`).
pub decl: P<FnDecl>,
}
-/// The different kinds of types recognized by the compiler.
+/// The various kinds of type recognized by the compiler.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum TyKind {
/// A variable-length slice (`[T]`).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub enum FunctionRetTy {
- /// Return type is not specified.
+ /// The return type is not specified.
///
/// Functions default to `()` and closures default to inference.
/// Span points to where return type would be inserted.
/// `TraitRef`s appear in impls.
///
-/// Resolve maps each `TraitRef`'s `ref_id` to its defining trait; that's all
+/// Resolution maps each `TraitRef`'s `ref_id` to its defining trait; that's all
/// that the `ref_id` is for. The `impl_id` maps to the "self type" of this impl.
/// If this impl is an `ItemKind::Impl`, the `impl_id` is redundant (it could be the
-/// same as the impl's node-id).
+/// same as the impl's `NodeId`).
#[derive(Clone, RustcEncodable, RustcDecodable, Debug)]
pub struct TraitRef {
pub path: Path,
//! Parsing and validation of builtin attributes
use crate::ast::{self, Attribute, MetaItem, Name, NestedMetaItemKind};
-use crate::errors::{Applicability, Handler};
use crate::feature_gate::{Features, GatedCfg};
use crate::parse::ParseSess;
+use errors::{Applicability, Handler};
use syntax_pos::{symbol::Symbol, Span};
use super::{list_contains_name, mark_used, MetaItemKind};
pub suggestion: Option<Symbol>,
}
-/// Check if `attrs` contains an attribute like `#![feature(feature_name)]`.
+/// Checks if `attrs` contains an attribute like `#![feature(feature_name)]`.
/// This will not perform any "sanity checks" on the form of the attributes.
pub fn contains_feature_attr(attrs: &[Attribute], feature_name: &str) -> bool {
attrs.iter().any(|item| {
})
}
-/// Find the first stability attribute. `None` if none exists.
+/// Finds the first stability attribute. `None` if none exists.
pub fn find_stability(sess: &ParseSess, attrs: &[Attribute],
item_sp: Span) -> Option<Stability> {
find_stability_generic(sess, attrs.iter(), item_sp)
pub note: Option<Symbol>,
}
-/// Find the deprecation attribute. `None` if none exists.
+/// Finds the deprecation attribute. `None` if none exists.
pub fn find_deprecation(sess: &ParseSess, attrs: &[Attribute],
item_sp: Span) -> Option<Deprecation> {
find_deprecation_generic(sess, attrs.iter(), item_sp)
let diagnostic = &sess.span_diagnostic;
'outer: for attr in attrs_iter {
- if attr.path != "deprecated" {
- continue
+ if !attr.check_name("deprecated") {
+ continue;
}
- mark_used(attr);
-
if depr.is_some() {
span_err!(diagnostic, item_sp, E0550, "multiple deprecated attributes");
break
}
- depr = if let Some(metas) = attr.meta_item_list() {
- let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
- if item.is_some() {
- handle_errors(sess, meta.span, AttrError::MultipleItem(meta.name()));
- return false
- }
- if let Some(v) = meta.value_str() {
- *item = Some(v);
- true
- } else {
- if let Some(lit) = meta.name_value_literal() {
- handle_errors(
- sess,
- lit.span,
- AttrError::UnsupportedLiteral(
- "literal in `deprecated` \
- value must be a string",
- lit.node.is_bytestr()
- ),
- );
- } else {
- span_err!(diagnostic, meta.span, E0551, "incorrect meta item");
+ let meta = attr.meta().unwrap();
+ depr = match &meta.node {
+ MetaItemKind::Word => Some(Deprecation { since: None, note: None }),
+ MetaItemKind::NameValue(..) => {
+ meta.value_str().map(|note| {
+ Deprecation { since: None, note: Some(note) }
+ })
+ }
+ MetaItemKind::List(list) => {
+ let get = |meta: &MetaItem, item: &mut Option<Symbol>| {
+ if item.is_some() {
+ handle_errors(sess, meta.span, AttrError::MultipleItem(meta.name()));
+ return false
}
+ if let Some(v) = meta.value_str() {
+ *item = Some(v);
+ true
+ } else {
+ if let Some(lit) = meta.name_value_literal() {
+ handle_errors(
+ sess,
+ lit.span,
+ AttrError::UnsupportedLiteral(
+ "literal in `deprecated` \
+ value must be a string",
+ lit.node.is_bytestr()
+ ),
+ );
+ } else {
+ span_err!(diagnostic, meta.span, E0551, "incorrect meta item");
+ }
- false
- }
- };
+ false
+ }
+ };
- let mut since = None;
- let mut note = None;
- for meta in metas {
- match &meta.node {
- NestedMetaItemKind::MetaItem(mi) => {
- match &*mi.name().as_str() {
- "since" => if !get(mi, &mut since) { continue 'outer },
- "note" => if !get(mi, &mut note) { continue 'outer },
- _ => {
- handle_errors(
- sess,
- meta.span,
- AttrError::UnknownMetaItem(mi.name(), &["since", "note"]),
- );
- continue 'outer
+ let mut since = None;
+ let mut note = None;
+ for meta in list {
+ match &meta.node {
+ NestedMetaItemKind::MetaItem(mi) => {
+ match &*mi.name().as_str() {
+ "since" => if !get(mi, &mut since) { continue 'outer },
+ "note" => if !get(mi, &mut note) { continue 'outer },
+ _ => {
+ handle_errors(
+ sess,
+ meta.span,
+ AttrError::UnknownMetaItem(mi.name(), &["since", "note"]),
+ );
+ continue 'outer
+ }
}
}
- }
- NestedMetaItemKind::Literal(lit) => {
- handle_errors(
- sess,
- lit.span,
- AttrError::UnsupportedLiteral(
- "item in `deprecated` must be a key/value pair",
- false,
- ),
- );
- continue 'outer
+ NestedMetaItemKind::Literal(lit) => {
+ handle_errors(
+ sess,
+ lit.span,
+ AttrError::UnsupportedLiteral(
+ "item in `deprecated` must be a key/value pair",
+ false,
+ ),
+ );
+ continue 'outer
+ }
}
}
- }
- Some(Deprecation {since: since, note: note})
- } else {
- Some(Deprecation{since: None, note: None})
- }
+ Some(Deprecation { since, note })
+ }
+ };
}
depr
self.span
}
- /// Returns true if this list item is a MetaItem with a name of `name`.
+ /// Returns `true` if this list item is a `MetaItem` with a name of `name`.
pub fn check_name(&self, name: &str) -> bool {
self.meta_item().map_or(false, |meta_item| meta_item.check_name(name))
}
}
impl Attribute {
+ /// Returns `true` if the attribute's path matches the argument. If it matches, then the
+ /// attribute is marked as used.
+ ///
+ /// To check the attribute name without marking it used, use the `path` field directly.
pub fn check_name(&self, name: &str) -> bool {
let matches = self.path == name;
if matches {
}
impl Attribute {
- /// Extract the MetaItem from inside this Attribute.
+ /// Extracts the MetaItem from inside this Attribute.
pub fn meta(&self) -> Option<MetaItem> {
let mut tokens = self.tokens.trees().peekable();
Some(MetaItem {
})
}
- /// Convert self to a normal #[doc="foo"] comment, if it is a
+ /// Converts self to a normal #[doc="foo"] comment, if it is a
/// comment like `///` or `/** */`. (Returns self unchanged for
/// non-sugared doc attributes.)
pub fn with_desugared_doc<T, F>(&self, f: F) -> T where
match *self {
LitKind::Str(string, ast::StrStyle::Cooked) => {
- let escaped = string.as_str().escape_default();
+ let escaped = string.as_str().escape_default().to_string();
Token::Literal(token::Lit::Str_(Symbol::intern(&escaped)), None)
}
LitKind::Str(string, ast::StrStyle::Raw(n)) => {
use crate::attr;
use crate::ast;
use crate::edition::Edition;
-use crate::errors::Applicability;
use crate::mut_visit::*;
use crate::parse::{token, ParseSess};
use crate::ptr::P;
use crate::util::map_in_place::MapInPlace;
+use errors::Applicability;
use smallvec::SmallVec;
/// A folder that strips out items that do not belong in the current configuration.
///
/// Gives a compiler warning when the `cfg_attr` contains no attributes and
/// is in the original source file. Gives a compiler error if the syntax of
- /// the attribute is incorrect
+ /// the attribute is incorrect.
fn process_cfg_attr(&mut self, attr: ast::Attribute) -> Vec<ast::Attribute> {
if !attr.check_name("cfg_attr") {
return vec![attr];
}
}
- /// Determine if a node with the given attributes should be included in this configuration.
+ /// Determines if a node with the given attributes should be included in this configuration.
pub fn in_cfg(&mut self, attrs: &[ast::Attribute]) -> bool {
attrs.iter().all(|attr| {
if !is_cfg(attr) {
}
}
- // deny #[cfg] on generic parameters until we decide what to do with it.
- // see issue #51279.
+ /// Denies `#[cfg]` on generic parameters until we decide what to do with it.
+ /// See issue #51279.
pub fn disallow_cfg_on_generic_param(&mut self, param: &ast::GenericParam) {
for attr in param.attrs() {
let offending_attr = if attr.check_name("cfg") {
}
impl ErrorLocation {
- /// Create an error location from a span.
+ /// Creates an error location from a span.
pub fn from_span(ecx: &ExtCtxt<'_>, sp: Span) -> ErrorLocation {
let loc = ecx.source_map().lookup_char_pos_adj(sp.lo());
ErrorLocation {
}
}
-/// Get the directory where metadata for a given `prefix` should be stored.
+/// Gets the directory where metadata for a given `prefix` should be stored.
///
/// See `output_metadata`.
pub fn get_metadata_dir(prefix: &str) -> PathBuf {
use crate::diagnostics::metadata::output_metadata;
-pub use crate::errors::*;
+pub use errors::*;
// Maximum width of any line in an extended error description (inclusive).
const MAX_DESCRIPTION_WIDTH: usize = 80;
use crate::attr::HasAttrs;
use crate::source_map::{SourceMap, Spanned, respan};
use crate::edition::Edition;
-use crate::errors::{DiagnosticBuilder, DiagnosticId};
use crate::ext::expand::{self, AstFragment, Invocation};
use crate::ext::hygiene::{self, Mark, SyntaxContext, Transparency};
use crate::mut_visit::{self, MutVisitor};
use crate::ThinVec;
use crate::tokenstream::{self, TokenStream};
+use errors::{DiagnosticBuilder, DiagnosticId};
use smallvec::{smallvec, SmallVec};
use syntax_pos::{Span, MultiSpan, DUMMY_SP};
/// The result of a macro expansion. The return values of the various
/// methods are spliced into the AST at the callsite of the macro.
pub trait MacResult {
- /// Create an expression.
+ /// Creates an expression.
fn make_expr(self: Box<Self>) -> Option<P<ast::Expr>> {
None
}
- /// Create zero or more items.
+ /// Creates zero or more items.
fn make_items(self: Box<Self>) -> Option<SmallVec<[P<ast::Item>; 1]>> {
None
}
- /// Create zero or more impl items.
+ /// Creates zero or more impl items.
fn make_impl_items(self: Box<Self>) -> Option<SmallVec<[ast::ImplItem; 1]>> {
None
}
- /// Create zero or more trait items.
+ /// Creates zero or more trait items.
fn make_trait_items(self: Box<Self>) -> Option<SmallVec<[ast::TraitItem; 1]>> {
None
}
- /// Create zero or more items in an `extern {}` block
+ /// Creates zero or more items in an `extern {}` block.
fn make_foreign_items(self: Box<Self>) -> Option<SmallVec<[ast::ForeignItem; 1]>> { None }
- /// Create a pattern.
+ /// Creates a pattern.
fn make_pat(self: Box<Self>) -> Option<P<ast::Pat>> {
None
}
- /// Create zero or more statements.
+ /// Creates zero or more statements.
///
/// By default this attempts to create an expression statement,
/// returning None if that fails.
}
impl DummyResult {
- /// Create a default MacResult that can be anything.
+ /// Creates a default MacResult that can be anything.
///
/// Use this as a return value after hitting any errors and
/// calling `span_err`.
Box::new(DummyResult { expr_only: false, is_error: false, span })
}
- /// Create a default MacResult that can only be an expression.
+ /// Creates a default MacResult that can only be an expression.
///
/// Use this for macros that must expand to an expression, so even
/// if an error is encountered internally, the user will receive
}
impl SyntaxExtension {
- /// Return which kind of macro calls this syntax extension.
+ /// Returns which kind of macro calls this syntax extension.
pub fn kind(&self) -> MacroKind {
match *self {
SyntaxExtension::DeclMacro { .. } |
expand::MacroExpander::new(self, false)
}
- /// Returns a `Folder` that deeply expands all macros and assigns all node ids in an AST node.
- /// Once node ids are assigned, the node may not be expanded, removed, or otherwise modified.
+ /// Returns a `Folder` that deeply expands all macros and assigns all `NodeId`s in an AST node.
+ /// Once `NodeId`s are assigned, the node may not be expanded, removed, or otherwise modified.
pub fn monotonic_expander<'b>(&'b mut self) -> expand::MacroExpander<'b, 'a> {
expand::MacroExpander::new(self, true)
}
}
}
-/// Extract a string literal from the macro expanded version of `expr`,
+/// Extracts a string literal from the macro expanded version of `expr`,
/// emitting `err_msg` if `expr` is not a string literal. This does not stop
-/// compilation on error, merely emits a non-fatal error and returns None.
+/// compilation on error, merely emits a non-fatal error and returns `None`.
pub fn expr_to_spanned_string<'a>(
cx: &'a mut ExtCtxt<'_>,
mut expr: P<ast::Expr>,
}
/// Interpreting `tts` as a comma-separated sequence of expressions,
-/// expect exactly one string literal, or emit an error and return None.
+/// expect exactly one string literal, or emit an error and return `None`.
pub fn get_single_str_from_tts(cx: &mut ExtCtxt<'_>,
sp: Span,
tts: &[tokenstream::TokenTree],
})
}
-/// Extract comma-separated expressions from `tts`. If there is a
-/// parsing error, emit a non-fatal error and return None.
+/// Extracts comma-separated expressions from `tts`. If there is a
+/// parsing error, emit a non-fatal error and return `None`.
pub fn get_exprs_from_tts(cx: &mut ExtCtxt<'_>,
sp: Span,
tts: &[tokenstream::TokenTree]) -> Option<Vec<P<ast::Expr>>> {
/// Constructs a qualified path.
///
- /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A=Bar>`.
+ /// Constructs a path like `<self_type as trait_path>::ident<'a, T, A = Bar>`.
fn qpath_all(&self,
self_type: P<ast::Ty>,
trait_path: ast::Path,
use crate::attr::{self, HasAttrs};
use crate::source_map::{ExpnInfo, MacroBang, MacroAttribute, dummy_spanned, respan};
use crate::config::StripUnconfigured;
-use crate::errors::{Applicability, FatalError};
use crate::ext::base::*;
use crate::ext::derive::{add_derived_markers, collect_derives};
use crate::ext::hygiene::{self, Mark, SyntaxContext};
use crate::visit::{self, Visitor};
use crate::util::map_in_place::MapInPlace;
+use errors::{Applicability, FatalError};
use smallvec::{smallvec, SmallVec};
use syntax_pos::{Span, DUMMY_SP, FileName};
use syntax_pos::hygiene::ExpnFormat;
}
}
- /// Collect all macro invocations reachable at this time in this AST fragment, and replace
+ /// Collects all macro invocations reachable at this time in this AST fragment, and replaces
/// them with "placeholders" - dummy macro invocations with specially crafted `NodeId`s.
/// Then call into resolver that builds a skeleton ("reduced graph") of the fragment and
/// prepares data for resolving paths of macro invocations.
-//! This is an NFA-based parser, which calls out to the main rust parser for named nonterminals
+//! This is an NFA-based parser, which calls out to the main rust parser for named non-terminals
//! (which it commits to fully when it hits one in a grammar). There's a set of current NFA threads
//! and a set of next ones. Instead of NTs, we have a special case for Kleene star. The big-O, in
//! pathological cases, is worse than traditional use of NFA or Earley parsing, but it's an easier
//!
//! As it processes them, it fills up `eof_items` with threads that would be valid if
//! the macro invocation is now over, `bb_items` with threads that are waiting on
-//! a Rust nonterminal like `$e:expr`, and `next_items` with threads that are waiting
+//! a Rust non-terminal like `$e:expr`, and `next_items` with threads that are waiting
//! on a particular token. Most of the logic concerns moving the · through the
//! repetitions indicated by Kleene stars. The rules for moving the · without
//! consuming any input are called epsilon transitions. It only advances or calls
use TokenTreeOrTokenTreeSlice::*;
use crate::ast::Ident;
-use crate::errors::FatalError;
use crate::ext::tt::quoted::{self, TokenTree};
use crate::parse::{Directory, ParseSess};
use crate::parse::parser::{Parser, PathStyle};
use crate::symbol::keywords;
use crate::tokenstream::{DelimSpan, TokenStream};
+use errors::FatalError;
use smallvec::{smallvec, SmallVec};
-use syntax_pos::{self, Span};
+use syntax_pos::Span;
use rustc_data_structures::fx::FxHashMap;
use std::collections::hash_map::Entry::{Occupied, Vacant};
}
impl<'root, 'tt> MatcherPos<'root, 'tt> {
- /// Add `m` as a named match for the `idx`-th metavar.
+ /// Adds `m` as a named match for the `idx`-th metavar.
fn push_match(&mut self, idx: usize, m: NamedMatch) {
let matches = Rc::make_mut(&mut self.matches[idx]);
matches.push(m);
}.into_boxed_slice()
}
-/// Generate the top-level matcher position in which the "dot" is before the first token of the
+/// Generates the top-level matcher position in which the "dot" is before the first token of the
/// matcher `ms` and we are going to start matching at the span `open` in the source.
fn initial_matcher_pos<'root, 'tt>(ms: &'tt [TokenTree], open: Span) -> MatcherPos<'root, 'tt> {
let match_idx_hi = count_names(ms);
/// `NamedMatch` is a pattern-match result for a single `token::MATCH_NONTERMINAL`:
/// so it is associated with a single ident in a parse, and all
-/// `MatchedNonterminal`s in the `NamedMatch` have the same nonterminal type
+/// `MatchedNonterminal`s in the `NamedMatch` have the same non-terminal type
/// (expr, item, etc). Each leaf in a single `NamedMatch` corresponds to a
/// single `token::MATCH_NONTERMINAL` in the `TokenTree` that produced it.
///
Success(ret_val)
}
-/// Generate an appropriate parsing failure message. For EOF, this is "unexpected end...". For
+/// Generates an appropriate parsing failure message. For EOF, this is "unexpected end...". For
/// other tokens, this is "unexpected token...".
pub fn parse_failure_msg(tok: Token) -> String {
match tok {
}
}
-/// Perform a token equality check, ignoring syntax context (that is, an unhygienic comparison)
+ /// Performs a token equality check, ignoring syntax context (that is, an unhygienic comparison).
fn token_name_eq(t1: &Token, t2: &Token) -> bool {
if let (Some((id1, is_raw1)), Some((id2, is_raw2))) = (t1.ident(), t2.ident()) {
id1.name == id2.name && is_raw1 == is_raw2
}
}
-/// A call to the "black-box" parser to parse some rust nonterminal.
+/// A call to the "black-box" parser to parse some Rust non-terminal.
///
/// # Parameters
///
///
/// # Returns
///
-/// The parsed nonterminal.
+/// The parsed non-terminal.
fn parse_nt<'a>(p: &mut Parser<'a>, sp: Span, name: &str) -> Nonterminal {
if name == "tt" {
return token::NtTT(p.parse_token_tree());
use crate::{ast, attr};
use crate::edition::Edition;
-use crate::errors::FatalError;
use crate::ext::base::{DummyResult, ExtCtxt, MacResult, SyntaxExtension};
use crate::ext::base::{NormalTT, TTMacroExpander};
use crate::ext::expand::{AstFragment, AstFragmentKind};
use crate::symbol::Symbol;
use crate::tokenstream::{DelimSpan, TokenStream, TokenTree};
+use errors::FatalError;
use syntax_pos::{Span, DUMMY_SP, symbol::Ident};
use log::debug;
use std::collections::hash_map::Entry;
use rustc_data_structures::sync::Lrc;
-use crate::errors::Applicability;
+use errors::Applicability;
const VALID_FRAGMENT_NAMES_MSG: &str = "valid fragment specifiers are \
`ident`, `block`, `stmt`, `expr`, `pat`, `ty`, `lifetime`, `literal`, \
// after parsing/expansion. we can report every error in every macro this way.
}
-/// Check that the lhs contains no repetition which could match an empty token
+/// Checks that the lhs contains no repetition which could match an empty token
/// tree, because then the matcher would hang indefinitely.
fn check_lhs_no_empty_seq(sess: &ParseSess, tts: &[quoted::TokenTree]) -> bool {
use quoted::TokenTree;
}
}
-/// True if a fragment of type `frag` can be followed by any sort of
-/// token. We use this (among other things) as a useful approximation
+/// Returns `true` if a fragment of type `frag` can be followed by any sort of
+/// token. We use this (among other things) as a useful approximation
/// for when `frag` can be followed by a repetition like `$(...)*` or
/// `$(...)+`. In general, these can be a bit tricky to reason about,
/// so we adopt a conservative position that says that any fragment
Invalid(String, &'static str),
}
-/// True if `frag` can legally be followed by the token `tok`. For
+/// Returns `true` if `frag` can legally be followed by the token `tok`. For
/// fragments that can consume an unbounded number of tokens, `tok`
/// must be within a well-defined follow set. This is intended to
/// guarantee future compatibility: for example, without this rule, if
}
impl Delimited {
- /// Return the opening delimiter (possibly `NoDelim`).
+ /// Returns the opening delimiter (possibly `NoDelim`).
pub fn open_token(&self) -> token::Token {
token::OpenDelim(self.delim)
}
- /// Return the closing delimiter (possibly `NoDelim`).
+ /// Returns the closing delimiter (possibly `NoDelim`).
pub fn close_token(&self) -> token::Token {
token::CloseDelim(self.delim)
}
- /// Return a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
+ /// Returns a `self::TokenTree` with a `Span` corresponding to the opening delimiter.
pub fn open_tt(&self, span: Span) -> TokenTree {
let open_span = if span.is_dummy() {
span
TokenTree::Token(open_span, self.open_token())
}
- /// Return a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
+ /// Returns a `self::TokenTree` with a `Span` corresponding to the closing delimiter.
pub fn close_tt(&self, span: Span) -> TokenTree {
let close_span = if span.is_dummy() {
span
}
}
- /// Returns true if the given token tree contains no other tokens. This is vacuously true for
+ /// Returns `true` if the given token tree contains no other tokens. This is vacuously true for
/// single tokens or metavar/decls, but may be false for delimited trees or sequences.
pub fn is_empty(&self) -> bool {
match *self {
}
}
- /// Get the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
+ /// Gets the `index`-th sub-token-tree. This only makes sense for delimited trees and sequences.
pub fn get_tt(&self, index: usize) -> TokenTree {
match (self, index) {
(&TokenTree::Delimited(_, ref delimed), _) if delimed.delim == token::NoDelim => {
}
}
- /// Retrieve the `TokenTree`'s span.
+ /// Retrieves the `TokenTree`'s span.
pub fn span(&self) -> Span {
match *self {
TokenTree::Token(sp, _)
/// operator and separator, then a tuple with `(separator, KleeneOp)` is returned. Otherwise, an
/// error with the appropriate span is emitted to `sess` and a dummy value is returned.
///
-/// NOTE: In 2015 edition, * and + are the only Kleene operators and `?` is a separator. In 2018,
-/// `?` is a Kleene op and not a separator.
+/// N.B., in the 2015 edition, `*` and `+` are the only Kleene operators, and `?` is a separator.
+/// In the 2018 edition however, `?` is a Kleene operator, and not a separator.
fn parse_sep_and_kleene_op<I>(
input: &mut Peekable<I>,
span: Span,
use crate::early_buffered_lints::BufferedEarlyLintId;
use crate::source_map::Spanned;
use crate::edition::{ALL_EDITIONS, Edition};
-use crate::errors::{DiagnosticBuilder, Handler};
use crate::visit::{self, FnKind, Visitor};
use crate::parse::ParseSess;
use crate::symbol::Symbol;
+use errors::{DiagnosticBuilder, Handler};
use rustc_data_structures::fx::FxHashMap;
use rustc_target::spec::abi::Abi;
use syntax_pos::{Span, DUMMY_SP};
}
impl AttributeTemplate {
- /// Check that the given meta-item is compatible with this template.
+ /// Checks that the given meta-item is compatible with this template.
fn compatible(&self, meta_item_kind: &ast::MetaItemKind) -> bool {
match meta_item_kind {
ast::MetaItemKind::Word => self.word,
}
/// A convenience macro for constructing attribute templates.
-/// E.g. `template!(Word, List: "description")` means that the attribute
+/// E.g., `template!(Word, List: "description")` means that the attribute
/// supports forms `#[attr]` and `#[attr(description)]`.
macro_rules! template {
(Word) => { template!(@ true, None, None) };
("stable", Whitelisted, template!(List: r#"feature = "name", since = "version""#), Ungated),
("unstable", Whitelisted, template!(List: r#"feature = "name", reason = "...", issue = "N""#),
Ungated),
- ("deprecated", Normal, template!(Word, List: r#"/*opt*/ since = "version",
- /*opt*/ note = "reason"#,
- NameValueStr: "reason"), Ungated),
+ ("deprecated",
+ Normal,
+ template!(
+ Word,
+ List: r#"/*opt*/ since = "version", /*opt*/ note = "reason"#,
+ NameValueStr: "reason"
+ ),
+ Ungated
+ ),
("rustc_paren_sugar", Normal, template!(Word), Gated(Stability::Unstable,
"unboxed_closures",
#[derive(Clone, Copy, Hash)]
pub enum UnstableFeatures {
- /// Hard errors for unstable features are active, as on
- /// beta/stable channels.
+ /// Hard errors for unstable features are active, as on beta/stable channels.
Disallow,
/// Allow features to be activated, as on nightly.
Allow,
// FIXME: spec the JSON output properly.
use crate::source_map::{SourceMap, FilePathMapping};
-use crate::errors::registry::Registry;
-use crate::errors::{DiagnosticBuilder, SubDiagnostic, CodeSuggestion, SourceMapper};
-use crate::errors::{DiagnosticId, Applicability};
-use crate::errors::emitter::{Emitter, EmitterWriter};
-use syntax_pos::{self, MacroBacktrace, Span, SpanLabel, MultiSpan};
+use errors::registry::Registry;
+use errors::{DiagnosticBuilder, SubDiagnostic, CodeSuggestion, SourceMapper};
+use errors::{DiagnosticId, Applicability};
+use errors::emitter::{Emitter, EmitterWriter};
+
+use syntax_pos::{MacroBacktrace, Span, SpanLabel, MultiSpan};
use rustc_data_structures::sync::{self, Lrc};
use std::io::{self, Write};
use std::vec;
}
}
- /// Create a list of DiagnosticSpanLines from span - each line with any part
+ /// Creates a list of DiagnosticSpanLines from span - each line with any part
/// of `span` gets a DiagnosticSpanLine, with the highlight indicating the
/// `span` within the line.
fn from_span(span: Span, je: &JsonEmitter) -> Vec<DiagnosticSpanLine> {
#![feature(rustc_attrs)]
#![feature(rustc_diagnostic_macros)]
#![feature(slice_sort_by_cached_key)]
-#![feature(str_escape)]
#![feature(step_trait)]
#![feature(try_trait)]
#![feature(unicode_internals)]
#[allow(unused_extern_crates)]
extern crate serialize as rustc_serialize; // used by deriving
-pub use rustc_errors as errors;
+pub use errors;
use rustc_data_structures::sync::Lock;
use rustc_data_structures::bit_set::GrowableBitSet;
pub use rustc_data_structures::thin_vec::ThinVec;
macro_rules! panictry {
($e:expr) => ({
use std::result::Result::{Ok, Err};
- use crate::errors::FatalError;
+ use errors::FatalError;
match $e {
Ok(e) => e,
Err(mut e) => {
macro_rules! panictry_buffer {
($handler:expr, $e:expr) => ({
use std::result::Result::{Ok, Err};
- use crate::errors::{FatalError, DiagnosticBuilder};
+ use errors::{FatalError, DiagnosticBuilder};
match $e {
Ok(e) => e,
Err(errs) => {
}
}
-/// Returns None if the first col chars of s contain a non-whitespace char.
-/// Otherwise returns Some(k) where k is first char offset after that leading
-/// whitespace. Note k may be outside bounds of s.
+/// Returns `None` if the first `col` chars of `s` contain a non-whitespace char.
+/// Otherwise returns `Some(k)` where `k` is first char offset after that leading
+/// whitespace. Note that `k` may be outside bounds of `s`.
fn all_whitespace(s: &str, col: CharPos) -> Option<usize> {
let mut idx = 0;
for (i, ch) in s.char_indices().take(col.to_usize()) {
use crate::ast::{self, Ident};
use crate::source_map::{SourceMap, FilePathMapping};
-use crate::errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
use crate::parse::{token, ParseSess};
use crate::symbol::{Symbol, keywords};
-use syntax_pos::{self, BytePos, CharPos, Pos, Span, NO_EXPANSION};
+use errors::{Applicability, FatalError, Diagnostic, DiagnosticBuilder};
+use syntax_pos::{BytePos, CharPos, Pos, Span, NO_EXPANSION};
use core::unicode::property::Pattern_White_Space;
use std::borrow::Cow;
self.unwrap_or_abort(res)
}
- /// Return the next token. EFFECT: advances the string_reader.
+ /// Returns the next token. EFFECT: advances the string_reader.
pub fn try_next_token(&mut self) -> Result<TokenAndSpan, ()> {
assert!(self.fatal_errs.is_empty());
let ret_val = TokenAndSpan {
self.with_str_from_to(start, self.pos, f)
}
- /// Create a Name from a given offset to the current offset, each
+ /// Creates a Name from a given offset to the current offset, each
/// adjusted 1 towards each other (assumes that on either side there is a
/// single-byte delimiter).
fn name_from(&self, start: BytePos) -> ast::Name {
}
/// If there is whitespace, shebang, or a comment, scan it. Otherwise,
- /// return None.
+ /// return `None`.
fn scan_whitespace_or_comment(&mut self) -> Option<TokenAndSpan> {
match self.ch.unwrap_or('\0') {
// # to handle shebang at start of file -- this is the entry point
/// in a byte, (non-raw) byte string, char, or (non-raw) string literal.
/// `start` is the position of `first_source_char`, which is already consumed.
///
- /// Returns true if there was a valid char/byte, false otherwise.
+ /// Returns `true` if there was a valid char/byte.
fn scan_char_or_byte(&mut self,
start: BytePos,
first_source_char: char,
}
}
- /// Check that a base is valid for a floating literal, emitting a nice
+ /// Checks that a base is valid for a floating literal, emitting a nice
/// error if it isn't.
fn check_float_base(&mut self, start_bpos: BytePos, last_bpos: BytePos, base: usize) {
match base {
}
}
- /// Return the next token from the string, advances the input past that
+ /// Returns the next token from the string, advances the input past that
/// token, and updates the interner
fn next_token_inner(&mut self) -> Result<token::Token, ()> {
let c = self.ch;
use crate::ast::{Ident, CrateConfig};
use crate::symbol::Symbol;
use crate::source_map::SourceMap;
- use crate::errors;
use crate::feature_gate::UnstableFeatures;
use crate::parse::token;
use crate::diagnostics::plugin::ErrorMap;
// http://www.unicode.org/Public/security/10.0.0/confusables.txt
use syntax_pos::{Span, NO_EXPANSION};
-use crate::errors::{Applicability, DiagnosticBuilder};
+use errors::{Applicability, DiagnosticBuilder};
use super::StringReader;
const UNICODE_ARRAY: &[(char, &str, char)] = &[
-//! The main parser interface
+//! The main parser interface.
use crate::ast::{self, CrateConfig, NodeId};
use crate::early_buffered_lints::{BufferedEarlyLint, BufferedEarlyLintId};
use crate::source_map::{SourceMap, FilePathMapping};
-use crate::errors::{FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
use crate::feature_gate::UnstableFeatures;
use crate::parse::parser::Parser;
use crate::symbol::Symbol;
use crate::diagnostics::plugin::ErrorMap;
use crate::print::pprust::token_to_string;
+use errors::{FatalError, Level, Handler, ColorConfig, Diagnostic, DiagnosticBuilder};
use rustc_data_structures::sync::{Lrc, Lock};
use syntax_pos::{Span, SourceFile, FileName, MultiSpan};
use log::debug;
pub unstable_features: UnstableFeatures,
pub config: CrateConfig,
pub missing_fragment_specifiers: Lock<FxHashSet<Span>>,
- /// Places where raw identifiers were used. This is used for feature gating
- /// raw identifiers
+ /// Places where raw identifiers were used. This is used for feature-gating raw identifiers.
pub raw_identifier_spans: Lock<Vec<Span>>,
- /// The registered diagnostics codes
+ /// The registered diagnostics codes.
crate registered_diagnostics: Lock<ErrorMap>,
- /// Used to determine and report recursive mod inclusions
+ /// Used to determine and report recursive module inclusions.
included_mod_stack: Lock<Vec<PathBuf>>,
source_map: Lrc<SourceMap>,
pub buffered_lints: Lock<Vec<BufferedEarlyLint>>,
source_file_to_stream(sess, sess.source_map().new_source_file(name, source), override_span)
}
-/// Create a new parser from a source string
+/// Creates a new parser from a source string.
pub fn new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String) -> Parser<'_> {
panictry_buffer!(&sess.span_diagnostic, maybe_new_parser_from_source_str(sess, name, source))
}
-/// Create a new parser from a source string. Returns any buffered errors from lexing the initial
+/// Creates a new parser from a source string. Returns any buffered errors from lexing the initial
/// token stream.
pub fn maybe_new_parser_from_source_str(sess: &ParseSess, name: FileName, source: String)
-> Result<Parser<'_>, Vec<Diagnostic>>
Ok(parser)
}
-/// Create a new parser, handling errors as appropriate
+/// Creates a new parser, handling errors as appropriate
/// if the file doesn't exist
pub fn new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path) -> Parser<'a> {
source_file_to_parser(sess, file_to_source_file(sess, path, None))
}
-/// Create a new parser, returning buffered diagnostics if the file doesn't
+/// Creates a new parser, returning buffered diagnostics if the file doesn't
/// exist or from lexing the initial token stream.
pub fn maybe_new_parser_from_file<'a>(sess: &'a ParseSess, path: &Path)
-> Result<Parser<'a>, Vec<Diagnostic>> {
}
/// Given a session and a path and an optional span (for error reporting),
-/// add the path to the session's source_map and return the new source_file.
+/// add the path to the session's `source_map` and return the new `source_file`.
fn file_to_source_file(sess: &ParseSess, path: &Path, spanopt: Option<Span>)
-> Lrc<SourceFile> {
match try_file_to_source_file(sess, path, spanopt) {
}
}
-/// Given a source_file, produce a sequence of token-trees
+/// Given a `source_file`, produces a sequence of token trees.
pub fn source_file_to_stream(
sess: &ParseSess,
source_file: Lrc<SourceFile>,
panictry_buffer!(&sess.span_diagnostic, maybe_file_to_stream(sess, source_file, override_span))
}
-/// Given a source file, produce a sequence of token-trees. Returns any buffered errors from
+/// Given a source file, produces a sequence of token trees. Returns any buffered errors from
/// parsing the token tream.
pub fn maybe_file_to_stream(
sess: &ParseSess,
}
}
-/// Given stream and the `ParseSess`, produce a parser
+/// Given a stream and the `ParseSess`, produces a parser.
pub fn stream_to_parser(sess: &ParseSess, stream: TokenStream) -> Parser<'_> {
Parser::new(sess, stream, None, true, false)
}
-/// Parse a string representing a character literal into its final form.
+/// Parses a string representing a character literal into its final form.
/// Rather than just accepting/rejecting a given literal, unescapes it as
/// well. Can take any slice prefixed by a character escape. Returns the
/// character and the number of characters consumed.
}
}
-/// Parse a string representing a string literal into its final form. Does
-/// unescaping.
+/// Parses a string representing a string literal into its final form. Does unescaping.
pub fn str_lit(lit: &str, diag: Option<(Span, &Handler)>) -> String {
debug!("str_lit: given {}", lit.escape_default());
let mut res = String::with_capacity(lit.len());
let error = |i| format!("lexer should have rejected {} at {}", lit, i);
- /// Eat everything up to a non-whitespace
+ /// Eat everything up to a non-whitespace.
fn eat<'a>(it: &mut iter::Peekable<str::CharIndices<'a>>) {
loop {
match it.peek().map(|x| x.1) {
res
}
-/// Parse a string representing a raw string literal into its final form. The
+/// Parses a string representing a raw string literal into its final form. The
/// only operation this does is convert embedded CRLF into a single LF.
fn raw_str_lit(lit: &str) -> String {
debug!("raw_str_lit: given {}", lit.escape_default());
filtered_float_lit(Symbol::intern(s), suffix, diag)
}
-/// Parse a string representing a byte literal into its final form. Similar to `char_lit`
+/// Parses a string representing a byte literal into its final form. Similar to `char_lit`.
fn byte_lit(lit: &str) -> (u8, usize) {
let err = |i| format!("lexer accepted invalid byte literal {} step {}", lit, i);
let error = |i| panic!("lexer should have rejected {} at {}", lit, i);
- /// Eat everything up to a non-whitespace
+ /// Eat everything up to a non-whitespace.
fn eat<I: Iterator<Item=(usize, u8)>>(it: &mut iter::Peekable<I>) {
loop {
match it.peek().map(|x| x.1) {
})
}
-/// `SeqSep` : a sequence separator (token)
-/// and whether a trailing separator is allowed.
+/// A sequence separator.
pub struct SeqSep {
+ /// The separator token.
pub sep: Option<token::Token>,
+ /// `true` if a trailing separator is allowed.
pub trailing_sep_allowed: bool,
}
use crate::{ast, attr};
use crate::ext::base::DummyResult;
use crate::source_map::{self, SourceMap, Spanned, respan};
-use crate::errors::{self, Applicability, DiagnosticBuilder, DiagnosticId};
use crate::parse::{self, SeqSep, classify, token};
use crate::parse::lexer::{TokenAndSpan, UnmatchedBrace};
use crate::parse::lexer::comments::{doc_comment_style, strip_doc_comment_decoration};
use crate::tokenstream::{self, DelimSpan, TokenTree, TokenStream, TreeAndJoint};
use crate::symbol::{Symbol, keywords};
+use errors::{Applicability, DiagnosticBuilder, DiagnosticId};
use rustc_target::spec::abi::{self, Abi};
-use syntax_pos::{self, Span, MultiSpan, BytePos, FileName};
+use syntax_pos::{Span, MultiSpan, BytePos, FileName};
use log::{debug, trace};
use std::borrow::Cow;
type ItemInfo = (Ident, ItemKind, Option<Vec<Attribute>>);
-/// How to parse a path.
+/// Specifies how to parse a path.
#[derive(Copy, Clone, PartialEq)]
pub enum PathStyle {
/// In some contexts, notably in expressions, paths with generic arguments are ambiguous
Ignore,
}
-/// Possibly accept an `token::Interpolated` expression (a pre-parsed expression
+/// Possibly accepts a `token::Interpolated` expression (a pre-parsed expression
/// dropped into the token stream, which happens while parsing the result of
/// macro expansion). Placement of these is not as complex as I feared it would
/// be. The important thing is to make sure that lookahead doesn't balk at
}
}
-/// Returns true if `IDENT t` can start a type - `IDENT::a::b`, `IDENT<u8, u8>`,
+/// Returns `true` if `IDENT t` can start a type -- `IDENT::a::b`, `IDENT<u8, u8>`,
/// `IDENT<<u8 as Trait>::AssocTy>`.
///
/// Types can also be of the form `IDENT(u8, u8) -> u8`, however this assumes
-/// that IDENT is not the ident of a fn trait
+/// that `IDENT` is not the ident of a fn trait.
fn can_continue_type_after_non_fn_ident(t: &token::Token) -> bool {
t == &token::ModSep || t == &token::Lt ||
t == &token::BinOp(token::Shl)
}
}
-/// Create a placeholder argument.
+/// Creates a placeholder argument.
fn dummy_arg(span: Span) -> Arg {
let ident = Ident::new(keywords::Invalid.name(), span);
let pat = P(Pat {
next
}
- /// Convert the current token to a string using self's reader
+ /// Converts the current token to a string using `self`'s reader.
pub fn this_token_to_string(&self) -> String {
pprust::token_to_string(&self.token)
}
}
}
- /// Expect and consume the token t. Signal an error if
- /// the next token is not t.
+ /// Expects and consumes the token `t`. Signals an error if the next token is not `t`.
pub fn expect(&mut self, t: &token::Token) -> PResult<'a, bool /* recovered */> {
if self.expected_tokens.is_empty() {
if self.token == *t {
}
}
- /// returns the span of expr, if it was not interpolated or the span of the interpolated token
+ /// Returns the span of expr, if it was not interpolated or the span of the interpolated token.
fn interpolated_or_expr_span(&self,
expr: PResult<'a, P<Expr>>)
-> PResult<'a, (Span, P<Expr>)> {
}
}
- /// Check if the next token is `tok`, and return `true` if so.
+ /// Checks if the next token is `tok`, and returns `true` if so.
///
/// This method will automatically add `tok` to `expected_tokens` if `tok` is not
/// encountered.
is_present
}
- /// Consume token 'tok' if it exists. Returns true if the given
- /// token was present, false otherwise.
+ /// Consumes the token `tok` if it exists. Returns whether the given token was present.
pub fn eat(&mut self, tok: &token::Token) -> bool {
let is_present = self.check(tok);
if is_present { self.bump() }
self.token.is_keyword(kw)
}
- /// If the next token is the given keyword, eat it and return
- /// true. Otherwise, return false.
+ /// If the next token is the given keyword, eats it and returns
+ /// `true`. Otherwise, returns `false`.
pub fn eat_keyword(&mut self, kw: keywords::Keyword) -> bool {
if self.check_keyword(kw) {
self.bump();
}
}
- /// If the given word is not a keyword, signal an error.
- /// If the next token is not the given word, signal an error.
- /// Otherwise, eat it.
+ /// If the given word is not a keyword, signals an error.
+ /// If the next token is not the given word, signals an error.
+ /// Otherwise, eats it.
fn expect_keyword(&mut self, kw: keywords::Keyword) -> PResult<'a, ()> {
if !self.eat_keyword(kw) {
self.unexpected()
}
}
- /// Expect and consume a `+`. if `+=` is seen, replace it with a `=`
- /// and continue. If a `+` is not seen, return false.
+ /// Expects and consumes a `+`. If `+=` is seen, replaces it with a `=`
+ /// and continues. If a `+` is not seen, returns `false`.
///
- /// This is using when token splitting += into +.
- /// See issue 47856 for an example of when this may occur.
+ /// This is used when token-splitting `+=` into `+`.
+ /// See issue #47856 for an example of when this may occur.
fn eat_plus(&mut self) -> bool {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::Plus)));
match self.token {
/// Checks to see if the next token is either `+` or `+=`.
- /// Otherwise returns false.
+ /// Otherwise returns `false`.
fn check_plus(&mut self) -> bool {
if self.token.is_like_plus() {
true
}
}
- /// Expect and consume an `&`. If `&&` is seen, replace it with a single
- /// `&` and continue. If an `&` is not seen, signal an error.
+ /// Expects and consumes an `&`. If `&&` is seen, replaces it with a single
+ /// `&` and continues. If an `&` is not seen, signals an error.
fn expect_and(&mut self) -> PResult<'a, ()> {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::And)));
match self.token {
}
}
- /// Expect and consume an `|`. If `||` is seen, replace it with a single
- /// `|` and continue. If an `|` is not seen, signal an error.
+ /// Expects and consumes an `|`. If `||` is seen, replaces it with a single
+ /// `|` and continues. If an `|` is not seen, signals an error.
fn expect_or(&mut self) -> PResult<'a, ()> {
self.expected_tokens.push(TokenType::Token(token::BinOp(token::Or)));
match self.token {
}
}
- /// Attempt to consume a `<`. If `<<` is seen, replace it with a single
- /// `<` and continue. If `<-` is seen, replace it with a single `<`
- /// and continue. If a `<` is not seen, return false.
+ /// Attempts to consume a `<`. If `<<` is seen, replaces it with a single
+ /// `<` and continues. If `<-` is seen, replaces it with a single `<`
+ /// and continues. If a `<` is not seen, returns `false`.
///
/// This is meant to be used when parsing generics on a path to get the
/// starting token.
}
}
- /// Expect and consume a GT. if a >> is seen, replace it
- /// with a single > and continue. If a GT is not seen,
- /// signal an error.
+ /// Expects and consumes a single `>` token. If a `>>` is seen, replaces it
+ /// with a single `>` and continues. If a `>` is not seen, signals an error.
fn expect_gt(&mut self) -> PResult<'a, ()> {
self.expected_tokens.push(TokenType::Token(token::Gt));
let ate = match self.token {
}
}
- /// Eat and discard tokens until one of `kets` is encountered. Respects token trees,
+ /// Eats and discards tokens until one of `kets` is encountered. Respects token trees,
/// passes through any errors encountered. Used for error recovery.
fn eat_to_tokens(&mut self, kets: &[&token::Token]) {
let handler = self.diagnostic();
}
}
- /// Parse a sequence, including the closing delimiter. The function
- /// f must consume tokens until reaching the next separator or
+ /// Parses a sequence, including the closing delimiter. The function
+ /// `f` must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_end<T, F>(&mut self,
ket: &token::Token,
Ok(val)
}
- /// Parse a sequence, not including the closing delimiter. The function
- /// f must consume tokens until reaching the next separator or
+ /// Parses a sequence, not including the closing delimiter. The function
+ /// `f` must consume tokens until reaching the next separator or
/// closing bracket.
pub fn parse_seq_to_before_end<T, F>(
&mut self,
Ok((v, recovered))
}
- /// Parse a sequence, including the closing delimiter. The function
- /// f must consume tokens until reaching the next separator or
+ /// Parses a sequence, including the closing delimiter. The function
+ /// `f` must consume tokens until reaching the next separator or
/// closing bracket.
fn parse_unspanned_seq<T, F>(
&mut self,
&self.sess.span_diagnostic
}
- /// Is the current token one of the keywords that signals a bare function
- /// type?
+ /// Is the current token one of the keywords that signals a bare function type?
fn token_is_bare_fn_keyword(&mut self) -> bool {
self.check_keyword(keywords::Fn) ||
self.check_keyword(keywords::Unsafe) ||
self.check_keyword(keywords::Extern)
}
- /// parse a `TyKind::BareFn` type:
+ /// Parses a `TyKind::BareFn` type.
fn parse_ty_bare_fn(&mut self, generic_params: Vec<GenericParam>) -> PResult<'a, TyKind> {
/*
})))
}
- /// Parse asyncness: `async` or nothing
+ /// Parses asyncness: `async` or nothing.
fn parse_asyncness(&mut self) -> IsAsync {
if self.eat_keyword(keywords::Async) {
IsAsync::Async {
}
}
- /// Parse unsafety: `unsafe` or nothing.
+ /// Parses unsafety: `unsafe` or nothing.
fn parse_unsafety(&mut self) -> Unsafety {
if self.eat_keyword(keywords::Unsafe) {
Unsafety::Unsafe
}
}
- /// Parse the items in a trait declaration
+ /// Parses the items in a trait declaration.
pub fn parse_trait_item(&mut self, at_end: &mut bool) -> PResult<'a, TraitItem> {
maybe_whole!(self, NtTraitItem, |x| x);
let attrs = self.parse_outer_attributes()?;
})
}
- /// Parse optional return type [ -> TY ] in function decl
+ /// Parses an optional return type `[ -> TY ]` in a function declaration.
fn parse_ret_ty(&mut self, allow_plus: bool) -> PResult<'a, FunctionRetTy> {
if self.eat(&token::RArrow) {
Ok(FunctionRetTy::Ty(self.parse_ty_common(allow_plus, true)?))
}
}
- // Parse a type
+ /// Parses a type.
pub fn parse_ty(&mut self) -> PResult<'a, P<Ty>> {
self.parse_ty_common(true, true)
}
- /// Parse a type in restricted contexts where `+` is not permitted.
+ /// Parses a type in restricted contexts where `+` is not permitted.
+ ///
/// Example 1: `&'a TYPE`
/// `+` is prohibited to maintain operator priority (P(+) < P(&)).
/// Example 2: `value1 as TYPE + value2`
self.look_ahead(offset + 1, |t| t == &token::Colon)
}
- /// Skip unexpected attributes and doc comments in this position and emit an appropriate error.
+ /// Skips unexpected attributes and doc comments in this position and emits an appropriate
+ /// error.
fn eat_incorrect_doc_comment(&mut self, applied_to: &str) {
if let token::DocComment(_) = self.token {
let mut err = self.diagnostic().struct_span_err(
}
}
- /// This version of parse arg doesn't necessarily require
- /// identifier names.
+ /// This version of `parse_arg` doesn't necessarily require identifier names.
fn parse_arg_general(&mut self, require_name: bool, is_trait_item: bool) -> PResult<'a, Arg> {
maybe_whole!(self, NtArg, |x| x);
Ok(Arg { ty, pat, id: ast::DUMMY_NODE_ID })
}
- /// Parse a single function argument
+ /// Parses a single function argument.
crate fn parse_arg(&mut self) -> PResult<'a, Arg> {
self.parse_arg_general(true, false)
}
- /// Parse an argument in a lambda header e.g., |arg, arg|
+ /// Parses an argument in a lambda header (e.g., `|arg, arg|`).
fn parse_fn_block_arg(&mut self) -> PResult<'a, Arg> {
let pat = self.parse_pat(Some("argument name"))?;
let t = if self.eat(&token::Colon) {
}
}
- /// Matches token_lit = LIT_INTEGER | ...
+ /// Matches `token_lit = LIT_INTEGER | ...`.
fn parse_lit_token(&mut self) -> PResult<'a, LitKind> {
let out = match self.token {
token::Interpolated(ref nt) => match nt.0 {
Ok(out)
}
- /// Matches lit = true | false | token_lit
+ /// Matches `lit = true | false | token_lit`.
crate fn parse_lit(&mut self) -> PResult<'a, Lit> {
let lo = self.span;
let lit = if self.eat_keyword(keywords::True) {
Ok(source_map::Spanned { node: lit, span: lo.to(self.prev_span) })
}
- /// matches '-' lit | lit (cf. ast_validation::AstValidator::check_expr_within_pat)
+ /// Matches `'-' lit | lit` (cf. `ast_validation::AstValidator::check_expr_within_pat`).
crate fn parse_literal_maybe_minus(&mut self) -> PResult<'a, P<Expr>> {
maybe_whole_expr!(self);
}
}
- /// Parses qualified path.
+ /// Parses a qualified path.
/// Assumes that the leading `<` has been parsed already.
///
/// `qualified_path = <type [as trait_ref]>::path`
Ok(ast::Path { segments, span: lo.to(self.prev_span) })
}
- /// Like `parse_path`, but also supports parsing `Word` meta items into paths for back-compat.
- /// This is used when parsing derive macro paths in `#[derive]` attributes.
+ /// Like `parse_path`, but also supports parsing `Word` meta items into paths for
+ /// backwards-compatibility. This is used when parsing derive macro paths in `#[derive]`
+ /// attributes.
pub fn parse_path_allowing_meta(&mut self, style: PathStyle) -> PResult<'a, ast::Path> {
let meta_ident = match self.token {
token::Interpolated(ref nt) => match nt.0 {
self.token.is_lifetime()
}
- /// Parse single lifetime 'a or panic.
+ /// Parses a single lifetime `'a` or panics.
crate fn expect_lifetime(&mut self) -> Lifetime {
if let Some(ident) = self.token.lifetime() {
let span = self.span;
}
}
- /// Parse mutability (`mut` or nothing).
+ /// Parses mutability (`mut` or nothing).
fn parse_mutability(&mut self) -> Mutability {
if self.eat_keyword(keywords::Mut) {
Mutability::Mutable
}
/// At the bottom (top?) of the precedence hierarchy,
- /// parse things like parenthesized exprs,
- /// macros, return, etc.
+ /// parses things like parenthesized exprs, macros, `return`, etc.
///
- /// N.B., this does not parse outer attributes,
- /// and is private because it only works
- /// correctly if called from parse_dot_or_call_expr().
+ /// N.B., this does not parse outer attributes, and is private because it only works
+ /// correctly if called from `parse_dot_or_call_expr()`.
fn parse_bottom_expr(&mut self) -> PResult<'a, P<Expr>> {
maybe_whole_expr!(self);
}
}
- /// Parse a block or unsafe block
+ /// Parses a block or unsafe block.
fn parse_block_expr(&mut self, opt_label: Option<Label>,
lo: Span, blk_mode: BlockCheckMode,
outer_attrs: ThinVec<Attribute>)
return Ok(self.mk_expr(blk.span, ExprKind::Block(blk, opt_label), attrs));
}
- /// parse a.b or a(13) or a[4] or just a
+ /// Parses `a.b` or `a(13)` or `a[4]` or just `a`.
fn parse_dot_or_call_expr(&mut self,
already_parsed_attrs: Option<ThinVec<Attribute>>)
-> PResult<'a, P<Expr>> {
self.span = span;
}
- /// parse a single token tree from the input.
+ /// Parses a single token tree from the input.
crate fn parse_token_tree(&mut self) -> TokenTree {
match self.token {
token::OpenDelim(..) => {
return Ok(self.mk_expr(lo.to(hi), ex, attrs));
}
- /// Parse an associative expression
+ /// Parses an associative expression.
///
/// This parses an expression accounting for associativity and precedence of the operators in
/// the expression.
self.parse_assoc_expr_with(0, already_parsed_attrs.into())
}
- /// Parse an associative expression with operators of at least `min_prec` precedence
+ /// Parses an associative expression with operators of at least `min_prec` precedence.
fn parse_assoc_expr_with(&mut self,
min_prec: usize,
lhs: LhsExpr)
}
}
- /// Parse an 'if' or 'if let' expression ('if' token already eaten)
+ /// Parses an `if` or `if let` expression (`if` token already eaten).
fn parse_if_expr(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
if self.check_keyword(keywords::Let) {
return self.parse_if_let_expr(attrs);
Ok(self.mk_expr(lo.to(hi), ExprKind::If(cond, thn, els), attrs))
}
- /// Parse an 'if let' expression ('if' token already eaten)
+ /// Parses an `if let` expression (`if` token already eaten).
fn parse_if_let_expr(&mut self, attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>> {
let lo = self.prev_span;
Ok(self.mk_expr(lo.to(hi), ExprKind::IfLet(pats, expr, thn, els), attrs))
}
- // `move |args| expr`
+ /// Parses `move |args| expr`.
fn parse_lambda_expr(&mut self,
attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>>
Ok(self.mk_expr(span_lo.to(hi), ExprKind::ForLoop(pat, expr, loop_block, opt_label), attrs))
}
- /// Parse a 'while' or 'while let' expression ('while' token already eaten)
+ /// Parses a `while` or `while let` expression (`while` token already eaten).
fn parse_while_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
return Ok(self.mk_expr(span, ExprKind::While(cond, body, opt_label), attrs));
}
- /// Parse a 'while let' expression ('while' token already eaten)
+ /// Parses a `while let` expression (`while` token already eaten).
fn parse_while_let_expr(&mut self, opt_label: Option<Label>,
span_lo: Span,
mut attrs: ThinVec<Attribute>) -> PResult<'a, P<Expr>> {
Ok(self.mk_expr(span, ExprKind::Loop(body, opt_label), attrs))
}
- /// Parse an `async move {...}` expression
+ /// Parses an `async move {...}` expression.
pub fn parse_async_block(&mut self, mut attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>>
{
ExprKind::Async(capture_clause, ast::DUMMY_NODE_ID, body), attrs))
}
- /// Parse a `try {...}` expression (`try` token already eaten)
+ /// Parses a `try {...}` expression (`try` token already eaten).
fn parse_try_block(&mut self, span_lo: Span, mut attrs: ThinVec<Attribute>)
-> PResult<'a, P<Expr>>
{
})
}
- /// Parse an expression
+ /// Parses an expression.
#[inline]
pub fn parse_expr(&mut self) -> PResult<'a, P<Expr>> {
self.parse_expr_res(Restrictions::empty(), None)
}
- /// Evaluate the closure with restrictions in place.
+ /// Evaluates the closure with restrictions in place.
///
- /// After the closure is evaluated, restrictions are reset.
+ /// After the closure is evaluated, restrictions are reset.
fn with_res<F, T>(&mut self, r: Restrictions, f: F) -> T
where F: FnOnce(&mut Self) -> T
{
}
- /// Parse an expression, subject to the given restrictions
+ /// Parses an expression, subject to the given restrictions.
#[inline]
fn parse_expr_res(&mut self, r: Restrictions,
already_parsed_attrs: Option<ThinVec<Attribute>>)
self.with_res(r, |this| this.parse_assoc_expr(already_parsed_attrs))
}
- /// Parse the RHS of a local variable declaration (e.g., '= 14;')
+ /// Parses the RHS of a local variable declaration (e.g., '= 14;').
fn parse_initializer(&mut self, skip_eq: bool) -> PResult<'a, Option<P<Expr>>> {
if self.eat(&token::Eq) {
Ok(Some(self.parse_expr()?))
}
}
- /// Parse patterns, separated by '|' s
+ /// Parses patterns, separated by '|' s.
fn parse_pats(&mut self) -> PResult<'a, Vec<P<Pat>>> {
// Allow a '|' before the pats (RFC 1925 + RFC 2530)
self.eat(&token::BinOp(token::Or));
})
}
- /// Parse the fields of a struct-like pattern
+ /// Parses the fields of a struct-like pattern.
fn parse_pat_fields(&mut self) -> PResult<'a, (Vec<source_map::Spanned<ast::FieldPat>>, bool)> {
let mut fields = Vec::new();
let mut etc = false;
Ok(pat)
}
- /// Parse a pattern.
+ /// Parses a pattern.
pub fn parse_pat(&mut self, expected: Option<&'static str>) -> PResult<'a, P<Pat>> {
self.parse_pat_with_range_pat(true, expected)
}
- /// Parse a pattern, with a setting whether modern range patterns e.g., `a..=b`, `a..b` are
- /// allowed.
+ /// Parses a pattern, with a setting for whether modern range patterns (e.g., `a..=b`,
+ /// `a..b`) are allowed.
fn parse_pat_with_range_pat(
&mut self,
allow_range_pat: bool,
Ok(P(pat))
}
- /// Parse ident or ident @ pat
+ /// Parses `ident` or `ident @ pat`.
/// used by the copy foo and ref foo patterns to give a good
- /// error message when parsing mistakes like ref foo(a,b)
+ /// error message when parsing mistakes like `ref foo(a, b)`.
fn parse_pat_ident(&mut self,
binding_mode: ast::BindingMode)
-> PResult<'a, PatKind> {
Ok(PatKind::Ident(binding_mode, ident, sub))
}
- /// Parse a local variable declaration
+ /// Parses a local variable declaration.
fn parse_local(&mut self, attrs: ThinVec<Attribute>) -> PResult<'a, P<Local>> {
let lo = self.prev_span;
let pat = self.parse_top_level_pat()?;
}))
}
- /// Parse a structure field
+ /// Parses a structure field.
fn parse_name_and_ty(&mut self,
lo: Span,
vis: Visibility,
})
}
- /// Emit an expected item after attributes error.
+ /// Emits an expected-item-after-attributes error.
fn expected_item_err(&mut self, attrs: &[Attribute]) -> PResult<'a, ()> {
let message = match attrs.last() {
Some(&Attribute { is_sugared_doc: true, .. }) => "expected item after doc comment",
}))
}
- /// Is this expression a successfully-parsed statement?
+ /// Checks if this expression is a successfully parsed statement.
fn expr_is_complete(&mut self, e: &Expr) -> bool {
self.restrictions.contains(Restrictions::STMT_EXPR) &&
!classify::expr_requires_semi_to_be_stmt(e)
}
- /// Parse a block. No inner attrs are allowed.
+ /// Parses a block. No inner attributes are allowed.
pub fn parse_block(&mut self) -> PResult<'a, P<Block>> {
maybe_whole!(self, NtBlock, |x| x);
self.parse_block_tail(lo, BlockCheckMode::Default)
}
- /// Parse a block. Inner attrs are allowed.
+ /// Parses a block. Inner attributes are allowed.
fn parse_inner_attrs_and_block(&mut self) -> PResult<'a, (Vec<Attribute>, P<Block>)> {
maybe_whole!(self, NtBlock, |x| (Vec::new(), x));
self.parse_block_tail(lo, BlockCheckMode::Default)?))
}
- /// Parse the rest of a block expression or function body
+ /// Parses the rest of a block expression or function body.
/// Precondition: already parsed the '{'.
fn parse_block_tail(&mut self, lo: Span, s: BlockCheckMode) -> PResult<'a, P<Block>> {
let mut stmts = vec![];
}))
}
- /// Parse a statement, including the trailing semicolon.
+ /// Parses a statement, including the trailing semicolon.
crate fn parse_full_stmt(&mut self, macro_legacy_warnings: bool) -> PResult<'a, Option<Stmt>> {
// skip looking for a trailing semicolon when we have an interpolated statement
maybe_whole!(self, NtStmt, |x| Some(x));
).emit();
}
- // Parse bounds of a type parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
- // BOUND = TY_BOUND | LT_BOUND
- // LT_BOUND = LIFETIME (e.g., `'a`)
- // TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
- // TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
+ /// Parses bounds of a type parameter `BOUND + BOUND + ...`, possibly with trailing `+`.
+ ///
+ /// ```text
+ /// BOUND = TY_BOUND | LT_BOUND
+ /// LT_BOUND = LIFETIME (e.g., `'a`)
+ /// TY_BOUND = TY_BOUND_NOPAREN | (TY_BOUND_NOPAREN)
+ /// TY_BOUND_NOPAREN = [?] [for<LT_PARAM_DEFS>] SIMPLE_PATH (e.g., `?for<'a: 'b> m::Trait<'a>`)
+ /// ```
fn parse_generic_bounds_common(&mut self, allow_plus: bool) -> PResult<'a, GenericBounds> {
let mut bounds = Vec::new();
loop {
self.parse_generic_bounds_common(true)
}
- // Parse bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
- // BOUND = LT_BOUND (e.g., `'a`)
+ /// Parses bounds of a lifetime parameter `BOUND + BOUND + BOUND`, possibly with trailing `+`.
+ ///
+ /// ```text
+ /// BOUND = LT_BOUND (e.g., `'a`)
+ /// ```
fn parse_lt_param_bounds(&mut self) -> GenericBounds {
let mut lifetimes = Vec::new();
while self.check_lifetime() {
lifetimes
}
- /// Matches typaram = IDENT (`?` unbound)? optbounds ( EQ ty )?
+ /// Matches `typaram = IDENT (? unbound)? optbounds ( EQ ty )?`.
fn parse_ty_param(&mut self,
preceding_attrs: Vec<Attribute>)
-> PResult<'a, GenericParam> {
}
/// Parses the following grammar:
+ ///
/// TraitItemAssocTy = Ident ["<"...">"] [":" [GenericBounds]] ["where" ...] ["=" Ty]
fn parse_trait_item_assoc_ty(&mut self)
-> PResult<'a, (Ident, TraitItemKind, ast::Generics)> {
})
}
- /// Parses (possibly empty) list of lifetime and type parameters, possibly including
- /// trailing comma and erroneous trailing attributes.
+ /// Parses a (possibly empty) list of lifetime and type parameters, possibly including
+ /// a trailing comma and erroneous trailing attributes.
crate fn parse_generic_params(&mut self) -> PResult<'a, Vec<ast::GenericParam>> {
let mut params = Vec::new();
loop {
Ok(params)
}
- /// Parse a set of optional generic type parameter declarations. Where
+ /// Parses a set of optional generic type parameter declarations. Where
/// clauses are not parsed here, and must be added later via
/// `parse_where_clause()`.
///
}
}
- /// Parse generic args (within a path segment) with recovery for extra leading angle brackets.
+ /// Parses generic args (within a path segment) with recovery for extra leading angle brackets.
/// For the purposes of understanding the parsing logic of generic arguments, this function
/// can be thought of being the same as just calling `self.parse_generic_args()` if the source
/// had the correct amount of leading angle brackets.
Ok((args, bindings))
}
- /// Parses an optional `where` clause and places it in `generics`.
+ /// Parses an optional where-clause and places it in `generics`.
///
/// ```ignore (only-for-syntax-highlight)
/// where T : Trait<U, V> + 'b, 'a : 'b
Ok((args, variadic))
}
- /// Parse the argument list and result type of a function declaration
+ /// Parses the argument list and result type of a function declaration.
fn parse_fn_decl(&mut self, allow_variadic: bool) -> PResult<'a, P<FnDecl>> {
let (args, variadic) = self.parse_fn_args(true, allow_variadic)?;
Ok(Some(Arg::from_self(eself, eself_ident)))
}
- /// Parse the parameter list and result type of a function that may have a `self` parameter.
+ /// Parses the parameter list and result type of a function that may have a `self` parameter.
fn parse_fn_decl_with_self<F>(&mut self, parse_arg_fn: F) -> PResult<'a, P<FnDecl>>
where F: FnMut(&mut Parser<'a>) -> PResult<'a, Arg>,
{
}))
}
- // parse the |arg, arg| header on a lambda
+ /// Parses the `|arg, arg|` header of a closure.
fn parse_fn_block_decl(&mut self) -> PResult<'a, P<FnDecl>> {
let inputs_captures = {
if self.eat(&token::OrOr) {
}))
}
- /// Parse the name and optional generic types of a function header.
+ /// Parses the name and optional generic types of a function header.
fn parse_fn_header(&mut self) -> PResult<'a, (Ident, ast::Generics)> {
let id = self.parse_ident()?;
let generics = self.parse_generics()?;
})
}
- /// Parse an item-position function declaration.
+ /// Parses an item-position function declaration.
fn parse_item_fn(&mut self,
unsafety: Unsafety,
asyncness: IsAsync,
Ok((ident, ItemKind::Fn(decl, header, generics, body), Some(inner_attrs)))
}
- /// true if we are looking at `const ID`, false for things like `const fn` etc
+ /// Returns `true` if we are looking at `const ID`
+ /// (returns `false` for things like `const fn`, etc.).
fn is_const_item(&mut self) -> bool {
self.token.is_keyword(keywords::Const) &&
!self.look_ahead(1, |t| t.is_keyword(keywords::Fn)) &&
!self.look_ahead(1, |t| t.is_keyword(keywords::Unsafe))
}
- /// parses all the "front matter" for a `fn` declaration, up to
+ /// Parses all the "front matter" for a `fn` declaration, up to
/// and including the `fn` keyword:
///
/// - `const fn`
/// - `unsafe fn`
/// - `const unsafe fn`
/// - `extern fn`
- /// - etc
+ /// - etc.
fn parse_fn_front_matter(&mut self)
-> PResult<'a, (
Spanned<Constness>,
Ok((constness, unsafety, asyncness, abi))
}
- /// Parse an impl item.
+ /// Parses an impl item.
pub fn parse_impl_item(&mut self, at_end: &mut bool) -> PResult<'a, ImplItem> {
maybe_whole!(self, NtImplItem, |x| x);
let attrs = self.parse_outer_attributes()?;
}
}
- /// Parse `trait Foo { ... }` or `trait Foo = Bar;`
+ /// Parses `trait Foo { ... }` or `trait Foo = Bar;`.
fn parse_item_trait(&mut self, is_auto: IsAuto, unsafety: Unsafety) -> PResult<'a, ItemInfo> {
let ident = self.parse_ident()?;
let mut tps = self.parse_generics()?;
let bounds = self.parse_generic_bounds()?;
tps.where_clause = self.parse_where_clause()?;
self.expect(&token::Semi)?;
+ if is_auto == IsAuto::Yes {
+ let msg = "trait aliases cannot be `auto`";
+ self.struct_span_err(self.prev_span, msg)
+ .span_label(self.prev_span, msg)
+ .emit();
+ }
if unsafety != Unsafety::Normal {
- let msg = "trait aliases cannot be unsafe";
+ let msg = "trait aliases cannot be `unsafe`";
self.struct_span_err(self.prev_span, msg)
.span_label(self.prev_span, msg)
.emit();
}
/// Parses an implementation item, `impl` keyword is already parsed.
+ ///
/// impl<'a, T> TYPE { /* impl items */ }
/// impl<'a, T> TRAIT for TYPE { /* impl items */ }
/// impl<'a, T> !TRAIT for TYPE { /* impl items */ }
+ ///
/// We actually parse slightly more relaxed grammar for better error reporting and recovery.
/// `impl` GENERICS `!`? TYPE `for`? (TYPE | `..`) (`where` PREDICATES)? `{` BODY `}`
/// `impl` GENERICS `!`? TYPE (`where` PREDICATES)? `{` BODY `}`
}
}
- /// Parse struct Foo { ... }
+ /// Parses `struct Foo { ... }`.
fn parse_item_struct(&mut self) -> PResult<'a, ItemInfo> {
let class_name = self.parse_ident()?;
Ok((class_name, ItemKind::Struct(vdata, generics), None))
}
- /// Parse union Foo { ... }
+ /// Parses `union Foo { ... }`.
fn parse_item_union(&mut self) -> PResult<'a, ItemInfo> {
let class_name = self.parse_ident()?;
Ok(fields)
}
- /// Parse a structure field declaration
+ /// Parses a structure field declaration.
fn parse_single_struct_field(&mut self,
lo: Span,
vis: Visibility,
Ok(a_var)
}
- /// Parse an element of a struct definition
+ /// Parses an element of a struct declaration.
fn parse_struct_decl_field(&mut self) -> PResult<'a, StructField> {
let attrs = self.parse_outer_attributes()?;
let lo = self.span;
self.parse_single_struct_field(lo, vis, attrs)
}
- /// Parse `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
+ /// Parses `pub`, `pub(crate)` and `pub(in path)` plus shortcuts `crate` for `pub(crate)`,
/// `pub(self)` for `pub(in self)` and `pub(super)` for `pub(in super)`.
- /// If the following element can't be a tuple (i.e., it's a function definition,
- /// it's not a tuple struct field) and the contents within the parens
- /// isn't valid, emit a proper diagnostic.
+ /// If the following element can't be a tuple (i.e., it's a function definition), then
+ /// it's not a tuple struct field, and the contents within the parentheses aren't valid,
+ /// so emit a proper diagnostic.
pub fn parse_visibility(&mut self, can_take_tuple: bool) -> PResult<'a, Visibility> {
maybe_whole!(self, NtVis, |x| x);
Ok(respan(lo, VisibilityKind::Public))
}
- /// Parse defaultness: `default` or nothing.
+ /// Parses defaultness (i.e., `default` or nothing).
fn parse_defaultness(&mut self) -> Defaultness {
// `pub` is included for better error messages
if self.check_keyword(keywords::Default) &&
}
}
- /// Given a termination token, parse all of the items in a module
+ /// Given a termination token, parses all of the items in a module.
fn parse_mod_items(&mut self, term: &token::Token, inner_lo: Span) -> PResult<'a, Mod> {
let mut items = vec![];
while let Some(item) = self.parse_item()? {
}
}
- /// Returns either a path to a module, or .
+ /// Returns a path to a module.
pub fn default_submod_path(
id: ast::Ident,
relative: Option<ast::Ident>,
}
}
- /// Read a module from a source file.
+ /// Reads a module from a source file.
fn eval_src_mod(&mut self,
path: PathBuf,
directory_ownership: DirectoryOwnership,
Ok((m0, mod_attrs))
}
- /// Parse a function declaration from a foreign module
+ /// Parses a function declaration from a foreign module.
fn parse_item_foreign_fn(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
self.expect_keyword(keywords::Fn)?;
})
}
- /// Parse a static item from a foreign module.
+ /// Parses a static item from a foreign module.
/// Assumes that the `static` keyword is already parsed.
fn parse_item_foreign_static(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
})
}
- /// Parse a type from a foreign module
+ /// Parses a type from a foreign module.
fn parse_item_foreign_type(&mut self, vis: ast::Visibility, lo: Span, attrs: Vec<Attribute>)
-> PResult<'a, ForeignItem> {
self.expect_keyword(keywords::Type)?;
Ok(ident)
}
- /// Parse extern crate links
+ /// Parses `extern crate` links.
///
/// # Examples
///
+ /// ```ignore (only-for-syntax-highlight)
/// extern crate foo;
/// extern crate bar as foo;
+ /// ```
fn parse_item_extern_crate(&mut self,
lo: Span,
visibility: Visibility,
Ok(self.mk_item(span, item_name, ItemKind::ExternCrate(orig_name), visibility, attrs))
}
- /// Parse `extern` for foreign ABIs
- /// modules.
+ /// Parses `extern` for foreign ABI modules.
///
/// `extern` is expected to have been
- /// consumed before calling this method
+ /// consumed before calling this method.
///
- /// # Examples:
+ /// # Examples
///
+ /// ```ignore (only-for-syntax-highlight)
/// extern "C" {}
/// extern {}
+ /// ```
fn parse_item_foreign_mod(&mut self,
lo: Span,
opt_abi: Option<Abi>,
Ok(self.mk_item(lo.to(prev_span), invalid, ItemKind::ForeignMod(m), visibility, attrs))
}
- /// Parse `type Foo = Bar;`
+ /// Parses `type Foo = Bar;`
/// or
/// `existential type Foo: Bar;`
/// or
- /// `return None` without modifying the parser state
+ /// returns `None`
+ /// without modifying the parser state.
fn eat_type(&mut self) -> Option<PResult<'a, (Ident, AliasKind, ast::Generics)>> {
// This parses the grammar:
// Ident ["<"...">"] ["where" ...] ("=" | ":") Ty ";"
}
}
- /// Parse type alias or existential type
+ /// Parses a type alias or existential type.
fn parse_existential_or_alias(
&mut self,
existential: bool,
Ok((ident, alias, tps))
}
- /// Parse the part of an "enum" decl following the '{'
+ /// Parses the part of an enum declaration following the `{`.
fn parse_enum_def(&mut self, _generics: &ast::Generics) -> PResult<'a, EnumDef> {
let mut variants = Vec::new();
let mut all_nullary = true;
Ok(ast::EnumDef { variants })
}
- /// Parse an "enum" declaration
+ /// Parses an enum declaration.
fn parse_item_enum(&mut self) -> PResult<'a, ItemInfo> {
let id = self.parse_ident()?;
let mut generics = self.parse_generics()?;
}))
}
- /// Parse one of the items allowed by the flags.
+ /// Parses one of the items allowed by the flags.
fn parse_item_implementation(
&mut self,
attrs: Vec<Attribute>,
self.parse_macro_use_or_failure(attrs, macros_allowed, attributes_allowed, lo, visibility)
}
- /// Parse a foreign item.
+ /// Parses a foreign item.
crate fn parse_foreign_item(&mut self) -> PResult<'a, ForeignItem> {
maybe_whole!(self, NtForeignItem, |ni| ni);
Ok(None)
}
- /// Parse a macro invocation inside a `trait`, `impl` or `extern` block
+ /// Parses a macro invocation inside a `trait`, `impl` or `extern` block.
fn parse_assoc_macro_invoc(&mut self, item_kind: &str, vis: Option<&Visibility>,
at_end: &mut bool) -> PResult<'a, Option<Mac>>
{
*t == token::BinOp(token::Star))
}
- /// Parse UseTree
+ /// Parses a `UseTree`.
///
+ /// ```text
/// USE_TREE = [`::`] `*` |
/// [`::`] `{` USE_TREE_LIST `}` |
/// PATH `::` `*` |
/// PATH `::` `{` USE_TREE_LIST `}` |
/// PATH [`as` IDENT]
+ /// ```
fn parse_use_tree(&mut self) -> PResult<'a, UseTree> {
let lo = self.span;
Ok(UseTree { prefix, kind, span: lo.to(self.prev_span) })
}
- /// Parse UseTreeKind::Nested(list)
+ /// Parses a `UseTreeKind::Nested(list)`.
///
+ /// ```text
/// USE_TREE_LIST = Ø | (USE_TREE `,`)* USE_TREE [`,`]
+ /// ```
fn parse_use_tree_list(&mut self) -> PResult<'a, Vec<(UseTree, ast::NodeId)>> {
self.parse_unspanned_seq(&token::OpenDelim(token::Brace),
&token::CloseDelim(token::Brace),
}
}
- /// Parses a source module as a crate. This is the main
- /// entry point for the parser.
+ /// Parses a source module as a crate. This is the main entry point for the parser.
pub fn parse_crate_mod(&mut self) -> PResult<'a, Crate> {
let lo = self.span;
let krate = Ok(ast::Crate {
Shr,
}
-/// A delimiter token
+/// A delimiter token.
#[derive(Clone, PartialEq, RustcEncodable, RustcDecodable, Hash, Debug, Copy)]
pub enum DelimToken {
- /// A round parenthesis: `(` or `)`
+ /// A round parenthesis (i.e., `(` or `)`).
Paren,
- /// A square bracket: `[` or `]`
+ /// A square bracket (i.e., `[` or `]`).
Bracket,
- /// A curly brace: `{` or `}`
+ /// A curly brace (i.e., `{` or `}`).
Brace,
- /// An empty delimiter
+ /// An empty delimiter.
NoDelim,
}
Question,
/// Used by proc macros for representing lifetimes, not generated by lexer right now.
SingleQuote,
- /// An opening delimiter, eg. `{`
+ /// An opening delimiter (e.g., `{`).
OpenDelim(DelimToken),
- /// A closing delimiter, eg. `}`
+ /// A closing delimiter (e.g., `}`).
CloseDelim(DelimToken),
/* Literals */
// and so the `LazyTokenStream` can be ignored by Eq, Hash, etc.
Interpolated(Lrc<(Nonterminal, LazyTokenStream)>),
// Can be expanded into several tokens.
- /// Doc comment
+ /// A doc comment.
DocComment(ast::Name),
// Junk. These carry no data because we don't really care about the data
// they *would* carry, and don't really want to allocate a new ident for
// them. Instead, users could extract that from the associated span.
- /// Whitespace
+ /// Whitespace.
Whitespace,
- /// Comment
+ /// A comment.
Comment,
Shebang(ast::Name),
//! This pretty-printer is a direct reimplementation of Philip Karlton's
//! Mesa pretty-printer, as described in appendix A of
//!
-//! ````text
+//! ```text
//! STAN-CS-79-770: "Pretty Printing", by Derek C. Oppen.
//! Stanford Department of Computer Science, 1979.
-//! ````
+//! ```
//!
//! The algorithm's aim is to break a stream into as few lines as possible
//! while respecting the indentation-consistency requirements of the enclosing
match lit.node {
ast::LitKind::Str(st, style) => self.print_string(&st.as_str(), style),
ast::LitKind::Err(st) => {
- let st = st.as_str().escape_debug();
+ let st = st.as_str().escape_debug().to_string();
let mut res = String::with_capacity(st.len() + 2);
res.push('\'');
res.push_str(&st);
-//! The AST pointer
+//! The AST pointer.
//!
//! Provides `P<T>`, a frozen owned smart pointer, as a replacement for `@T` in
//! the AST.
use std::str::FromStr;
use crate::ast;
-use crate::errors;
use crate::visit;
use crate::visit::Visitor;
use std::io;
use log::debug;
-use crate::errors::SourceMapper;
+use errors::SourceMapper;
-/// Return the span itself if it doesn't come from a macro expansion,
+/// Returns the span itself if it doesn't come from a macro expansion,
/// otherwise return the call site span up to the `enclosing_sp` by
/// following the `expn_info` chain.
pub fn original_sp(sp: Span, enclosing_sp: Span) -> Span {
/// Query the existence of a file.
fn file_exists(&self, path: &Path) -> bool;
- /// Return an absolute path to a file, if possible.
+ /// Returns an absolute path to a file, if possible.
fn abs_path(&self, path: &Path) -> Option<PathBuf>;
/// Read the contents of an UTF-8 file into memory.
}
}
- /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If
+ /// Returns `Some(span)`, a union of the lhs and rhs span. The lhs must precede the rhs. If
/// there are gaps between lhs and rhs, the resulting union will cross these gaps.
/// For this to work, the spans have to be:
///
Ok(FileLines {file: lo.file, lines: lines})
}
- /// Extract the source surrounding the given `Span` using the `extract_source` function. The
+ /// Extracts the source surrounding the given `Span` using the `extract_source` function. The
/// extract function takes three arguments: a string slice containing the source, an index in
/// the slice for the beginning of the span and an index in the slice for the end of the span.
fn span_to_source<F>(&self, sp: Span, extract_source: F) -> Result<String, SpanSnippetError>
}
}
- /// Return the source snippet as `String` corresponding to the given `Span`
+ /// Returns the source snippet as `String` corresponding to the given `Span`
pub fn span_to_snippet(&self, sp: Span) -> Result<String, SpanSnippetError> {
self.span_to_source(sp, |src, start_index, end_index| src[start_index..end_index]
.to_string())
}
}
- /// Return the source snippet as `String` before the given `Span`
+ /// Returns the source snippet as `String` before the given `Span`
pub fn span_to_prev_source(&self, sp: Span) -> Result<String, SpanSnippetError> {
self.span_to_source(sp, |src, start_index, _| src[..start_index].to_string())
}
/// Given a string like " ~~~~~~~~~~~~ ", produces a span
/// converting that range. The idea is that the string has the same
- /// length as the input, and we uncover the byte positions. Note
+ /// length as the input, and we uncover the byte positions. Note
/// that this can span lines and so on.
fn span_from_selection(input: &str, selection: &str) -> Span {
assert_eq!(input.len(), selection.len());
Span::new(BytePos(left_index), BytePos(right_index + 1), NO_EXPANSION)
}
- /// Test span_to_snippet and span_to_lines for a span converting 3
+ /// Tests span_to_snippet and span_to_lines for a span converting 3
/// lines in the middle of a file.
#[test]
fn span_to_snippet_and_lines_spanning_multiple_lines() {
assert_eq!(sstr, "blork.rs:2:1: 2:12");
}
- /// Test failing to merge two spans on different lines
+ /// Tests failing to merge two spans on different lines
#[test]
fn span_merging_fail() {
let sm = SourceMap::new(FilePathMapping::empty());
use log::debug;
use smallvec::{smallvec, SmallVec};
-use syntax_pos::{self, DUMMY_SP, NO_EXPANSION, Span, SourceFile, BytePos};
+use syntax_pos::{DUMMY_SP, NO_EXPANSION, Span, SourceFile, BytePos};
use crate::attr::{self, HasAttrs};
use crate::source_map::{self, SourceMap, ExpnInfo, MacroAttribute, dummy_spanned, respan};
-use crate::errors;
use crate::config;
use crate::entry::{self, EntryPointType};
use crate::ext::base::{ExtCtxt, Resolver};
use crate::source_map::{SourceMap, FilePathMapping};
-use crate::errors::Handler;
-use crate::errors::emitter::EmitterWriter;
use crate::with_globals;
+use errors::Handler;
+use errors::emitter::EmitterWriter;
+
use std::io;
use std::io::prelude::*;
use rustc_data_structures::sync::Lrc;
//! which are themselves a single `Token` or a `Delimited` subsequence of tokens.
//!
//! ## Ownership
+//!
//! `TokenStreams` are persistent data structures constructed as ropes with reference
//! counted-children. In general, this means that calling an operation on a `TokenStream`
//! (such as `slice`) produces an entirely new `TokenStream` from the borrowed reference to
macro_parser::parse(cx.parse_sess(), tts, mtch, Some(directory), true)
}
- /// Check if this TokenTree is equal to the other, regardless of span information.
+ /// Checks if this TokenTree is equal to the other, regardless of span information.
pub fn eq_unspanned(&self, other: &TokenTree) -> bool {
match (self, other) {
(&TokenTree::Token(_, ref tk), &TokenTree::Token(_, ref tk2)) => tk == tk2,
}
}
- /// Retrieve the TokenTree's span.
+ /// Retrieves the TokenTree's span.
pub fn span(&self) -> Span {
match *self {
TokenTree::Token(sp, _) => sp,
use std::cmp;
use crate::symbol::Symbol;
-/// Find the Levenshtein distance between two strings
+/// Finds the Levenshtein distance between two strings
pub fn lev_distance(a: &str, b: &str) -> usize {
// cases which don't require further computation
if a.is_empty() {
dcol[t_last + 1]
}
-/// Find the best match for a given word in the given iterator
+/// Finds the best match for a given word in the given iterator
///
/// As a loose rule to avoid the obviously incorrect suggestions, it takes
/// an optional limit for the maximum allowable edit distance, which defaults
}
impl AssocOp {
- /// Create a new AssocOP from a token
+ /// Creates a new `AssocOp` from a token.
pub fn from_token(t: &Token) -> Option<AssocOp> {
use AssocOp::*;
match *t {
}
}
- /// Create a new AssocOp from ast::BinOpKind.
+ /// Creates a new AssocOp from ast::BinOpKind.
pub fn from_ast_binop(op: BinOpKind) -> Self {
use AssocOp::*;
match op {
})
}
-/// Convert a vector of strings to a vector of Ident's
+/// Converts a vector of strings to a vector of `Ident`s.
pub fn strs_to_idents(ids: Vec<&str> ) -> Vec<Ident> {
ids.iter().map(|u| Ident::from_str(*u)).collect()
}
//! Note: it is an important invariant that the default visitor walks the body
//! of a function in "execution order" (more concretely, reverse post-order
//! with respect to the CFG implied by the AST), meaning that if AST node A may
-//! execute before AST node B, then A is visited first. The borrow checker in
+//! execute before AST node B, then A is visited first. The borrow checker in
//! particular relies on this property.
//!
//! Note: walking an AST before macro expansion is probably a bad idea. For
}
/// Each method of the Visitor trait is a hook to be potentially
-/// overridden. Each method's default implementation recursively visits
+/// overridden. Each method's default implementation recursively visits
/// the substructure of the input via the corresponding `walk` method;
/// e.g., the `visit_mod` method by default calls `visit::walk_mod`.
///
/// If you want to ensure that your code handles every variant
-/// explicitly, you need to override each method. (And you also need
+/// explicitly, you need to override each method. (And you also need
/// to monitor future changes to `Visitor` in case a new method with a
/// new default implementation gets introduced.)
pub trait Visitor<'ast>: Sized {
[dependencies]
fmt_macros = { path = "../libfmt_macros" }
-rustc_errors = { path = "../librustc_errors" }
+errors = { path = "../librustc_errors", package = "rustc_errors" }
syntax = { path = "../libsyntax" }
syntax_pos = { path = "../libsyntax_pos" }
rustc_data_structures = { path = "../librustc_data_structures" }
use rustc_data_structures::thin_vec::ThinVec;
-use crate::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
use syntax::ast;
use syntax::ext::base::{self, *};
-use crate::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
use syntax::ast::{self, *};
use syntax::source_map::Spanned;
/// a literal `true` or `false` based on whether the given cfg matches the
/// current compilation environment.
-use crate::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
use syntax::ast;
use syntax::ext::base::{self, *};
-use crate::errors::FatalError;
use crate::proc_macro_impl::EXEC_STRATEGY;
use crate::proc_macro_server;
+use errors::FatalError;
use syntax::ast::{self, ItemKind, Attribute, Mac};
use syntax::attr::{mark_used, mark_known};
use syntax::source_map::Span;
};
}
-/// Create a decoder for a single enum variant/struct:
+/// Creates a decoder for a single enum variant/struct:
/// - `outer_pat_path` is the path to this enum variant/struct
/// - `getarg` should retrieve the `usize`-th field with name `@str`.
fn decode_static_fields<F>(cx: &mut ExtCtxt<'_>,
//! The compiler code necessary to implement the `#[derive(Encodable)]`
-//! (and `Decodable`, in decodable.rs) extension. The idea here is that
+//! (and `Decodable`, in `decodable.rs`) extension. The idea here is that
//! type-defining items may be tagged with `#[derive(Encodable, Decodable)]`.
//!
//! For example, a type like:
//! ```
//!
//! Other interesting scenarios are when the item has type parameters or
-//! references other non-built-in types. A type definition like:
+//! references other non-built-in types. A type definition like:
//!
//! ```
//! # #[derive(Encodable, Decodable)] struct Span;
/// Arguments other than the self argument
pub args: Vec<(Ty<'a>, &'a str)>,
- /// Return type
+ /// Return type.
pub ret_ty: Ty<'a>,
pub attributes: Vec<ast::Attribute>,
EnumMatching(usize, usize, &'a ast::Variant, Vec<FieldInfo<'a>>),
/// Non-matching variants of the enum, but with all state hidden from
- /// the consequent code. The first component holds `Ident`s for all of
+ /// the consequent code. The first component holds `Ident`s for all of
/// the `Self` arguments; the second component is a slice of all of the
/// variants for the enum itself, and the third component is a list of
/// `Ident`s bound to the variant index values for each of the actual
pub type CombineSubstructureFunc<'a> =
Box<dyn FnMut(&mut ExtCtxt<'_>, Span, &Substructure<'_>) -> P<Expr> + 'a>;
-/// Deal with non-matching enum variants. The tuple is a list of
+/// Deal with non-matching enum variants. The tuple is a list of
/// identifiers (one for each `Self` argument, which could be any of the
/// variants since they have been collapsed together) and the identifiers
-/// holding the variant index value for each of the `Self` arguments. The
+/// holding the variant index value for each of the `Self` arguments. The
/// last argument is all the non-`Self` args of the method being derived.
pub type EnumNonMatchCollapsedFunc<'a> =
Box<dyn FnMut(&mut ExtCtxt<'_>, Span, (&[Ident], &[Ident]), &[P<Expr>]) -> P<Expr> + 'a>;
/// create an impl like:
///
/// ```ignore (only-for-syntax-highlight)
- /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where
+ /// impl<'a, ..., 'z, A, B: DeclaredTrait, C, ... Z> where
/// C: WhereTrait,
/// A: DerivedTrait + B1 + ... + BN,
/// B: DerivedTrait + B1 + ... + BN,
///
/// (Of course `__self_vi` and `__arg_1_vi` are unused for
/// `PartialEq`, and those subcomputations will hopefully be removed
- /// as their results are unused. The point of `__self_vi` and
+ /// as their results are unused. The point of `__self_vi` and
/// `__arg_1_vi` is for `PartialOrd`; see #15503.)
fn expand_enum_method_body<'b>(&self,
cx: &mut ExtCtxt<'_>,
/// Function to fold over fields, with three cases, to generate more efficient and concise code.
/// When the `substructure` has grouped fields, there are two cases:
-/// Zero fields: call the base case function with None (like the usual base case of `cs_fold`).
+/// Zero fields: call the base case function with `None` (like the usual base case of `cs_fold`).
/// One or more fields: call the base case function on the first value (which depends on
/// `use_fold`), and use that as the base case. Then perform `cs_fold` on the remainder of the
/// fields.
}
}
-/// Return true if the type has no value fields
+/// Returns `true` if the type has no value fields
/// (for an enum, no variant has any fields)
pub fn is_type_without_fields(item: &Annotatable) -> bool {
if let Annotatable::Item(ref item) = *item {
use fmt_macros as parse;
-use crate::errors::DiagnosticBuilder;
-use crate::errors::Applicability;
+use errors::DiagnosticBuilder;
+use errors::Applicability;
use syntax::ast;
use syntax::ext::base::{self, *};
self.ecx.expr_str(sp, s)
}
- /// Build a static `rt::Argument` from a `parse::Piece` or append
+ /// Builds a static `rt::Argument` from a `parse::Piece` or append
/// to the `literal` string.
fn build_piece(&mut self,
piece: &parse::Piece<'_>,
}
/// Actually builds the expression which the format_args! block will be
- /// expanded to
+ /// expanded to.
fn into_expr(self) -> P<ast::Expr> {
let mut locals = Vec::with_capacity(
(0..self.args.len()).map(|i| self.arg_unique_types[i].len()).sum()
},
};
- /// Find the indices of all characters that have been processed and differ between the actual
+ /// Finds the indices of all characters that have been processed and differ between the actual
/// written code (code snippet) and the `InternedString` that get's processed in the `Parser`
/// in order to properly synthethise the intra-string `Span`s for error diagnostics.
fn find_skips(snippet: &str, is_raw: bool) -> Vec<usize> {
);
}
- /// Check that the translations are what we expect.
+ /// Checks that the translations are what we expect.
#[test]
fn test_translation() {
assert_eq_pnsat!("%c", Some("{}"));
/// LLVM's `module asm "some assembly here"`. All of LLVM's caveats
/// therefore apply.
-use crate::errors::DiagnosticBuilder;
+use errors::DiagnosticBuilder;
use syntax::ast;
use syntax::source_map::respan;
#![feature(proc_macro_span)]
#![feature(decl_macro)]
#![feature(nll)]
-#![feature(str_escape)]
#![feature(rustc_diagnostic_macros)]
#![recursion_limit="256"]
extern crate proc_macro;
-use rustc_errors as errors;
-
mod diagnostics;
mod asm;
use std::mem;
use crate::deriving;
-use crate::errors;
use syntax::ast::{self, Ident};
use syntax::attr;
-use crate::errors::FatalError;
use crate::proc_macro_server;
+use errors::FatalError;
use syntax::source_map::Span;
use syntax::ext::base::{self, *};
use syntax::tokenstream::TokenStream;
-use crate::errors::{self, Diagnostic, DiagnosticBuilder};
+use errors::{Diagnostic, DiagnosticBuilder};
use std::panic;
use unicode_width::UnicodeWidthChar;
use super::*;
-/// Find all newlines, multi-byte characters, and non-narrow characters in a
+/// Finds all newlines, multi-byte characters, and non-narrow characters in a
/// SourceFile.
///
/// This function will use an SSE2 enhanced implementation if hardware support
}
}
- /// Check 16 byte chunks of text at a time. If the chunk contains
+ /// Checks 16 byte chunks of text at a time. If the chunk contains
/// something other than printable ASCII characters and newlines, the
/// function falls back to the generic implementation. Otherwise it uses
/// SSE2 intrinsics to quickly find all newlines.
dollar_crate_name: Symbol,
}
-/// A mark is a unique id associated with a macro expansion.
+/// A mark is a unique ID associated with a macro expansion.
#[derive(Copy, Clone, PartialEq, Eq, Hash, Debug, RustcEncodable, RustcDecodable)]
pub struct Mark(u32);
QuestionMark,
TryBlock,
/// Desugaring of an `impl Trait` in return type position
- /// to an `existential type Foo: Trait;` + replacing the
+ /// to an `existential type Foo: Trait;` and replacing the
/// `impl Trait` with `Foo`.
ExistentialReturnType,
Async,
#[derive(Debug, Eq, PartialEq, Clone, Ord, PartialOrd, Hash, RustcDecodable, RustcEncodable)]
pub enum FileName {
Real(PathBuf),
- /// A macro. This includes the full name of the macro, so that there are no clashes.
+ /// A macro. This includes the full name of the macro, so that there are no clashes.
Macros(String),
/// Call to `quote!`.
QuoteExpansion(u64),
if self.is_dummy() { other } else { self }
}
- /// Return `true` if `self` fully encloses `other`.
+ /// Returns `true` if `self` fully encloses `other`.
pub fn contains(self, other: Span) -> bool {
let span = self.data();
let other = other.data();
span.lo <= other.lo && other.hi <= span.hi
}
- /// Return `true` if `self` touches `other`.
+ /// Returns `true` if `self` touches `other`.
pub fn overlaps(self, other: Span) -> bool {
let span = self.data();
let other = other.data();
span.lo < other.hi && other.lo < span.hi
}
- /// Return true if the spans are equal with regards to the source text.
+ /// Returns `true` if the spans are equal with regards to the source text.
///
/// Use this instead of `==` when either span could be generated code,
/// and you only care that they point to the same bytes of source text.
}
}
- /// Return the source span -- this is either the supplied span, or the span for
+ /// Returns the source span -- this is either the supplied span, or the span for
/// the macro callsite that expanded to it.
pub fn source_callsite(self) -> Span {
self.ctxt().outer().expn_info().map(|info| info.call_site.source_callsite()).unwrap_or(self)
self.edition() >= edition::Edition::Edition2018
}
- /// Return the source callee.
+ /// Returns the source callee.
///
/// Returns `None` if the supplied span has no expansion trace,
/// else returns the `ExpnInfo` for the macro definition
self.ctxt().outer().expn_info().map(source_callee)
}
- /// Check if a span is "internal" to a macro in which `#[unstable]`
+ /// Checks if a span is "internal" to a macro in which `#[unstable]`
/// items can be used (that is, a macro marked with
/// `#[allow_internal_unstable]`).
pub fn allows_unstable(&self, feature: &str) -> bool {
}
}
- /// Check if this span arises from a compiler desugaring of kind `kind`.
+ /// Checks if this span arises from a compiler desugaring of kind `kind`.
pub fn is_compiler_desugaring(&self, kind: CompilerDesugaringKind) -> bool {
match self.ctxt().outer().expn_info() {
Some(info) => match info.format {
}
}
- /// Return the compiler desugaring that created this span, or `None`
+ /// Returns the compiler desugaring that created this span, or `None`
/// if this span is not from a desugaring.
pub fn compiler_desugaring_kind(&self) -> Option<CompilerDesugaringKind> {
match self.ctxt().outer().expn_info() {
}
}
- /// Check if a span is "internal" to a macro in which `unsafe`
+ /// Checks if a span is "internal" to a macro in which `unsafe`
/// can be used without triggering the `unsafe_code` lint
// (that is, a macro marked with `#[allow_internal_unsafe]`).
pub fn allows_unsafe(&self) -> bool {
result
}
- /// Return a `Span` that would enclose both `self` and `end`.
+ /// Returns a `Span` that would enclose both `self` and `end`.
pub fn to(self, end: Span) -> Span {
let span_data = self.data();
let end_data = end.data();
)
}
- /// Return a `Span` between the end of `self` to the beginning of `end`.
+ /// Returns a `Span` between the end of `self` to the beginning of `end`.
pub fn between(self, end: Span) -> Span {
let span = self.data();
let end = end.data();
)
}
- /// Return a `Span` between the beginning of `self` to the beginning of `end`.
+ /// Returns a `Span` between the beginning of `self` to the beginning of `end`.
pub fn until(self, end: Span) -> Span {
let span = self.data();
let end = end.data();
&self.primary_spans
}
- /// Returns whether any of the primary spans is displayable.
+ /// Returns `true` if any of the primary spans are displayable.
pub fn has_primary_spans(&self) -> bool {
self.primary_spans.iter().any(|sp| !sp.is_dummy())
}
}
/// Replaces all occurrences of one Span with another. Used to move `Span`s in areas that don't
- /// display well (like std macros). Returns true if replacements occurred.
+ /// display well (like std macros). Returns whether replacements occurred.
pub fn replace(&mut self, before: Span, after: Span) -> bool {
let mut replacements_occurred = false;
for primary_span in &mut self.primary_spans {
span_labels
}
- /// Returns whether any of the span labels is displayable.
+ /// Returns `true` if any of the span labels is displayable.
pub fn has_span_labels(&self) -> bool {
self.span_labels.iter().any(|(sp, _)| !sp.is_dummy())
}
/// originate from files has names between angle brackets by convention
/// (e.g., `<anon>`).
pub name: FileName,
- /// True if the `name` field above has been modified by `--remap-path-prefix`.
+ /// `true` if the `name` field above has been modified by `--remap-path-prefix`.
pub name_was_remapped: bool,
/// The unmapped path of the file that the source came from.
/// Set to `None` if the `SourceFile` was imported from an external crate.
}
}
- /// Return the `BytePos` of the beginning of the current line.
+ /// Returns the `BytePos` of the beginning of the current line.
pub fn line_begin_pos(&self, pos: BytePos) -> BytePos {
let line_index = self.lookup_line(pos).unwrap();
self.lines[line_index]
}
}
- /// Get a line from the list of pre-computed line-beginnings.
+ /// Gets a line from the list of pre-computed line-beginnings.
/// The line number here is 0-based.
pub fn get_line(&self, line_number: usize) -> Option<Cow<'_, str>> {
fn get_until_newline(src: &str, begin: usize) -> &str {
self.lines.len()
}
- /// Find the line containing the given position. The return value is the
+ /// Finds the line containing the given position. The return value is the
/// index into the `lines` array of this `SourceFile`, not the 1-based line
/// number. If the source_file is empty or the position is located before the
/// first line, `None` is returned.
}
}
-/// Remove utf-8 BOM if any.
+/// Removes UTF-8 BOM, if any.
fn remove_bom(src: &mut String) {
if src.starts_with("\u{feff}") {
src.drain(..3);
Ident::with_empty_ctxt(Symbol::intern(string))
}
- /// Replace `lo` and `hi` with those from `span`, but keep hygiene context.
+ /// Replaces `lo` and `hi` with those from `span`, but keep hygiene context.
pub fn with_span_pos(self, span: Span) -> Ident {
Ident::new(self.name, span.with_ctxt(self.span.ctxt()))
}
}
}
-/// A symbol is an interned or gensymed string. The use of newtype_index! means
-/// that Option<Symbol> only takes up 4 bytes, because newtype_index! reserves
+/// A symbol is an interned or gensymed string. The use of `newtype_index!` means
+/// that `Option<Symbol>` only takes up 4 bytes, because `newtype_index!` reserves
/// the last 256 values for tagging purposes.
///
-/// Note that Symbol cannot be a newtype_index! directly because it implements
-/// fmt::Debug, Encodable, and Decodable in special ways.
+/// Note that `Symbol` cannot directly be a `newtype_index!` because it implements
+/// `fmt::Debug`, `Encodable`, and `Decodable` in special ways.
#[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Symbol(SymbolIndex);
with_interner(|interner| interner.interned(self))
}
- /// Gensyms a new usize, using the current interner.
+ /// Gensyms a new `usize`, using the current interner.
pub fn gensym(string: &str) -> Self {
with_interner(|interner| interner.gensym(string))
}
pub type StderrTerminal = dyn Terminal<Output = Stderr> + Send;
#[cfg(not(windows))]
-/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
+/// Returns a Terminal wrapping stdout, or None if a terminal couldn't be
/// opened.
pub fn stdout() -> Option<Box<StdoutTerminal>> {
TerminfoTerminal::new(io::stdout()).map(|t| Box::new(t) as Box<StdoutTerminal>)
}
#[cfg(windows)]
-/// Return a Terminal wrapping stdout, or None if a terminal couldn't be
+/// Returns a Terminal wrapping stdout, or None if a terminal couldn't be
/// opened.
pub fn stdout() -> Option<Box<StdoutTerminal>> {
TerminfoTerminal::new(io::stdout())
}
#[cfg(not(windows))]
-/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
+/// Returns a Terminal wrapping stderr, or None if a terminal couldn't be
/// opened.
pub fn stderr() -> Option<Box<StderrTerminal>> {
TerminfoTerminal::new(io::stderr()).map(|t| Box::new(t) as Box<StderrTerminal>)
}
#[cfg(windows)]
-/// Return a Terminal wrapping stderr, or None if a terminal couldn't be
+/// Returns a Terminal wrapping stderr, or None if a terminal couldn't be
/// opened.
pub fn stderr() -> Option<Box<StderrTerminal>> {
TerminfoTerminal::new(io::stderr())
/// if there was an I/O error.
fn bg(&mut self, color: color::Color) -> io::Result<bool>;
- /// Sets the given terminal attribute, if supported. Returns `Ok(true)`
+ /// Sets the given terminal attribute, if supported. Returns `Ok(true)`
/// if the attribute was supported, `Ok(false)` otherwise, and `Err(e)` if
/// there was an I/O error.
fn attr(&mut self, attr: Attr) -> io::Result<bool>;
- /// Returns whether the given terminal attribute is supported.
+ /// Returns `true` if the given terminal attribute is supported.
fn supports_attr(&self, attr: Attr) -> bool;
/// Resets all terminal attributes and colors to their defaults.
}
impl TermInfo {
- /// Create a TermInfo based on current environment.
+ /// Creates a TermInfo based on current environment.
pub fn from_env() -> Result<TermInfo, Error> {
let term = match env::var("TERM") {
Ok(name) => TermInfo::from_name(&name),
}
}
- /// Create a TermInfo for the named terminal.
+ /// Creates a TermInfo for the named terminal.
pub fn from_name(name: &str) -> Result<TermInfo, Error> {
get_dbpath_for_term(name)
.ok_or_else(|| {
}
impl<T: Write + Send> TerminfoTerminal<T> {
- /// Create a new TerminfoTerminal with the given TermInfo and Write.
+ /// Creates a new TerminfoTerminal with the given TermInfo and Write.
pub fn new_with_terminfo(out: T, terminfo: TermInfo) -> TerminfoTerminal<T> {
let nc = if terminfo.strings.contains_key("setaf") &&
terminfo.strings.contains_key("setab") {
}
}
- /// Create a new TerminfoTerminal for the current environment with the given Write.
+ /// Creates a new TerminfoTerminal for the current environment with the given Write.
///
/// Returns `None` when the terminfo cannot be found or parsed.
pub fn new(out: T) -> Option<TerminfoTerminal<T>> {
}
impl Variables {
- /// Return a new zero-initialized Variables
+ /// Returns a new zero-initialized Variables.
pub fn new() -> Variables {
Variables {
sta_va: [
})
}
-/// Create a dummy TermInfo struct for msys terminals
+/// Creates a dummy TermInfo struct for msys terminals
pub fn msys_terminfo() -> TermInfo {
let mut strings = HashMap::new();
strings.insert("sgr0".to_string(), b"\x1B[0m".to_vec());
-//! ncurses-compatible database discovery
+//! ncurses-compatible database discovery.
//!
//! Does not support hashed database, only filesystem!
use crate::color;
use crate::Terminal;
-/// A Terminal implementation which uses the Win32 Console API.
+/// A Terminal implementation that uses the Win32 Console API.
pub struct WinConsole<T> {
buf: T,
def_foreground: color::Color,
}
}
- /// Returns `None` whenever the terminal cannot be created for some
- /// reason.
+ /// Returns `None` whenever the terminal cannot be created for some reason.
pub fn new(out: T) -> io::Result<WinConsole<T>> {
let fg;
let bg;
//! benchmarks themselves) should be done via the `#[test]` and
//! `#[bench]` attributes.
//!
-//! See the [Testing Chapter](../book/first-edition/testing.html) of the book for more details.
+//! See the [Testing Chapter](../book/ch11-00-testing.html) of the book for more details.
// Currently, not much of this is meant for users. It is intended to
// support the simplest interface possible for representing and
}
/// Invoked when unit tests terminate. Should panic if the unit
-/// test is considered a failure. By default, invokes `report()`
+/// test is considered a failure. By default, invokes `report()`
/// and checks for a `0` result.
pub fn assert_test_result<T: Termination>(result: T) {
let code = result.report();
// processing. We'll call this once per module optimized through ThinLTO, and
// it'll be called concurrently on many threads.
extern "C" LLVMModuleRef
-LLVMRustParseBitcodeForThinLTO(LLVMContextRef Context,
- const char *data,
- size_t len,
- const char *identifier) {
+LLVMRustParseBitcodeForLTO(LLVMContextRef Context,
+ const char *data,
+ size_t len,
+ const char *identifier) {
StringRef Data(data, len);
MemoryBufferRef Buffer(Data, identifier);
unwrap(Context)->enableDebugTypeODRUniquing();
// no-prefer-dynamic
// only-msvc
-// compile-flags: -Z cross-lang-lto
+// compile-flags: -C linker-plugin-lto
#![crate_type = "rlib"]
// no-prefer-dynamic
// ignore-tidy-linelength
-// compile-flags: -C no-prepopulate-passes -C panic=abort -Z cross-lang-lto -Cpasses=name-anon-globals
+// compile-flags: -C no-prepopulate-passes -C panic=abort -C linker-plugin-lto -Cpasses=name-anon-globals
#![crate_type = "staticlib"]
use std::iter::Iterator;
use std::future::Future;
-use std::task::{Poll, LocalWaker};
+use std::task::{Poll, Waker};
use std::pin::Pin;
use std::unimplemented;
impl Future for MyFuture {
type Output = u32;
- fn poll(self: Pin<&mut Self>, lw: &LocalWaker) -> Poll<u32> {
+ fn poll(self: Pin<&mut Self>, waker: &Waker) -> Poll<u32> {
Poll::Pending
}
}
}
}
-/// A fn that has the changed type in its signature; must currently be
+/// A function that has the changed type in its signature; must currently be
/// rebuilt.
///
/// You could imagine that, in the future, if the change were
}
}
-/// Call a fn that has the changed type in its signature; this
+/// Call a function that has the changed type in its signature; this
/// currently must also be rebuilt.
///
/// You could imagine that, in the future, if the change were
}
}
-/// A fn that uses the changed type, but only in its body, not its
+/// A function that uses the changed type, but only in its body, not its
/// signature.
///
/// You could imagine that, in the future, if the change were
}
}
-/// A fn X that calls a fn Y, where Y uses the changed type in its
+/// A function `X` that calls a function `Y`, where `Y` uses the changed type in its
/// body. In this case, the effects of the change should be contained
-/// to Y; X should not have to be rebuilt, nor should it need to be
-/// typechecked again.
+/// to `Y`; `X` should not have to be rebuilt, nor should it need to be
+/// type-checked again.
pub mod call_fn_with_type_in_body {
use fn_with_type_in_body;
}
}
-/// A fn item that makes an instance of `Point` but does not invoke methods
+/// A function item that makes an instance of `Point` but does not invoke methods.
pub mod fn_make_struct {
use point::Point;
}
}
-/// A fn item that reads fields from `Point` but does not invoke methods
+/// A function item that reads fields from `Point` but does not invoke methods.
pub mod fn_read_field {
use point::Point;
}
}
-/// A fn item that writes to a field of `Point` but does not invoke methods
+/// A function item that writes to a field of `Point` but does not invoke methods.
pub mod fn_write_field {
use point::Point;
--- /dev/null
+// revisions: rpass cfail
+
+trait Tr {
+ type Arr;
+
+ const C: usize = 0;
+}
+
+impl Tr for str {
+ #[cfg(rpass)]
+ type Arr = [u8; 8];
+ #[cfg(cfail)]
+ type Arr = [u8; Self::C];
+ //[cfail]~^ ERROR cycle detected when const-evaluating
+}
+
+fn main() {}
--- /dev/null
+// no-prefer-dynamic
+// revisions:rpass1 rpass2
+// compile-flags: -C lto
+
+mod x {
+ pub struct X {
+ x: u32, y: u32,
+ }
+
+ #[cfg(rpass1)]
+ fn make() -> X {
+ X { x: 22, y: 0 }
+ }
+
+ #[cfg(rpass2)]
+ fn make() -> X {
+ X { x: 11, y: 11 }
+ }
+
+ pub fn new() -> X {
+ make()
+ }
+
+ pub fn sum(x: &X) -> u32 {
+ x.x + x.y
+ }
+}
+
+mod y {
+ use x;
+
+ pub fn assert_sum() -> bool {
+ let x = x::new();
+ x::sum(&x) == 22
+ }
+}
+
+pub fn main() {
+ y::assert_sum();
+}
--- /dev/null
+// Test that we don't ICE when trying to dump MIR for unusual item types and
+// that we don't create filenames containing `<` and `>`
+
+struct A;
+
+impl A {
+ const ASSOCIATED_CONSTANT: i32 = 2;
+}
+
+enum E {
+ V = 5,
+}
+
+fn main() {
+ let v = Vec::<i32>::new();
+}
+
+// END RUST SOURCE
+
+// START rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir
+// bb0: {
+// _0 = const 2i32;
+// return;
+// }
+// bb1: {
+// resume;
+// }
+// END rustc.{{impl}}-ASSOCIATED_CONSTANT.mir_map.0.mir
+
+// START rustc.E-V-{{constant}}.mir_map.0.mir
+// bb0: {
+// _0 = const 5isize;
+// return;
+// }
+// bb1: {
+// resume;
+// }
+// END rustc.E-V-{{constant}}.mir_map.0.mir
+
+// START rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir
+// bb0: {
+// goto -> bb7;
+// }
+// bb1: {
+// return;
+// }
+// bb2: {
+// resume;
+// }
+// bb3: {
+// goto -> bb1;
+// }
+// bb4: {
+// goto -> bb2;
+// }
+// bb5: {
+// drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> bb4;
+// }
+// bb6: {
+// drop(((*_1).0: alloc::raw_vec::RawVec<i32>)) -> [return: bb3, unwind: bb4];
+// }
+// bb7: {
+// _2 = &mut (*_1);
+// _3 = const std::ops::Drop::drop(move _2) -> [return: bb6, unwind: bb5];
+// }
+// END rustc.ptr-real_drop_in_place.std__vec__Vec_i32_.AddMovesForPackedDrops.before.mir
all: cpp-executable rust-executable
cpp-executable:
- $(RUSTC) -Zcross-lang-lto=on -o $(TMPDIR)/librustlib-xlto.a -Copt-level=2 -Ccodegen-units=1 ./rustlib.rs
+ $(RUSTC) -Clinker-plugin-lto=on -o $(TMPDIR)/librustlib-xlto.a -Copt-level=2 -Ccodegen-units=1 ./rustlib.rs
$(CLANG) -flto=thin -fuse-ld=lld -L $(TMPDIR) -lrustlib-xlto -o $(TMPDIR)/cmain ./cmain.c -O3
# Make sure we don't find a call instruction to the function we expect to
# always be inlined.
rust-executable:
$(CLANG) ./clib.c -flto=thin -c -o $(TMPDIR)/clib.o -O2
(cd $(TMPDIR); $(AR) crus ./libxyz.a ./clib.o)
- $(RUSTC) -Zcross-lang-lto=on -L$(TMPDIR) -Copt-level=2 -Clinker=$(CLANG) -Clink-arg=-fuse-ld=lld ./main.rs -o $(TMPDIR)/rsmain
+ $(RUSTC) -Clinker-plugin-lto=on -L$(TMPDIR) -Copt-level=2 -Clinker=$(CLANG) -Clink-arg=-fuse-ld=lld ./main.rs -o $(TMPDIR)/rsmain
llvm-objdump -d $(TMPDIR)/rsmain | $(CGREP) -e "call.*c_never_inlined"
llvm-objdump -d $(TMPDIR)/rsmain | $(CGREP) -v -e "call.*c_always_inlined"
ifndef IS_WINDOWS
# This test makes sure that we don't loose upstream object files when compiling
-# staticlibs with -Zcross-lang-lto
+# staticlibs with -C linker-plugin-lto
all: staticlib.rs upstream.rs
- $(RUSTC) upstream.rs -Z cross-lang-lto -Ccodegen-units=1
+ $(RUSTC) upstream.rs -C linker-plugin-lto -Ccodegen-units=1
# Check No LTO
- $(RUSTC) staticlib.rs -Z cross-lang-lto -Ccodegen-units=1 -L. -o $(TMPDIR)/staticlib.a
+ $(RUSTC) staticlib.rs -C linker-plugin-lto -Ccodegen-units=1 -L. -o $(TMPDIR)/staticlib.a
(cd $(TMPDIR); $(LD_LIB_PATH_ENVVAR)=$(REAL_LD_LIBRARY_PATH) llvm-ar x ./staticlib.a)
# Make sure the upstream object file was included
ls $(TMPDIR)/upstream.*.rcgu.o
rm $(TMPDIR)/*
# Check ThinLTO
- $(RUSTC) upstream.rs -Z cross-lang-lto -Ccodegen-units=1 -Clto=thin
- $(RUSTC) staticlib.rs -Z cross-lang-lto -Ccodegen-units=1 -Clto=thin -L. -o $(TMPDIR)/staticlib.a
+ $(RUSTC) upstream.rs -C linker-plugin-lto -Ccodegen-units=1 -Clto=thin
+ $(RUSTC) staticlib.rs -C linker-plugin-lto -Ccodegen-units=1 -Clto=thin -L. -o $(TMPDIR)/staticlib.a
(cd $(TMPDIR); $(LD_LIB_PATH_ENVVAR)=$(REAL_LD_LIBRARY_PATH) llvm-ar x ./staticlib.a)
ls $(TMPDIR)/upstream.*.rcgu.o
# This test makes sure that the object files we generate are actually
# LLVM bitcode files (as used by linker LTO plugins) when compiling with
-# -Z cross-lang-lto.
+# -Clinker-plugin-lto.
# this only succeeds for bitcode files
ASSERT_IS_BITCODE_OBJ=($(LD_LIB_PATH_ENVVAR)=$(REAL_LD_LIBRARY_PATH) llvm-bcanalyzer $(1))
EXTRACT_OBJS=(cd $(TMPDIR); rm -f ./*.o; $(LD_LIB_PATH_ENVVAR)=$(REAL_LD_LIBRARY_PATH) llvm-ar x $(1))
-BUILD_LIB=$(RUSTC) lib.rs -Copt-level=2 -Z cross-lang-lto=on -Ccodegen-units=1
-BUILD_EXE=$(RUSTC) main.rs -Copt-level=2 -Z cross-lang-lto=on -Ccodegen-units=1 --emit=obj
+BUILD_LIB=$(RUSTC) lib.rs -Copt-level=2 -Clinker-plugin-lto -Ccodegen-units=1
+BUILD_EXE=$(RUSTC) main.rs -Copt-level=2 -Clinker-plugin-lto -Ccodegen-units=1 --emit=obj
all: staticlib staticlib-fat-lto staticlib-thin-lto rlib exe cdylib rdylib
--- /dev/null
+-include ../../run-make-fulldeps/tools.mk
+
+ifeq ($(TARGET),wasm32-unknown-unknown)
+all:
+ $(RUSTC) foo.rs -C lto -O --target wasm32-unknown-unknown
+ wc -c < $(TMPDIR)/foo.wasm
+ [ "`wc -c < $(TMPDIR)/foo.wasm`" -lt "20500" ]
+else
+all:
+endif
--- /dev/null
+#![crate_type = "cdylib"]
+
+extern "C" {
+ fn observe(ptr: *const u8, len: usize);
+}
+
+macro_rules! s {
+ ( $( $f:ident -> $t:ty );* $(;)* ) => {
+ $(
+ extern "C" {
+ fn $f() -> $t;
+ }
+ let s = $f().to_string();
+ observe(s.as_ptr(), s.len());
+ )*
+ };
+}
+
+#[no_mangle]
+pub unsafe extern "C" fn foo() {
+ s! {
+ get_u8 -> u8;
+ get_i8 -> i8;
+ get_u16 -> u16;
+ get_i16 -> i16;
+ get_u32 -> u32;
+ get_i32 -> i32;
+ get_u64 -> u64;
+ get_i64 -> i64;
+ get_usize -> usize;
+ get_isize -> isize;
+ }
+}
expr(ExprKind::Path(None, path))
}
-/// Iterate over exprs of depth up to `depth`. The goal is to explore all "interesting"
-/// combinations of expression nesting. For example, we explore combinations using `if`, but not
+/// Iterate over exprs of depth up to `depth`. The goal is to explore all "interesting"
+/// combinations of expression nesting. For example, we explore combinations using `if`, but not
/// `while` or `match`, since those should print and parse in much the same way as `if`.
fn iter_exprs(depth: usize, f: &mut FnMut(P<Expr>)) {
if depth == 0 {
// edition:2018
+// aux-build:arc_wake.rs
#![feature(arbitrary_self_types, async_await, await_macro, futures_api)]
+extern crate arc_wake;
+
use std::pin::Pin;
use std::future::Future;
use std::sync::{
atomic::{self, AtomicUsize},
};
use std::task::{
- LocalWaker, Poll, Wake,
- local_waker_from_nonlocal,
+ Poll, Waker,
};
+use arc_wake::ArcWake;
struct Counter {
wakes: AtomicUsize,
}
-impl Wake for Counter {
- fn wake(this: &Arc<Self>) {
- this.wakes.fetch_add(1, atomic::Ordering::SeqCst);
+impl ArcWake for Counter {
+ fn wake(arc_self: &Arc<Self>) {
+ arc_self.wakes.fetch_add(1, atomic::Ordering::SeqCst);
}
}
impl Future for WakeOnceThenComplete {
type Output = ();
- fn poll(mut self: Pin<&mut Self>, lw: &LocalWaker) -> Poll<()> {
+ fn poll(mut self: Pin<&mut Self>, waker: &Waker) -> Poll<()> {
if self.0 {
Poll::Ready(())
} else {
- lw.wake();
+ waker.wake();
self.0 = true;
Poll::Pending
}
{
let mut fut = Box::pin(f(9));
let counter = Arc::new(Counter { wakes: AtomicUsize::new(0) });
- let waker = local_waker_from_nonlocal(counter.clone());
+ let waker = ArcWake::into_waker(counter.clone());
assert_eq!(0, counter.wakes.load(atomic::Ordering::SeqCst));
assert_eq!(Poll::Pending, fut.as_mut().poll(&waker));
assert_eq!(1, counter.wakes.load(atomic::Ordering::SeqCst));
--- /dev/null
+// edition:2018
+
+#![feature(arbitrary_self_types, futures_api)]
+
+use std::sync::Arc;
+use std::task::{
+ Poll, Waker, RawWaker, RawWakerVTable,
+};
+
+macro_rules! waker_vtable {
+ ($ty:ident) => {
+ &RawWakerVTable {
+ clone: clone_arc_raw::<$ty>,
+ drop: drop_arc_raw::<$ty>,
+ wake: wake_arc_raw::<$ty>,
+ }
+ };
+}
+
+pub trait ArcWake {
+ fn wake(arc_self: &Arc<Self>);
+
+ fn into_waker(wake: Arc<Self>) -> Waker where Self: Sized
+ {
+ let ptr = Arc::into_raw(wake) as *const();
+
+ unsafe {
+ Waker::new_unchecked(RawWaker::new(ptr, waker_vtable!(Self)))
+ }
+ }
+}
+
+unsafe fn increase_refcount<T: ArcWake>(data: *const()) {
+ // Retain Arc by creating a copy
+ let arc: Arc<T> = Arc::from_raw(data as *const T);
+ let arc_clone = arc.clone();
+    // Forget the Arcs again, so that the refcount isn't decreased
+ let _ = Arc::into_raw(arc);
+ let _ = Arc::into_raw(arc_clone);
+}
+
+unsafe fn clone_arc_raw<T: ArcWake>(data: *const()) -> RawWaker {
+ increase_refcount::<T>(data);
+ RawWaker::new(data, waker_vtable!(T))
+}
+
+unsafe fn drop_arc_raw<T: ArcWake>(data: *const()) {
+ // Drop Arc
+ let _: Arc<T> = Arc::from_raw(data as *const T);
+}
+
+unsafe fn wake_arc_raw<T: ArcWake>(data: *const()) {
+ let arc: Arc<T> = Arc::from_raw(data as *const T);
+ ArcWake::wake(&arc);
+ let _ = Arc::into_raw(arc);
+}
-//! This is a client of the `a` crate defined in "svn-a-base.rs". The
-//! rpass and cfail tests (such as "run-pass/svh-add-comment.rs") use
+//! This is a client of the `a` crate defined in `svn-a-base.rs`. The
+//! rpass and cfail tests (such as `run-pass/svh-add-comment.rs`) use
//! it by swapping in a different object code library crate built from
-//! some variant of "svn-a-base.rs", and then we are checking if the
+//! some variant of `svn-a-base.rs`, and then we are checking if the
//! compiler properly ignores or accepts the change, based on whether
//! the change could affect the downstream crate content or not
//! (#14132).
+// aux-build:arc_wake.rs
+
#![feature(arbitrary_self_types, futures_api)]
#![allow(unused)]
+extern crate arc_wake;
+
use std::future::Future;
use std::pin::Pin;
-use std::rc::Rc;
use std::sync::{
Arc,
atomic::{self, AtomicUsize},
};
use std::task::{
- Poll, Wake, Waker, LocalWaker,
- local_waker, local_waker_from_nonlocal,
+ Poll, Waker,
};
+use arc_wake::ArcWake;
struct Counter {
- local_wakes: AtomicUsize,
- nonlocal_wakes: AtomicUsize,
+ wakes: AtomicUsize,
}
-impl Wake for Counter {
- fn wake(this: &Arc<Self>) {
- this.nonlocal_wakes.fetch_add(1, atomic::Ordering::SeqCst);
- }
-
- unsafe fn wake_local(this: &Arc<Self>) {
- this.local_wakes.fetch_add(1, atomic::Ordering::SeqCst);
+impl ArcWake for Counter {
+ fn wake(arc_self: &Arc<Self>) {
+ arc_self.wakes.fetch_add(1, atomic::Ordering::SeqCst);
}
}
impl Future for MyFuture {
type Output = ();
- fn poll(self: Pin<&mut Self>, lw: &LocalWaker) -> Poll<Self::Output> {
- // Wake once locally
- lw.wake();
- // Wake twice non-locally
- let waker = lw.clone().into_waker();
+ fn poll(self: Pin<&mut Self>, waker: &Waker) -> Poll<Self::Output> {
+ // Wake twice
waker.wake();
waker.wake();
Poll::Ready(())
}
}
-fn test_local_waker() {
+fn test_waker() {
let counter = Arc::new(Counter {
- local_wakes: AtomicUsize::new(0),
- nonlocal_wakes: AtomicUsize::new(0),
+ wakes: AtomicUsize::new(0),
});
- let waker = unsafe { local_waker(counter.clone()) };
- assert_eq!(Poll::Ready(()), Pin::new(&mut MyFuture).poll(&waker));
- assert_eq!(1, counter.local_wakes.load(atomic::Ordering::SeqCst));
- assert_eq!(2, counter.nonlocal_wakes.load(atomic::Ordering::SeqCst));
-}
+ let waker = ArcWake::into_waker(counter.clone());
+ assert_eq!(2, Arc::strong_count(&counter));
-fn test_local_as_nonlocal_waker() {
- let counter = Arc::new(Counter {
- local_wakes: AtomicUsize::new(0),
- nonlocal_wakes: AtomicUsize::new(0),
- });
- let waker: LocalWaker = local_waker_from_nonlocal(counter.clone());
assert_eq!(Poll::Ready(()), Pin::new(&mut MyFuture).poll(&waker));
- assert_eq!(0, counter.local_wakes.load(atomic::Ordering::SeqCst));
- assert_eq!(3, counter.nonlocal_wakes.load(atomic::Ordering::SeqCst));
+ assert_eq!(2, counter.wakes.load(atomic::Ordering::SeqCst));
+
+ drop(waker);
+ assert_eq!(1, Arc::strong_count(&counter));
}
fn main() {
- test_local_waker();
- test_local_as_nonlocal_waker();
+ test_waker();
}
/*
# Comparison of static arrays
-The expected behaviour would be that test==test1, therefore 'true'
+The expected behaviour would be that `test == test1`, therefore 'true'
would be printed, however the below prints false.
*/
}
-// FIXME #623 - these aren't supported yet
+// FIXME(#623): these aren't supported yet
/*mod test_literals {
#![str = "s"]
#![char = 'c']
/*!
* On x86_64-linux-gnu and possibly other platforms, structs get 8-byte "preferred" alignment,
* but their "ABI" alignment (i.e., what actually matters for data layout) is the largest alignment
- * of any field. (Also, u64 has 8-byte ABI alignment; this is not always true).
+ * of any field. (Also, `u64` has 8-byte ABI alignment; this is not always true).
*
* On such platforms, if monomorphize uses the "preferred" alignment, then it will unify
* `A` and `B`, even though `S<A>` and `S<B>` have the field `t` at different offsets,
assert_eq!(black_box(tmp), Wrapping($ans));
}
- // FIXME(30524): Uncomment this test
+ // FIXME(30524): uncomment this test
/*
{
let mut tmp = Wrapping($initial);
// pretty-expanded FIXME #23616
#![allow(warnings)]
-#![feature(iter_empty)]
-#![feature(iter_once)]
-#![feature(str_escape)]
use std::iter::{empty, once, repeat};
+++ /dev/null
-// exact-check
-
-const QUERY = 'waker_from';
-
-const EXPECTED = {
- 'others': [
- { 'path': 'std::task', 'name': 'local_waker_from_nonlocal' },
- { 'path': 'alloc::task', 'name': 'local_waker_from_nonlocal' },
- ],
-};
//! This crate exports a macro `enum_from_primitive!` that wraps an
//! `enum` declaration and automatically adds an implementation of
//! `num::FromPrimitive` (reexported here), to allow conversion from
-//! primitive integers to the enum. It therefore provides an
+//! primitive integers to the enum. It therefore provides an
//! alternative to the built-in `#[derive(FromPrimitive)]`, which
//! requires the unstable `std::num::FromPrimitive` and is disabled in
//! Rust 1.0.
#![feature(deprecated)]
+// @has deprecated_future/index.html '//*[@class="stab deprecated"]' \
+// 'Deprecated'
// @has deprecated_future/struct.S.html '//*[@class="stab deprecated"]' \
-// 'Deprecating in 99.99.99: effectively never'
+// 'Deprecated since 99.99.99: effectively never'
#[deprecated(since = "99.99.99", note = "effectively never")]
pub struct S;
// 'Deprecated$'
#[deprecated]
pub struct W;
+
+// @matches deprecated/struct.X.html '//*[@class="stab deprecated"]' \
+// 'Deprecated: shorthand reason$'
+#[deprecated = "shorthand reason"]
+pub struct X;
-/// Test | Table
+/// Tests | Table
/// ------|-------------
/// t = b | id = \|x\| x
pub struct Foo; // @has issue_27862/struct.Foo.html //td 'id = |x| x'
--- /dev/null
+#![feature(staged_api)]
+
+#![stable(feature = "rustc_deprecated-future-test", since = "1.0.0")]
+
+// @has rustc_deprecated_future/index.html '//*[@class="stab deprecated"]' \
+// 'Deprecation planned'
+// @has rustc_deprecated_future/struct.S.html '//*[@class="stab deprecated"]' \
+// 'Deprecating in 99.99.99: effectively never'
+#[rustc_deprecated(since = "99.99.99", reason = "effectively never")]
+#[stable(feature = "rustc_deprecated-future-test", since = "1.0.0")]
+pub struct S;
pub trait ToOwned {
type Owned;
- /// Create owned data from borrowed data, usually by copying.
+ /// Creates owned data from borrowed data, usually by copying.
fn to_owned(&self) -> Self::Owned;
}
--> $DIR/bad-lint-cap2.rs:6:5
|
LL | use std::option; //~ ERROR
- | ----^^^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^^^
|
note: lint level defined here
--> $DIR/bad-lint-cap2.rs:4:9
--> $DIR/bad-lint-cap3.rs:7:5
|
LL | use std::option; //~ WARN
- | ----^^^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^^^
|
note: lint level defined here
--> $DIR/bad-lint-cap3.rs:4:9
// ignore-tidy-linelength
+// run-pass
+
#![deny(deprecated_in_future)]
#[deprecated(since = "99.99.99", note = "text")]
pub fn deprecated_future() {}
fn test() {
- deprecated_future(); //~ ERROR use of item 'deprecated_future' that will be deprecated in future version 99.99.99: text
+ deprecated_future(); // ok; deprecated_in_future only applies to rustc_deprecated
}
fn main() {}
-error: use of item 'deprecated_future' that will be deprecated in future version 99.99.99: text
- --> $DIR/deprecation-in-future.rs:9:5
+warning: use of deprecated item 'deprecated_future': text
+ --> $DIR/deprecation-in-future.rs:11:5
|
-LL | deprecated_future(); //~ ERROR use of item 'deprecated_future' that will be deprecated in future version 99.99.99: text
+LL | deprecated_future(); // ok; deprecated_in_future only applies to rustc_deprecated
| ^^^^^^^^^^^^^^^^^
|
-note: lint level defined here
- --> $DIR/deprecation-in-future.rs:3:9
- |
-LL | #![deny(deprecated_in_future)]
- | ^^^^^^^^^^^^^^^^^^^^
-
-error: aborting due to previous error
+ = note: #[warn(deprecated)] on by default
<Foo>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
<Foo as Trait>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
- deprecated_future(); // Fine; no error.
- deprecated_future_text(); // Fine; no error.
+ // Future deprecations are only permitted for rustc_deprecated.
+ deprecated_future(); //~ ERROR use of deprecated item
+ deprecated_future_text(); //~ ERROR use of deprecated item
let _ = DeprecatedStruct {
//~^ ERROR use of deprecated item 'this_crate::DeprecatedStruct': text
LL | <Foo as Trait>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+error: use of deprecated item 'this_crate::deprecated_future': text
+ --> $DIR/deprecation-lint.rs:265:9
+ |
+LL | deprecated_future(); //~ ERROR use of deprecated item
+ | ^^^^^^^^^^^^^^^^^
+
+error: use of deprecated item 'this_crate::deprecated_future_text': text
+ --> $DIR/deprecation-lint.rs:266:9
+ |
+LL | deprecated_future_text(); //~ ERROR use of deprecated item
+ | ^^^^^^^^^^^^^^^^^^^^^^
+
error: use of deprecated item 'this_crate::DeprecatedStruct': text
- --> $DIR/deprecation-lint.rs:267:17
+ --> $DIR/deprecation-lint.rs:268:17
|
LL | let _ = DeprecatedStruct {
| ^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::DeprecatedUnitStruct': text
- --> $DIR/deprecation-lint.rs:272:17
+ --> $DIR/deprecation-lint.rs:273:17
|
LL | let _ = DeprecatedUnitStruct; //~ ERROR use of deprecated item 'this_crate::DeprecatedUnitStruct': text
| ^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::Enum::DeprecatedVariant': text
- --> $DIR/deprecation-lint.rs:274:17
+ --> $DIR/deprecation-lint.rs:275:17
|
LL | let _ = Enum::DeprecatedVariant; //~ ERROR use of deprecated item 'this_crate::Enum::DeprecatedVariant': text
| ^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::DeprecatedTupleStruct': text
- --> $DIR/deprecation-lint.rs:276:17
+ --> $DIR/deprecation-lint.rs:277:17
|
LL | let _ = DeprecatedTupleStruct (1); //~ ERROR use of deprecated item 'this_crate::DeprecatedTupleStruct': text
| ^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::nested::DeprecatedStruct': text
- --> $DIR/deprecation-lint.rs:278:17
+ --> $DIR/deprecation-lint.rs:279:17
|
LL | let _ = nested::DeprecatedStruct {
| ^^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::nested::DeprecatedUnitStruct': text
- --> $DIR/deprecation-lint.rs:283:17
+ --> $DIR/deprecation-lint.rs:284:17
|
LL | let _ = nested::DeprecatedUnitStruct; //~ ERROR use of deprecated item 'this_crate::nested::DeprecatedUnitStruct': text
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::nested::Enum::DeprecatedVariant': text
- --> $DIR/deprecation-lint.rs:285:17
+ --> $DIR/deprecation-lint.rs:286:17
|
LL | let _ = nested::Enum::DeprecatedVariant; //~ ERROR use of deprecated item 'this_crate::nested::Enum::DeprecatedVariant': text
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::nested::DeprecatedTupleStruct': text
- --> $DIR/deprecation-lint.rs:287:17
+ --> $DIR/deprecation-lint.rs:288:17
|
LL | let _ = nested::DeprecatedTupleStruct (1); //~ ERROR use of deprecated item 'this_crate::nested::DeprecatedTupleStruct': text
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::Trait::trait_deprecated': text
- --> $DIR/deprecation-lint.rs:292:9
+ --> $DIR/deprecation-lint.rs:293:9
|
LL | Trait::trait_deprecated(&foo); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated'
| ^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::Trait::trait_deprecated': text
- --> $DIR/deprecation-lint.rs:294:9
+ --> $DIR/deprecation-lint.rs:295:9
|
LL | <Foo as Trait>::trait_deprecated(&foo); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated'
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
- --> $DIR/deprecation-lint.rs:296:9
+ --> $DIR/deprecation-lint.rs:297:9
|
LL | Trait::trait_deprecated_text(&foo); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
- --> $DIR/deprecation-lint.rs:298:9
+ --> $DIR/deprecation-lint.rs:299:9
|
LL | <Foo as Trait>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::test_fn_closure_body::{{closure}}::bar'
- --> $DIR/deprecation-lint.rs:316:13
+ --> $DIR/deprecation-lint.rs:317:13
|
LL | bar(); //~ ERROR use of deprecated item 'this_crate::test_fn_closure_body::{{closure}}::bar'
| ^^^
error: use of deprecated item 'this_crate::DeprecatedTrait': text
- --> $DIR/deprecation-lint.rs:335:10
+ --> $DIR/deprecation-lint.rs:336:10
|
LL | impl DeprecatedTrait for S { } //~ ERROR use of deprecated item 'this_crate::DeprecatedTrait': text
| ^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::DeprecatedTrait': text
- --> $DIR/deprecation-lint.rs:337:24
+ --> $DIR/deprecation-lint.rs:338:24
|
LL | trait LocalTrait : DeprecatedTrait { } //~ ERROR use of deprecated item 'this_crate::DeprecatedTrait': text
| ^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate2::Deprecated': text
- --> $DIR/deprecation-lint.rs:389:17
+ --> $DIR/deprecation-lint.rs:390:17
|
LL | let x = Deprecated {
| ^^^^^^^^^^
error: use of deprecated item 'this_crate2::Deprecated': text
- --> $DIR/deprecation-lint.rs:398:13
+ --> $DIR/deprecation-lint.rs:399:13
|
LL | let Deprecated {
| ^^^^^^^^^^
error: use of deprecated item 'this_crate2::Deprecated': text
- --> $DIR/deprecation-lint.rs:404:13
+ --> $DIR/deprecation-lint.rs:405:13
|
LL | let Deprecated
| ^^^^^^^^^^
error: use of deprecated item 'this_crate2::Deprecated2': text
- --> $DIR/deprecation-lint.rs:409:17
+ --> $DIR/deprecation-lint.rs:410:17
|
LL | let x = Deprecated2(1, 2, 3);
| ^^^^^^^^^^^
error: use of deprecated item 'this_crate2::Deprecated2': text
- --> $DIR/deprecation-lint.rs:419:13
+ --> $DIR/deprecation-lint.rs:420:13
|
LL | let Deprecated2
| ^^^^^^^^^^^
error: use of deprecated item 'this_crate2::Deprecated2': text
- --> $DIR/deprecation-lint.rs:428:13
+ --> $DIR/deprecation-lint.rs:429:13
|
LL | let Deprecated2
| ^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::DeprecatedStruct::i': text
- --> $DIR/deprecation-lint.rs:269:13
+ --> $DIR/deprecation-lint.rs:270:13
|
LL | i: 0 //~ ERROR use of deprecated item 'this_crate::DeprecatedStruct::i': text
| ^^^^
error: use of deprecated item 'this_crate::nested::DeprecatedStruct::i': text
- --> $DIR/deprecation-lint.rs:280:13
+ --> $DIR/deprecation-lint.rs:281:13
|
LL | i: 0 //~ ERROR use of deprecated item 'this_crate::nested::DeprecatedStruct::i': text
| ^^^^
error: use of deprecated item 'this_crate::Trait::trait_deprecated': text
- --> $DIR/deprecation-lint.rs:291:13
+ --> $DIR/deprecation-lint.rs:292:13
|
LL | foo.trait_deprecated(); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated'
| ^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::Trait::trait_deprecated': text
- --> $DIR/deprecation-lint.rs:293:9
+ --> $DIR/deprecation-lint.rs:294:9
|
LL | <Foo>::trait_deprecated(&foo); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated'
| ^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
- --> $DIR/deprecation-lint.rs:295:13
+ --> $DIR/deprecation-lint.rs:296:13
|
LL | foo.trait_deprecated_text(); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
| ^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
- --> $DIR/deprecation-lint.rs:297:9
+ --> $DIR/deprecation-lint.rs:298:9
|
LL | <Foo>::trait_deprecated_text(&foo); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::Trait::trait_deprecated': text
- --> $DIR/deprecation-lint.rs:302:13
+ --> $DIR/deprecation-lint.rs:303:13
|
LL | foo.trait_deprecated(); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated'
| ^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
- --> $DIR/deprecation-lint.rs:303:13
+ --> $DIR/deprecation-lint.rs:304:13
|
LL | foo.trait_deprecated_text(); //~ ERROR use of deprecated item 'this_crate::Trait::trait_deprecated_text': text
| ^^^^^^^^^^^^^^^^^^^^^
error: use of deprecated item 'this_crate2::Stable::override2': text
- --> $DIR/deprecation-lint.rs:362:13
+ --> $DIR/deprecation-lint.rs:363:13
|
LL | override2: 3,
| ^^^^^^^^^^^^
error: use of deprecated item 'this_crate2::Stable::override2': text
- --> $DIR/deprecation-lint.rs:366:17
+ --> $DIR/deprecation-lint.rs:367:17
|
LL | let _ = x.override2;
| ^^^^^^^^^^^
error: use of deprecated item 'this_crate2::Stable::override2': text
- --> $DIR/deprecation-lint.rs:370:13
+ --> $DIR/deprecation-lint.rs:371:13
|
LL | override2: _
| ^^^^^^^^^^^^
error: use of deprecated item 'this_crate2::Stable2::2': text
- --> $DIR/deprecation-lint.rs:378:17
+ --> $DIR/deprecation-lint.rs:379:17
|
LL | let _ = x.2;
| ^^^
error: use of deprecated item 'this_crate2::Stable2::2': text
- --> $DIR/deprecation-lint.rs:383:20
+ --> $DIR/deprecation-lint.rs:384:20
|
LL | _)
| ^
error: use of deprecated item 'this_crate2::Deprecated::inherit': text
- --> $DIR/deprecation-lint.rs:391:13
+ --> $DIR/deprecation-lint.rs:392:13
|
LL | inherit: 1,
| ^^^^^^^^^^
error: use of deprecated item 'this_crate2::Deprecated::inherit': text
- --> $DIR/deprecation-lint.rs:395:17
+ --> $DIR/deprecation-lint.rs:396:17
|
LL | let _ = x.inherit;
| ^^^^^^^^^
error: use of deprecated item 'this_crate2::Deprecated::inherit': text
- --> $DIR/deprecation-lint.rs:400:13
+ --> $DIR/deprecation-lint.rs:401:13
|
LL | inherit: _,
| ^^^^^^^^^^
error: use of deprecated item 'this_crate2::Deprecated2::0': text
- --> $DIR/deprecation-lint.rs:412:17
+ --> $DIR/deprecation-lint.rs:413:17
|
LL | let _ = x.0;
| ^^^
error: use of deprecated item 'this_crate2::Deprecated2::1': text
- --> $DIR/deprecation-lint.rs:414:17
+ --> $DIR/deprecation-lint.rs:415:17
|
LL | let _ = x.1;
| ^^^
error: use of deprecated item 'this_crate2::Deprecated2::2': text
- --> $DIR/deprecation-lint.rs:416:17
+ --> $DIR/deprecation-lint.rs:417:17
|
LL | let _ = x.2;
| ^^^
error: use of deprecated item 'this_crate2::Deprecated2::0': text
- --> $DIR/deprecation-lint.rs:421:14
+ --> $DIR/deprecation-lint.rs:422:14
|
LL | (_,
| ^
error: use of deprecated item 'this_crate2::Deprecated2::1': text
- --> $DIR/deprecation-lint.rs:423:14
+ --> $DIR/deprecation-lint.rs:424:14
|
LL | _,
| ^
error: use of deprecated item 'this_crate2::Deprecated2::2': text
- --> $DIR/deprecation-lint.rs:425:14
+ --> $DIR/deprecation-lint.rs:426:14
|
LL | _)
| ^
-error: aborting due to 120 previous errors
+error: aborting due to 122 previous errors
#[deprecated(since(b), note = "a")] //~ ERROR incorrect meta item
fn f6() { }
+
+ #[deprecated(note = b"test")] //~ ERROR literal in `deprecated` value must be a string
+ fn f7() { }
+
+ #[deprecated("test")] //~ ERROR item in `deprecated` must be a key/value pair
+ fn f8() { }
}
#[deprecated(since = "a", note = "b")]
LL | #[deprecated(since(b), note = "a")] //~ ERROR incorrect meta item
| ^^^^^^^^
+error[E0565]: literal in `deprecated` value must be a string
+ --> $DIR/deprecation-sanity.rs:19:25
+ |
+LL | #[deprecated(note = b"test")] //~ ERROR literal in `deprecated` value must be a string
+ | ^^^^^^^ help: consider removing the prefix: `"test"`
+
+error[E0565]: item in `deprecated` must be a key/value pair
+ --> $DIR/deprecation-sanity.rs:22:18
+ |
+LL | #[deprecated("test")] //~ ERROR item in `deprecated` must be a key/value pair
+ | ^^^^^^
+
error[E0550]: multiple deprecated attributes
- --> $DIR/deprecation-sanity.rs:22:1
+ --> $DIR/deprecation-sanity.rs:28:1
|
LL | fn multiple1() { } //~ ERROR multiple deprecated attributes
| ^^^^^^^^^^^^^^^^^^
error[E0538]: multiple 'since' items
- --> $DIR/deprecation-sanity.rs:24:27
+ --> $DIR/deprecation-sanity.rs:30:27
|
LL | #[deprecated(since = "a", since = "b", note = "c")] //~ ERROR multiple 'since' items
| ^^^^^^^^^^^
-error: aborting due to 7 previous errors
+error: aborting due to 9 previous errors
-Some errors occurred: E0538, E0541, E0550, E0551.
+Some errors occurred: E0538, E0541, E0550, E0551, E0565.
For more information about an error, try `rustc --explain E0538`.
--- /dev/null
+#[deprecated = b"test"] //~ ERROR attribute must be of the form
+fn foo() {}
+
+fn main() {}
--- /dev/null
+error: attribute must be of the form `#[deprecated]` or `#[deprecated(/*opt*/ since = "version", /*opt*/ note = "reason)]` or `#[deprecated = "reason"]`
+ --> $DIR/invalid-literal.rs:1:1
+ |
+LL | #[deprecated = b"test"] //~ ERROR attribute must be of the form
+ | ^^^^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
--- /dev/null
+// ignore-tidy-linelength
+
+#![deny(deprecated_in_future)]
+
+#![feature(staged_api)]
+
+#![stable(feature = "rustc_deprecation-in-future-test", since = "1.0.0")]
+
+#[rustc_deprecated(since = "99.99.99", reason = "effectively never")]
+#[stable(feature = "rustc_deprecation-in-future-test", since = "1.0.0")]
+pub struct S;
+
+fn main() {
+ let _ = S; //~ ERROR use of item 'S' that will be deprecated in future version 99.99.99: effectively never
+}
--- /dev/null
+error: use of item 'S' that will be deprecated in future version 99.99.99: effectively never
+ --> $DIR/rustc_deprecation-in-future.rs:14:13
+ |
+LL | let _ = S; //~ ERROR use of item 'S' that will be deprecated in future version 99.99.99: effectively never
+ | ^
+ |
+note: lint level defined here
+ --> $DIR/rustc_deprecation-in-future.rs:3:9
+ |
+LL | #![deny(deprecated_in_future)]
+ | ^^^^^^^^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
LL | const CR: &'static mut i32 = &mut C; //~ ERROR E0017
| ^^^^^^ constants require immutable values
-error: cannot mutate statics in the initializer of another static
+error[E0017]: references in statics may only refer to immutable values
--> $DIR/E0017.rs:5:39
|
LL | static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017
- | ^^^^^^
+ | ^^^^^^ statics require immutable values
-error[E0017]: references in statics may only refer to immutable values
+error: cannot mutate statics in the initializer of another static
--> $DIR/E0017.rs:5:39
|
LL | static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017
- | ^^^^^^ statics require immutable values
+ | ^^^^^^
error[E0596]: cannot borrow immutable static item `X` as mutable
--> $DIR/E0017.rs:5:39
LL | const CR: &'static mut i32 = &mut C; //~ ERROR E0017
| ^^^^^^ constants require immutable values
-error: cannot mutate statics in the initializer of another static
+error[E0017]: references in statics may only refer to immutable values
--> $DIR/E0017.rs:5:39
|
LL | static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017
- | ^^^^^^
+ | ^^^^^^ statics require immutable values
-error[E0017]: references in statics may only refer to immutable values
+error: cannot mutate statics in the initializer of another static
--> $DIR/E0017.rs:5:39
|
LL | static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017
- | ^^^^^^ statics require immutable values
+ | ^^^^^^
error[E0596]: cannot borrow immutable static item as mutable
--> $DIR/E0017.rs:5:44
LL | const CR: &'static mut i32 = &mut C; //~ ERROR E0017
| ^^^^^^ constants require immutable values
-error: cannot mutate statics in the initializer of another static
+error[E0017]: references in statics may only refer to immutable values
--> $DIR/E0388.rs:5:39
|
LL | static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017
- | ^^^^^^
+ | ^^^^^^ statics require immutable values
-error[E0017]: references in statics may only refer to immutable values
+error: cannot mutate statics in the initializer of another static
--> $DIR/E0388.rs:5:39
|
LL | static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017
- | ^^^^^^ statics require immutable values
+ | ^^^^^^
error[E0596]: cannot borrow immutable static item `X` as mutable
--> $DIR/E0388.rs:5:39
LL | const CR: &'static mut i32 = &mut C; //~ ERROR E0017
| ^^^^^^ constants require immutable values
-error: cannot mutate statics in the initializer of another static
+error[E0017]: references in statics may only refer to immutable values
--> $DIR/E0388.rs:5:39
|
LL | static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017
- | ^^^^^^
+ | ^^^^^^ statics require immutable values
-error[E0017]: references in statics may only refer to immutable values
+error: cannot mutate statics in the initializer of another static
--> $DIR/E0388.rs:5:39
|
LL | static STATIC_REF: &'static mut i32 = &mut X; //~ ERROR E0017
- | ^^^^^^ statics require immutable values
+ | ^^^^^^
error[E0596]: cannot borrow immutable static item as mutable
--> $DIR/E0388.rs:5:44
fn main() {
- if let Some(b) = None { //~ ERROR: `if let` arms have incompatible types
- //~^ expected (), found integer
- //~| expected type `()`
- //~| found type `{integer}`
+ if let Some(b) = None {
+ //~^ NOTE if let` arms have incompatible types
()
} else {
1
};
+ //~^^ ERROR: `if let` arms have incompatible types
+ //~| NOTE expected (), found integer
+ //~| NOTE expected type `()`
}
error[E0308]: `if let` arms have incompatible types
- --> $DIR/if-let-arm-types.rs:2:5
+ --> $DIR/if-let-arm-types.rs:6:9
|
-LL | / if let Some(b) = None { //~ ERROR: `if let` arms have incompatible types
-LL | | //~^ expected (), found integer
-LL | | //~| expected type `()`
-LL | | //~| found type `{integer}`
-... |
+LL | / if let Some(b) = None {
+LL | | //~^ NOTE if let` arms have incompatible types
+LL | | ()
+LL | | } else {
LL | | 1
+ | | ^ expected (), found integer
LL | | };
- | |_____^ expected (), found integer
+ | |_____- `if let` arms have incompatible types
|
= note: expected type `()`
found type `{integer}`
-note: `if let` arm with an incompatible type
- --> $DIR/if-let-arm-types.rs:7:12
- |
-LL | } else {
- | ____________^
-LL | | 1
-LL | | };
- | |_____^
error: aborting due to previous error
--> $DIR/unused.rs:7:24
|
LL | pub(super) use super::f; //~ ERROR unused
- | ---------------^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^
|
note: lint level defined here
--> $DIR/unused.rs:1:9
--- /dev/null
+mod foo {
+ pub struct B(());
+}
+
+mod bar {
+ use foo::B;
+
+ fn foo() {
+ B(()); //~ ERROR expected function, found struct `B` [E0423]
+ }
+}
+
+mod baz {
+ fn foo() {
+ B(()); //~ ERROR cannot find function `B` in this scope [E0425]
+ }
+}
+
+fn main() {}
--- /dev/null
+error[E0423]: expected function, found struct `B`
+ --> $DIR/issue-42944.rs:9:9
+ |
+LL | B(()); //~ ERROR expected function, found struct `B` [E0423]
+ | ^ constructor is not visible here due to private fields
+
+error[E0425]: cannot find function `B` in this scope
+ --> $DIR/issue-42944.rs:15:9
+ |
+LL | B(()); //~ ERROR cannot find function `B` in this scope [E0425]
+ | ^ not found in this scope
+help: possible candidate is found in another module, you can import it into scope
+ |
+LL | use foo::B;
+ |
+
+error: aborting due to 2 previous errors
+
+Some errors occurred: E0423, E0425.
+For more information about an error, try `rustc --explain E0423`.
fn main() {
match Some(10) {
- //~^ ERROR match arms have incompatible types
- //~| expected type `bool`
- //~| found type `()`
- //~| expected bool, found ()
+ //~^ NOTE `match` arms have incompatible types
Some(5) => false,
+ //~^ NOTE this is found to be of type `bool`
Some(2) => true,
+ //~^ NOTE this is found to be of type `bool`
None => (),
+ //~^ ERROR match arms have incompatible types
+ //~| NOTE expected bool, found ()
+ //~| NOTE expected type `bool`
_ => true
}
}
error[E0308]: match arms have incompatible types
- --> $DIR/issue-11319.rs:2:5
+ --> $DIR/issue-11319.rs:8:20
|
LL | / match Some(10) {
-LL | | //~^ ERROR match arms have incompatible types
-LL | | //~| expected type `bool`
-LL | | //~| found type `()`
-... |
+LL | | //~^ NOTE `match` arms have incompatible types
+LL | | Some(5) => false,
+ | | ----- this is found to be of type `bool`
+LL | | //~^ NOTE this is found to be of type `bool`
+LL | | Some(2) => true,
+ | | ---- this is found to be of type `bool`
+LL | | //~^ NOTE this is found to be of type `bool`
LL | | None => (),
- | | -- match arm with an incompatible type
+ | | ^^ expected bool, found ()
+... |
LL | | _ => true
LL | | }
- | |_____^ expected bool, found ()
+ | |_____- `match` arms have incompatible types
|
= note: expected type `bool`
found type `()`
LL | const C1: &'static mut [usize] = &mut [];
| ^^^^^^^ constants require immutable values
-error[E0013]: constants cannot refer to statics, use a constant instead
+error[E0017]: references in constants may only refer to immutable values
--> $DIR/issue-17718-const-bad-values.rs:5:41
|
LL | const C2: &'static mut usize = unsafe { &mut S };
- | ^^^^^^
+ | ^^^^^^ constants require immutable values
-error[E0017]: references in constants may only refer to immutable values
+error[E0013]: constants cannot refer to statics, use a constant instead
--> $DIR/issue-17718-const-bad-values.rs:5:41
|
LL | const C2: &'static mut usize = unsafe { &mut S };
- | ^^^^^^ constants require immutable values
+ | ^^^^^^
error: aborting due to 3 previous errors
}
fn str_to_direction(to_parse: &str) -> RoomDirection {
- match to_parse { //~ ERROR match arms have incompatible types
+ match to_parse {
"w" | "west" => RoomDirection::West,
"e" | "east" => RoomDirection::East,
"n" | "north" => RoomDirection::North,
"down" => RoomDirection::Down,
_ => None
}
+ //~^^ ERROR match arms have incompatible types
}
fn main() {
| ^^^^^^^^^ ...but data from `room` is returned here
error[E0308]: match arms have incompatible types
- --> $DIR/issue-17728.rs:100:5
+ --> $DIR/issue-17728.rs:109:14
|
-LL | / match to_parse { //~ ERROR match arms have incompatible types
+LL | / match to_parse {
LL | | "w" | "west" => RoomDirection::West,
LL | | "e" | "east" => RoomDirection::East,
LL | | "n" | "north" => RoomDirection::North,
... |
+LL | | "down" => RoomDirection::Down,
+ | | ------------------- this and all prior arms are found to be of type `RoomDirection`
LL | | _ => None
- | | ---- match arm with an incompatible type
+ | | ^^^^ expected enum `RoomDirection`, found enum `std::option::Option`
LL | | }
- | |_____^ expected enum `RoomDirection`, found enum `std::option::Option`
+ | |_____- `match` arms have incompatible types
|
= note: expected type `RoomDirection`
found type `std::option::Option<_>`
/// A strategy for acquiring more subpaths to walk.
pub trait Strategy {
type P: PathExtensions;
- /// Get additional subpaths from a given path.
+ /// Gets additional subpaths from a given path.
fn get_more(&self, item: &Self::P) -> io::Result<Vec<Self::P>>;
/// Determine whether a path should be walked further.
/// This is run against each item from `get_more()`.
}
impl<S: Strategy> Subpaths<S> {
- /// Create a directory walker with a root path and strategy.
+ /// Creates a directory walker with a root path and strategy.
pub fn new(p: &S::P, strategy: S) -> io::Result<Subpaths<S>> {
let stack = strategy.get_more(p)?;
Ok(Subpaths { stack: stack, strategy: strategy })
}
impl<S: Default + Strategy> Subpaths<S> {
- /// Create a directory walker with a root path and a default strategy.
+ /// Creates a directory walker with a root path and a default strategy.
pub fn walk(p: &S::P) -> io::Result<Subpaths<S>> {
Subpaths::new(p, Default::default())
}
fn closure_from_match() {
let x = match 1usize {
- //~^ ERROR match arms have incompatible types
1 => |c| c + 1,
2 => |c| c - 1,
_ => |c| c - 1
};
+ //~^^^ ERROR match arms have incompatible types
}
fn main() { }
= help: consider boxing your closure and/or using it as a trait object
error[E0308]: match arms have incompatible types
- --> $DIR/issue-24036.rs:8:13
+ --> $DIR/issue-24036.rs:10:14
|
LL | let x = match 1usize {
- | _____________^
-LL | | //~^ ERROR match arms have incompatible types
+ | _____________-
LL | | 1 => |c| c + 1,
+ | | --------- this is found to be of type `[closure@$DIR/issue-24036.rs:9:14: 9:23]`
LL | | 2 => |c| c - 1,
- | | --------- match arm with an incompatible type
+ | | ^^^^^^^^^ expected closure, found a different closure
LL | | _ => |c| c - 1
LL | | };
- | |_____^ expected closure, found a different closure
+ | |_____- `match` arms have incompatible types
|
- = note: expected type `[closure@$DIR/issue-24036.rs:10:14: 10:23]`
- found type `[closure@$DIR/issue-24036.rs:11:14: 11:23]`
+ = note: expected type `[closure@$DIR/issue-24036.rs:9:14: 9:23]`
+ found type `[closure@$DIR/issue-24036.rs:10:14: 10:23]`
= note: no two closures, even if identical, have the same type
= help: consider boxing your closure and/or using it as a trait object
--> $DIR/issue-30730.rs:3:5
|
LL | use std::thread;
- | ----^^^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^^^
|
note: lint level defined here
--> $DIR/issue-30730.rs:2:9
struct S {
x: u8,
- /// The id of the parent core
+ /// The ID of the parent core
y: u8,
}
//~^^^ ERROR found a documentation comment that doesn't document anything
+
fn main() {}
struct S {
x: u8
- /// The id of the parent core
+ /// The ID of the parent core
y: u8,
}
//~^^^ ERROR found a documentation comment that doesn't document anything
+
fn main() {}
|
LL | x: u8
| - help: missing comma here: `,`
-LL | /// The id of the parent core
+LL | /// The ID of the parent core
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= help: doc comments must come before what they document, maybe a comment was intended with `//`?
}
}
-/// often times crashes, if not prints invalid strings
+/// Often crashes, if not prints invalid strings.
pub fn panics() {
let mut acc = Counter{map: HashMap::new()};
for line in vec!["123456789".to_string(), "12345678".to_string()] {
--> $DIR/lint-directives-on-use-items-issue-10534.rs:12:9
|
LL | use a::x; //~ ERROR: unused import
- | ----^^^^- help: remove the whole `use` item
+ | ^^^^
|
note: lint level defined here
--> $DIR/lint-directives-on-use-items-issue-10534.rs:1:9
--> $DIR/lint-directives-on-use-items-issue-10534.rs:21:9
|
LL | use a::y; //~ ERROR: unused import
- | ----^^^^- help: remove the whole `use` item
+ | ^^^^
|
note: lint level defined here
--> $DIR/lint-directives-on-use-items-issue-10534.rs:20:12
fn CamelCase() {} //~ WARN should have a snake
- struct snake_case; //~ WARN should have a camel
+ struct snake_case; //~ WARN should have an upper camel
}
}
-warning: type `snake_case` should have a camel case name
+warning: type `snake_case` should have an upper camel case name
--> $DIR/lint-group-nonstandard-style.rs:22:16
|
-LL | struct snake_case; //~ WARN should have a camel
- | ^^^^^^^^^^ help: convert the identifier to camel case: `SnakeCase`
+LL | struct snake_case; //~ WARN should have an upper camel
+ | ^^^^^^^^^^ help: convert the identifier to upper camel case: `SnakeCase`
|
note: lint level defined here
--> $DIR/lint-group-nonstandard-style.rs:18:17
#![allow(dead_code)]
struct ONE_TWO_THREE;
-//~^ ERROR type `ONE_TWO_THREE` should have a camel case name
+//~^ ERROR type `ONE_TWO_THREE` should have an upper camel case name
-struct foo { //~ ERROR type `foo` should have a camel case name
+struct foo { //~ ERROR type `foo` should have an upper camel case name
bar: isize,
}
-enum foo2 { //~ ERROR type `foo2` should have a camel case name
+enum foo2 { //~ ERROR type `foo2` should have an upper camel case name
Bar
}
-struct foo3 { //~ ERROR type `foo3` should have a camel case name
+struct foo3 { //~ ERROR type `foo3` should have an upper camel case name
bar: isize
}
-type foo4 = isize; //~ ERROR type `foo4` should have a camel case name
+type foo4 = isize; //~ ERROR type `foo4` should have an upper camel case name
enum Foo5 {
- bar //~ ERROR variant `bar` should have a camel case name
+ bar //~ ERROR variant `bar` should have an upper camel case name
}
-trait foo6 { //~ ERROR trait `foo6` should have a camel case name
+trait foo6 { //~ ERROR trait `foo6` should have an upper camel case name
fn dummy(&self) { }
}
-fn f<ty>(_: ty) {} //~ ERROR type parameter `ty` should have a camel case name
+fn f<ty>(_: ty) {} //~ ERROR type parameter `ty` should have an upper camel case name
#[repr(C)]
struct foo7 {
bar: isize,
}
-struct X86_64;
-
-struct X86__64; //~ ERROR type `X86__64` should have a camel case name
-
-struct Abc_123; //~ ERROR type `Abc_123` should have a camel case name
-
-struct A1_b2_c3; //~ ERROR type `A1_b2_c3` should have a camel case name
-
fn main() { }
-error: type `ONE_TWO_THREE` should have a camel case name
+error: type `ONE_TWO_THREE` should have an upper camel case name
--> $DIR/lint-non-camel-case-types.rs:4:8
|
LL | struct ONE_TWO_THREE;
- | ^^^^^^^^^^^^^ help: convert the identifier to camel case: `OneTwoThree`
+ | ^^^^^^^^^^^^^ help: convert the identifier to upper camel case: `OneTwoThree`
|
note: lint level defined here
--> $DIR/lint-non-camel-case-types.rs:1:11
LL | #![forbid(non_camel_case_types)]
| ^^^^^^^^^^^^^^^^^^^^
-error: type `foo` should have a camel case name
+error: type `foo` should have an upper camel case name
--> $DIR/lint-non-camel-case-types.rs:7:8
|
-LL | struct foo { //~ ERROR type `foo` should have a camel case name
- | ^^^ help: convert the identifier to camel case: `Foo`
+LL | struct foo { //~ ERROR type `foo` should have an upper camel case name
+ | ^^^ help: convert the identifier to upper camel case: `Foo`
-error: type `foo2` should have a camel case name
+error: type `foo2` should have an upper camel case name
--> $DIR/lint-non-camel-case-types.rs:11:6
|
-LL | enum foo2 { //~ ERROR type `foo2` should have a camel case name
- | ^^^^ help: convert the identifier to camel case: `Foo2`
+LL | enum foo2 { //~ ERROR type `foo2` should have an upper camel case name
+ | ^^^^ help: convert the identifier to upper camel case: `Foo2`
-error: type `foo3` should have a camel case name
+error: type `foo3` should have an upper camel case name
--> $DIR/lint-non-camel-case-types.rs:15:8
|
-LL | struct foo3 { //~ ERROR type `foo3` should have a camel case name
- | ^^^^ help: convert the identifier to camel case: `Foo3`
+LL | struct foo3 { //~ ERROR type `foo3` should have an upper camel case name
+ | ^^^^ help: convert the identifier to upper camel case: `Foo3`
-error: type `foo4` should have a camel case name
+error: type `foo4` should have an upper camel case name
--> $DIR/lint-non-camel-case-types.rs:19:6
|
-LL | type foo4 = isize; //~ ERROR type `foo4` should have a camel case name
- | ^^^^ help: convert the identifier to camel case: `Foo4`
+LL | type foo4 = isize; //~ ERROR type `foo4` should have an upper camel case name
+ | ^^^^ help: convert the identifier to upper camel case: `Foo4`
-error: variant `bar` should have a camel case name
+error: variant `bar` should have an upper camel case name
--> $DIR/lint-non-camel-case-types.rs:22:5
|
-LL | bar //~ ERROR variant `bar` should have a camel case name
- | ^^^ help: convert the identifier to camel case: `Bar`
+LL | bar //~ ERROR variant `bar` should have an upper camel case name
+ | ^^^ help: convert the identifier to upper camel case: `Bar`
-error: trait `foo6` should have a camel case name
+error: trait `foo6` should have an upper camel case name
--> $DIR/lint-non-camel-case-types.rs:25:7
|
-LL | trait foo6 { //~ ERROR trait `foo6` should have a camel case name
- | ^^^^ help: convert the identifier to camel case: `Foo6`
+LL | trait foo6 { //~ ERROR trait `foo6` should have an upper camel case name
+ | ^^^^ help: convert the identifier to upper camel case: `Foo6`
-error: type parameter `ty` should have a camel case name
+error: type parameter `ty` should have an upper camel case name
--> $DIR/lint-non-camel-case-types.rs:29:6
|
-LL | fn f<ty>(_: ty) {} //~ ERROR type parameter `ty` should have a camel case name
- | ^^ help: convert the identifier to camel case: `Ty`
+LL | fn f<ty>(_: ty) {} //~ ERROR type parameter `ty` should have an upper camel case name
+ | ^^ help: convert the identifier to upper camel case: `Ty`
-error: type `X86__64` should have a camel case name
- --> $DIR/lint-non-camel-case-types.rs:38:8
- |
-LL | struct X86__64; //~ ERROR type `X86__64` should have a camel case name
- | ^^^^^^^ help: convert the identifier to camel case: `X86_64`
-
-error: type `Abc_123` should have a camel case name
- --> $DIR/lint-non-camel-case-types.rs:40:8
- |
-LL | struct Abc_123; //~ ERROR type `Abc_123` should have a camel case name
- | ^^^^^^^ help: convert the identifier to camel case: `Abc123`
-
-error: type `A1_b2_c3` should have a camel case name
- --> $DIR/lint-non-camel-case-types.rs:42:8
- |
-LL | struct A1_b2_c3; //~ ERROR type `A1_b2_c3` should have a camel case name
- | ^^^^^^^^ help: convert the identifier to camel case: `A1B2C3`
-
-error: aborting due to 11 previous errors
+error: aborting due to 8 previous errors
--> $DIR/lint-unused-imports.rs:8:5
|
LL | use std::fmt::{};
- | ----^^^^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^^^^
|
note: lint level defined here
--> $DIR/lint-unused-imports.rs:1:9
--> $DIR/lint-unused-imports.rs:12:27
|
LL | use std::option::Option::{Some, None};
- | --------------------------^^^^--^^^^-- help: remove the whole `use` item
+ | ^^^^ ^^^^
error: unused import: `test::A`
--> $DIR/lint-unused-imports.rs:15:5
|
LL | use test::A; //~ ERROR unused import: `test::A`
- | ----^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^
error: unused import: `bar`
--> $DIR/lint-unused-imports.rs:24:18
|
LL | use test2::{foo, bar}; //~ ERROR unused import: `bar`
- | --^^^
- | |
- | help: remove the unused import
+ | ^^^
error: unused import: `foo::Square`
--> $DIR/lint-unused-imports.rs:52:13
|
LL | use foo::Square; //~ ERROR unused import: `foo::Square`
- | ----^^^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^^^
error: unused import: `self::g`
--> $DIR/lint-unused-imports.rs:68:9
|
LL | use self::g; //~ ERROR unused import: `self::g`
- | ----^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^
error: unused import: `test2::foo`
--> $DIR/lint-unused-imports.rs:77:9
|
LL | use test2::foo; //~ ERROR unused import: `test2::foo`
- | ----^^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^^
error: unused import: `test::B2`
--> $DIR/lint-unused-imports.rs:20:5
--> $DIR/lints-in-foreign-macros.rs:11:16
|
LL | () => {use std::string::ToString;} //~ WARN: unused import
- | ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^^^^^^^^^^^^^
...
LL | mod a { foo!(); }
| ------- in this macro invocation
--> $DIR/lints-in-foreign-macros.rs:16:18
|
LL | mod c { baz!(use std::string::ToString;); } //~ WARN: unused import
- | ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^^^^^^^^^^^^^
warning: unused import: `std::string::ToString`
--> $DIR/lints-in-foreign-macros.rs:17:19
|
LL | mod d { baz2!(use std::string::ToString;); } //~ WARN: unused import
- | ----^^^^^^^^^^^^^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^^^^^^^^^^^^^
warning: missing documentation for crate
--> $DIR/lints-in-foreign-macros.rs:4:1
--- /dev/null
+fn main() {
+ let _ = test_func1(1);
+ let _ = test_func2(1);
+}
+
+fn test_func1(n: i32) -> i32 {
+ //~^ NOTE expected `i32` because of return type
+ match n {
+ 12 => 'b',
+ //~^ ERROR mismatched types
+ //~| NOTE expected i32, found char
+ _ => 42,
+ }
+}
+
+fn test_func2(n: i32) -> i32 {
+ let x = match n {
+ //~^ NOTE `match` arms have incompatible types
+ 12 => 'b',
+ //~^ NOTE this is found to be of type `char`
+ _ => 42,
+ //~^ ERROR match arms have incompatible types
+ //~| NOTE expected char, found integer
+ //~| NOTE expected type `char`
+ };
+ x
+}
+
+fn test_func3(n: i32) -> i32 {
+ let x = match n {
+ //~^ NOTE `match` arms have incompatible types
+ 1 => 'b',
+ 2 => 'b',
+ 3 => 'b',
+ 4 => 'b',
+ 5 => 'b',
+ 6 => 'b',
+ //~^ NOTE this and all prior arms are found to be of type `char`
+ _ => 42,
+ //~^ ERROR match arms have incompatible types
+ //~| NOTE expected char, found integer
+ //~| NOTE expected type `char`
+ };
+ x
+}
--- /dev/null
+error[E0308]: mismatched types
+ --> $DIR/match-type-err-first-arm.rs:9:15
+ |
+LL | fn test_func1(n: i32) -> i32 {
+ | --- expected `i32` because of return type
+...
+LL | 12 => 'b',
+ | ^^^ expected i32, found char
+
+error[E0308]: match arms have incompatible types
+ --> $DIR/match-type-err-first-arm.rs:21:14
+ |
+LL | let x = match n {
+ | _____________-
+LL | | //~^ NOTE `match` arms have incompatible types
+LL | | 12 => 'b',
+ | | --- this is found to be of type `char`
+LL | | //~^ NOTE this is found to be of type `char`
+LL | | _ => 42,
+ | | ^^ expected char, found integer
+... |
+LL | | //~| NOTE expected type `char`
+LL | | };
+ | |_____- `match` arms have incompatible types
+ |
+ = note: expected type `char`
+ found type `{integer}`
+
+error[E0308]: match arms have incompatible types
+ --> $DIR/match-type-err-first-arm.rs:39:14
+ |
+LL | let x = match n {
+ | _____________-
+LL | | //~^ NOTE `match` arms have incompatible types
+LL | | 1 => 'b',
+LL | | 2 => 'b',
+... |
+LL | | 6 => 'b',
+ | | --- this and all prior arms are found to be of type `char`
+LL | | //~^ NOTE this and all prior arms are found to be of type `char`
+LL | | _ => 42,
+ | | ^^ expected char, found integer
+... |
+LL | | //~| NOTE expected type `char`
+LL | | };
+ | |_____- `match` arms have incompatible types
+ |
+ = note: expected type `char`
+ found type `{integer}`
+
+error: aborting due to 3 previous errors
+
+For more information about this error, try `rustc --explain E0308`.
#![stable(feature = "stable_test_feature", since = "1.0.0")]
pub fn unmarked() {
- //~^ ERROR This node does not have a stability attribute
+ //~^ ERROR function has missing stability attribute
()
}
pub mod bar {
// #[stable] is not inherited
pub fn unmarked() {}
- //~^ ERROR This node does not have a stability attribute
+ //~^ ERROR function has missing stability attribute
}
-error: This node does not have a stability attribute
+error: function has missing stability attribute
--> $DIR/missing-stability.rs:8:1
|
LL | / pub fn unmarked() {
-LL | | //~^ ERROR This node does not have a stability attribute
+LL | | //~^ ERROR function has missing stability attribute
LL | | ()
LL | | }
| |_^
-error: This node does not have a stability attribute
+error: function has missing stability attribute
--> $DIR/missing-stability.rs:22:5
|
LL | pub fn unmarked() {}
--- /dev/null
+// revisions: migrate nll
+//[migrate]compile-flags: -Z borrowck=migrate
+#![cfg_attr(nll, feature(nll))]
+
+// compile-pass
+
+// Test that we propagate region relations from closures precisely when there is
+// more than one non-local lower bound.
+
+// In this case the closure has signature
+// |x: &'4 mut (&'5 (&'1 str, &'2 str), &'3 str)| -> ..
+// We end up with a `'3: '5` constraint that we can propagate as
+// `'3: '1`, `'3: '2`, but previously we approximated it as `'3: 'static`.
+
+// As an optimization, we primarily propagate bounds for the "representative"
+// of each SCC. As such we have these two similar cases where hopefully one
+// of them will test the case we want (case2, when this test was added).
+mod case1 {
+ fn f(s: &str) {
+ g(s, |x| h(x));
+ }
+
+ fn g<T, F>(_: T, _: F)
+ where F: Fn(&mut (&(T, T), T)) {}
+
+ fn h<T>(_: &mut (&(T, T), T)) {}
+}
+
+mod case2 {
+ fn f(s: &str) {
+ g(s, |x| h(x));
+ }
+
+ fn g<T, F>(_: T, _: F)
+ where F: Fn(&mut (T, &(T, T))) {}
+
+ fn h<T>(_: &mut (T, &(T, T))) {}
+}
+
+fn main() {}
--- /dev/null
+#![allow(dead_code)]
+#![feature(nll)]
+
+struct A<'a>(&'a ());
+
+trait Y {
+ const X: i32;
+}
+
+impl Y for A<'static> {
+ const X: i32 = 10;
+}
+
+fn foo<'a>(x: i32) {
+ match x {
+ // This uses <A<'a> as Y>::X, but `A<'a>` does not implement `Y`.
+ A::<'a>::X..=A::<'static>::X => (), //~ ERROR lifetime may not live long enough
+ _ => (),
+ }
+}
+
+fn bar<'a>(x: i32) {
+ match x {
+ // This uses <A<'a> as Y>::X, but `A<'a>` does not implement `Y`.
+ A::<'static>::X..=A::<'a>::X => (), //~ ERROR lifetime may not live long enough
+ _ => (),
+ }
+}
+
+fn main() {}
--- /dev/null
+error: lifetime may not live long enough
+ --> $DIR/issue-58299.rs:17:9
+ |
+LL | fn foo<'a>(x: i32) {
+ | -- lifetime `'a` defined here
+...
+LL | A::<'a>::X..=A::<'static>::X => (), //~ ERROR lifetime may not live long enough
+ | ^^^^^^^^^^ requires that `'a` must outlive `'static`
+
+error: lifetime may not live long enough
+ --> $DIR/issue-58299.rs:25:27
+ |
+LL | fn bar<'a>(x: i32) {
+ | -- lifetime `'a` defined here
+...
+LL | A::<'static>::X..=A::<'a>::X => (), //~ ERROR lifetime may not live long enough
+ | ^^^^^^^^^^ requires that `'a` must outlive `'static`
+
+error: aborting due to 2 previous errors
+
}
pub trait Trie<H: Hasher, C: NodeCodec<H>> {
- /// Return the root of the trie.
+ /// Returns the root of the trie.
fn root(&self) -> &H::Out;
/// Is the trie empty?
#[rustc_on_unimplemented="a collection of type `{Self}` cannot be built from an iterator over elements of type `{A}`"]
trait MyFromIterator<A> {
- /// Build a container with elements from an external iterator.
+ /// Builds a container with elements from an external iterator.
fn my_from_iter<T: Iterator<Item=A>>(iterator: T) -> Self;
}
#[rustc_on_unimplemented="a collection of type `{Self}` cannot be built from an iterator over elements of type `{A}`"]
trait MyFromIterator<A> {
- /// Build a container with elements from an external iterator.
+ /// Builds a container with elements from an external iterator.
fn my_from_iter<T: Iterator<Item=A>>(iterator: T) -> Self;
}
--> $DIR/privacy-struct-ctor.rs:20:9
|
LL | Z;
- | ^ constructor is not visible here due to private fields
-help: a tuple struct with a similar name exists
- |
-LL | S;
| ^
-help: possible better candidate is found in another module, you can import it into scope
- |
-LL | use m::n::Z;
- |
+ | |
+ | constructor is not visible here due to private fields
+ | help: a tuple struct with a similar name exists: `S`
error[E0423]: expected value, found struct `S`
--> $DIR/privacy-struct-ctor.rs:33:5
|
LL | S;
| ^ constructor is not visible here due to private fields
-help: possible better candidate is found in another module, you can import it into scope
- |
-LL | use m::S;
- |
error[E0423]: expected value, found struct `S2`
--> $DIR/privacy-struct-ctor.rs:38:5
--> $DIR/basic.rs:26:9
|
LL | use m::Tr1 as _; //~ WARN unused import
- | ----^^^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^^^
|
note: lint level defined here
--> $DIR/basic.rs:4:9
--> $DIR/basic.rs:27:9
|
LL | use S as _; //~ WARN unused import
- | ----^^^^^^- help: remove the whole `use` item
+ | ^^^^^^
--> $DIR/unused-2018.rs:6:9
|
LL | use core::any; //~ ERROR unused import: `core::any`
- | ----^^^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^^^
|
note: lint level defined here
--> $DIR/unused-2018.rs:3:9
--> $DIR/unused-2018.rs:10:9
|
LL | use core; //~ ERROR unused import: `core`
- | ----^^^^- help: remove the whole `use` item
+ | ^^^^
error: aborting due to 2 previous errors
LL | #![warn(unused)]
| ^^^^^^
= note: #[warn(unused_imports)] implied by #[warn(unused)]
-help: remove the unused imports
- |
-LL | use std::cmp::{min};
- | -- --
--- /dev/null
+#![feature(staged_api)]
+//~^ ERROR crate has missing stability attribute
+
+fn main() {}
--- /dev/null
+error: crate has missing stability attribute
+ --> $DIR/missing-stability-attr-at-top-level.rs:1:1
+ |
+LL | / #![feature(staged_api)]
+LL | | //~^ ERROR crate has missing stability attribute
+LL | |
+LL | | fn main() {}
+ | |____________^
+
+error: aborting due to previous error
+
#![stable(feature = "test", since = "0")]
#[stable(feature = "test", since = "0")]
-pub struct Reverse<T>(pub T); //~ ERROR This node does not have a stability attribute
+pub struct Reverse<T>(pub T); //~ ERROR field has missing stability attribute
fn main() {
// Make sure the field is used to fill the stability cache
-error: This node does not have a stability attribute
+error: field has missing stability attribute
--> $DIR/stability-attribute-issue-43027.rs:5:23
|
-LL | pub struct Reverse<T>(pub T); //~ ERROR This node does not have a stability attribute
+LL | pub struct Reverse<T>(pub T); //~ ERROR field has missing stability attribute
| ^^^^^
error: aborting due to previous error
#![stable(feature = "stable_test_feature", since = "1.0.0")]
#[macro_export]
-macro_rules! mac { //~ ERROR This node does not have a stability attribute
+macro_rules! mac { //~ ERROR macro has missing stability attribute
() => ()
}
-error: This node does not have a stability attribute
+error: macro has missing stability attribute
--> $DIR/stability-attribute-sanity-3.rs:8:1
|
-LL | / macro_rules! mac { //~ ERROR This node does not have a stability attribute
+LL | / macro_rules! mac { //~ ERROR macro has missing stability attribute
LL | | () => ()
LL | | }
| |_^
--> $DIR/as-ref.rs:6:27
|
LL | opt.map(|arg| takes_ref(arg));
- | - ^^^ expected &Foo, found struct `Foo`
+ | --- ^^^ expected &Foo, found struct `Foo`
| |
- | help: consider using `as_ref` instead: `as_ref().`
+ | help: consider using `as_ref` instead: `as_ref().map`
|
= note: expected type `&Foo`
found type `Foo`
--> $DIR/as-ref.rs:8:37
|
LL | opt.and_then(|arg| Some(takes_ref(arg)));
- | - ^^^ expected &Foo, found struct `Foo`
+ | -------- ^^^ expected &Foo, found struct `Foo`
| |
- | help: consider using `as_ref` instead: `as_ref().`
+ | help: consider using `as_ref` instead: `as_ref().and_then`
|
= note: expected type `&Foo`
found type `Foo`
--> $DIR/as-ref.rs:11:27
|
LL | opt.map(|arg| takes_ref(arg));
- | - ^^^ expected &Foo, found struct `Foo`
+ | --- ^^^ expected &Foo, found struct `Foo`
| |
- | help: consider using `as_ref` instead: `as_ref().`
+ | help: consider using `as_ref` instead: `as_ref().map`
|
= note: expected type `&Foo`
found type `Foo`
--> $DIR/as-ref.rs:13:35
|
LL | opt.and_then(|arg| Ok(takes_ref(arg)));
- | - ^^^ expected &Foo, found struct `Foo`
+ | -------- ^^^ expected &Foo, found struct `Foo`
| |
- | help: consider using `as_ref` instead: `as_ref().`
+ | help: consider using `as_ref` instead: `as_ref().and_then`
|
= note: expected type `&Foo`
found type `Foo`
-//! This is a client of the `a` crate defined in "svn-a-base.rs". The
-//! rpass and cfail tests (such as "run-pass/svh-add-comment.rs") use
+//! This is a client of the `a` crate defined in `svn-a-base.rs`. The
+//! rpass and cfail tests (such as `run-pass/svh-add-comment.rs`) use
//! it by swapping in a different object code library crate built from
-//! some variant of "svn-a-base.rs", and then we are checking if the
+//! some variant of `svn-a-base.rs`, and then we are checking if the
//! compiler properly ignores or accepts the change, based on whether
//! the change could affect the downstream crate content or not
//! (#14132).
--- /dev/null
+#![feature(trait_alias)]
+
+trait Foo {}
+auto trait A = Foo; //~ ERROR trait aliases cannot be `auto`
+unsafe trait B = Foo; //~ ERROR trait aliases cannot be `unsafe`
+
+fn main() {}
--- /dev/null
+error: trait aliases cannot be `auto`
+ --> $DIR/trait-alias-syntax.rs:4:19
+ |
+LL | auto trait A = Foo; //~ ERROR trait aliases cannot be `auto`
+ | ^ trait aliases cannot be `auto`
+
+error: trait aliases cannot be `unsafe`
+ --> $DIR/trait-alias-syntax.rs:5:21
+ |
+LL | unsafe trait B = Foo; //~ ERROR trait aliases cannot be `unsafe`
+ | ^ trait aliases cannot be `unsafe`
+
+error: aborting due to 2 previous errors
+
--> $DIR/use-nested-groups-unused-imports.rs:16:11
|
LL | use foo::{Foo, bar::{baz::{}, foobar::*}, *};
- | ----------^^^--------^^^^^^^--^^^^^^^^^---^-- help: remove the whole `use` item
+ | ^^^ ^^^^^^^ ^^^^^^^^^ ^
|
note: lint level defined here
--> $DIR/use-nested-groups-unused-imports.rs:3:9
--> $DIR/use-nested-groups-unused-imports.rs:18:24
|
LL | use foo::bar::baz::{*, *};
- | --^
- | |
- | help: remove the unused import
+ | ^
error: unused import: `foo::{}`
--> $DIR/use-nested-groups-unused-imports.rs:20:5
|
LL | use foo::{};
- | ----^^^^^^^- help: remove the whole `use` item
+ | ^^^^^^^
error: aborting due to 3 previous errors
-//
-
fn foo<
'β, //~ ERROR non-ascii idents are not fully supported
γ //~ ERROR non-ascii idents are not fully supported
- //~^ WARN type parameter `γ` should have a camel case name
+ //~^ WARN type parameter `γ` should have an upper camel case name
>() {}
struct X {
error[E0658]: non-ascii idents are not fully supported. (see issue #55467)
- --> $DIR/utf8_idents.rs:4:5
+ --> $DIR/utf8_idents.rs:2:5
|
LL | 'β, //~ ERROR non-ascii idents are not fully supported
| ^^
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
error[E0658]: non-ascii idents are not fully supported. (see issue #55467)
- --> $DIR/utf8_idents.rs:5:5
+ --> $DIR/utf8_idents.rs:3:5
|
LL | γ //~ ERROR non-ascii idents are not fully supported
| ^
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
error[E0658]: non-ascii idents are not fully supported. (see issue #55467)
- --> $DIR/utf8_idents.rs:10:5
+ --> $DIR/utf8_idents.rs:8:5
|
LL | δ: usize //~ ERROR non-ascii idents are not fully supported
| ^
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
error[E0658]: non-ascii idents are not fully supported. (see issue #55467)
- --> $DIR/utf8_idents.rs:14:9
+ --> $DIR/utf8_idents.rs:12:9
|
LL | let α = 0.00001f64; //~ ERROR non-ascii idents are not fully supported
| ^
|
= help: add #![feature(non_ascii_idents)] to the crate attributes to enable
-warning: type parameter `γ` should have a camel case name
- --> $DIR/utf8_idents.rs:5:5
+warning: type parameter `γ` should have an upper camel case name
+ --> $DIR/utf8_idents.rs:3:5
|
LL | γ //~ ERROR non-ascii idents are not fully supported
- | ^ help: convert the identifier to camel case: `Γ`
+ | ^ help: convert the identifier to upper camel case: `Γ`
|
= note: #[warn(non_camel_case_types)] on by default
#[derive(Clone)]
pub struct Config {
- /// Whether to overwrite stderr/stdout files instead of complaining about changes in output
+ /// `true` to to overwrite stderr/stdout files instead of complaining about changes in output.
pub bless: bool,
- /// The library paths required for running the compiler
+ /// The library paths required for running the compiler.
pub compile_lib_path: PathBuf,
- /// The library paths required for running compiled programs
+ /// The library paths required for running compiled programs.
pub run_lib_path: PathBuf,
- /// The rustc executable
+ /// The rustc executable.
pub rustc_path: PathBuf,
- /// The rustdoc executable
+ /// The rustdoc executable.
pub rustdoc_path: Option<PathBuf>,
- /// The python executable to use for LLDB
+ /// The Python executable to use for LLDB.
pub lldb_python: String,
- /// The python executable to use for htmldocck
+ /// The Python executable to use for htmldocck.
pub docck_python: String,
- /// The llvm FileCheck binary path
+ /// The LLVM `FileCheck` binary path.
pub llvm_filecheck: Option<PathBuf>,
- /// The valgrind path
+ /// The valgrind path.
pub valgrind_path: Option<String>,
/// Whether to fail if we can't run run-pass-valgrind tests under valgrind
}
/// Absolute path to the directory where all output for the given
-/// test/revision should reside. Example:
+/// test/revision should reside. Example:
/// /path/to/build/host-triple/test/ui/relative/testname.revision.mode/
pub fn output_base_dir(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
output_relative_path(config, &testpaths.relative_dir)
}
/// Absolute path to the base filename used as output for the given
-/// test/revision. Example:
+/// test/revision. Example:
/// /path/to/build/host-triple/test/ui/relative/testname.revision.mode/testname
pub fn output_base_name(config: &Config, testpaths: &TestPaths, revision: Option<&str>) -> PathBuf {
output_base_dir(config, testpaths, revision).join(testpaths.file.file_stem().unwrap())
/// Whether to ignore the test.
#[derive(Clone, Copy, PartialEq, Debug)]
pub enum Ignore {
- /// Run it.
+ /// Runs it.
Run,
/// Ignore it totally.
Ignore,
props
}
- /// Load properties from `testfile` into `props`. If a property is
+ /// Loads properties from `testfile` into `props`. If a property is
/// tied to a particular revision `foo` (indicated by writing
/// `//[foo]`), then the property is ignored unless `cfg` is
/// `Some("foo")`.
}))
}
-/// Returns true if the given target is an Android target for the
+/// Returns `true` if the given target is an Android target for the
/// purposes of GDB testing.
fn is_android_gdb_target(target: &String) -> bool {
match &target[..] {
}
}
- /// Returns true if we should report an error about `actual_error`,
+ /// Returns `true` if we should report an error about `actual_error`,
/// which did not match any of the expected error. We always require
/// errors/warnings to be explicitly listed, but only require
/// helps/notes if there are explicit helps/notes given.
fs::write(&outfile, out).unwrap();
}
- /// Create a filename for output with the given extension. Example:
- /// /.../testname.revision.mode/testname.extension
+ /// Creates a filename for output with the given extension.
+ /// E.g., `/.../testname.revision.mode/testname.extension`.
fn make_out_name(&self, extension: &str) -> PathBuf {
self.output_base_name().with_extension(extension)
}
- /// Directory where auxiliary files are written. Example:
- /// /.../testname.revision.mode/auxiliary/
+ /// Gets the directory where auxiliary files are written.
+ /// E.g., `/.../testname.revision.mode/auxiliary/`.
fn aux_output_dir_name(&self) -> PathBuf {
self.output_base_dir()
.join("auxiliary")
output_testname_unique(self.config, self.testpaths, self.safe_revision())
}
- /// The revision, ignored for Incremental since it wants all revisions in
+ /// The revision, ignored for incremental compilation since it wants all revisions in
/// the same directory.
fn safe_revision(&self) -> Option<&str> {
if self.config.mode == Incremental {
}
}
- /// Absolute path to the directory where all output for the given
- /// test/revision should reside. Example:
- /// /path/to/build/host-triple/test/ui/relative/testname.revision.mode/
+ /// Gets the absolute path to the directory where all output for the given
+ /// test/revision should reside.
+ /// E.g., `/path/to/build/host-triple/test/ui/relative/testname.revision.mode/`.
fn output_base_dir(&self) -> PathBuf {
output_base_dir(self.config, self.testpaths, self.safe_revision())
}
- /// Absolute path to the base filename used as output for the given
- /// test/revision. Example:
- /// /.../relative/testname.revision.mode/testname
+ /// Gets the absolute path to the base filename used as output for the given
+ /// test/revision.
+ /// E.g., `/.../relative/testname.revision.mode/testname`.
fn output_base_name(&self) -> PathBuf {
output_base_name(self.config, self.testpaths, self.safe_revision())
}
}
}
-/// Load all the metadata files from `metadata_dir` into an in-memory map.
+/// Loads all the metadata files from `metadata_dir` into an in-memory map.
fn load_all_errors(metadata_dir: &Path) -> Result<ErrorMetadataMap, Box<dyn Error>> {
let mut all_errors = BTreeMap::new();
'miri': '@oli-obk @RalfJung @eddyb',
'clippy-driver': '@Manishearth @llogiq @mcarton @oli-obk',
'rls': '@nrc @Xanewok',
- 'rustfmt': '@nrc',
+ 'rustfmt': '@nrc @topecongiro',
'book': '@carols10cents @steveklabnik',
'nomicon': '@frewsxcv @Gankro',
'reference': '@steveklabnik @Havvy @matthewjasper @alercah',
'rust-by-example': '@steveklabnik @marioidival @projektir',
}
+REPOS = {
+ 'miri': 'https://github.com/solson/miri',
+ 'clippy-driver': 'https://github.com/rust-lang/rust-clippy',
+ 'rls': 'https://github.com/rust-lang/rls',
+ 'rustfmt': 'https://github.com/rust-lang/rustfmt',
+ 'book': 'https://github.com/rust-lang/book',
+ 'nomicon': 'https://github.com/rust-lang-nursery/nomicon',
+ 'reference': 'https://github.com/rust-lang-nursery/reference',
+ 'rust-by-example': 'https://github.com/rust-lang/rust-by-example',
+}
+
def read_current_status(current_commit, path):
'''Reads build status of `current_commit` from content of `history/*.tsv`
return json.loads(status)
return {}
+def issue(
+ tool,
+ maintainers,
+ relevant_pr_number,
+ relevant_pr_user,
+ pr_reviewer,
+):
+ # Open an issue about the toolstate failure.
+ gh_url = 'https://api.github.com/repos/rust-lang/rust/issues'
+ assignees = [x.strip() for x in maintainers.split('@') if x != '']
+ assignees.append(relevant_pr_user)
+ response = urllib2.urlopen(urllib2.Request(
+ gh_url,
+ json.dumps({
+ 'body': textwrap.dedent('''\
+ Hello, this is your friendly neighborhood mergebot.
+ After merging PR {}, I observed that the tool {} no longer builds.
+ A follow-up PR to the repository {} is needed to fix the fallout.
+
+ cc @{}, do you think you would have time to do the follow-up work?
+ If so, that would be great!
+
+ cc @{}, the PR reviewer, and @rust-lang/compiler -- nominating for prioritization.
+
+ ''').format(relevant_pr_number, tool, REPOS[tool], relevant_pr_user, pr_reviewer),
+ 'title': '`{}` no longer builds after {}'.format(tool, relevant_pr_number),
+ 'assignees': assignees,
+ 'labels': ['T-compiler', 'I-nominated'],
+ }),
+ {
+ 'Authorization': 'token ' + github_token,
+ 'Content-Type': 'application/json',
+ }
+ ))
+ response.read()
def update_latest(
current_commit,
relevant_pr_number,
relevant_pr_url,
+ relevant_pr_user,
+ pr_reviewer,
current_datetime
):
'''Updates `_data/latest.json` to match build result of the given commit.
for status in latest:
tool = status['tool']
changed = False
+ build_failed = False
for os, s in current_status.items():
old = status[os]
new = s.get(tool, old)
status[os] = new
if new > old:
+ # things got fixed or at least the status quo improved
changed = True
message += '🎉 {} on {}: {} → {} (cc {}, @rust-lang/infra).\n' \
.format(tool, os, old, new, MAINTAINERS.get(tool))
elif new < old:
+ # tests or builds are failing and were not failing before
changed = True
- message += '💔 {} on {}: {} → {} (cc {}, @rust-lang/infra).\n' \
- .format(tool, os, old, new, MAINTAINERS.get(tool))
+ title = '💔 {} on {}: {} → {}' \
+ .format(tool, os, old, new)
+ message += '{} (cc {}, @rust-lang/infra).\n' \
+ .format(title, MAINTAINERS.get(tool))
+ # only create issues for build failures. Other failures can be spurious
+ if new == 'build-fail':
+ build_failed = True
+
+ if build_failed:
+ try:
+ issue(
+ tool, MAINTAINERS.get(tool),
+ relevant_pr_number, relevant_pr_user, pr_reviewer,
+ )
+ except IOError as (errno, strerror):
+ # network errors will simply end up not creating an issue, but that's better
+ # than failing the entire build job
+ print "I/O error({0}): {1}".format(errno, strerror)
+ except:
+ print "Unexpected error:", sys.exc_info()[0]
+ raise
if changed:
status['commit'] = current_commit
save_message_to_path = sys.argv[3]
github_token = sys.argv[4]
- relevant_pr_match = re.search('#([0-9]+)', cur_commit_msg)
+ # assume that PR authors are also owners of the repo where the branch lives
+ relevant_pr_match = re.search(
+ 'Auto merge of #([0-9]+) - ([^:]+):[^,]+ r=([^\s]+)',
+ cur_commit_msg,
+ )
if relevant_pr_match:
number = relevant_pr_match.group(1)
+ relevant_pr_user = relevant_pr_match.group(2)
relevant_pr_number = 'rust-lang/rust#' + number
relevant_pr_url = 'https://github.com/rust-lang/rust/pull/' + number
+ pr_reviewer = relevant_pr_match.group(3)
else:
number = '-1'
+ relevant_pr_user = '<unknown user>'
relevant_pr_number = '<unknown PR>'
relevant_pr_url = '<unknown>'
+ pr_reviewer = '<unknown reviewer>'
message = update_latest(
cur_commit,
relevant_pr_number,
relevant_pr_url,
+ relevant_pr_user,
+ pr_reviewer,
cur_datetime
)
if not message:
EXP_END,
}
-/// Returns whether `line` appears to be a line comment containing an URL,
+/// Returns `true` if `line` appears to be a line comment containing an URL,
/// possibly with a Markdown link label in front, and nothing else.
/// The Markdown link label, if present, may not contain whitespace.
/// Lines of this form are allowed to be overlength, because Markdown
state == EXP_END
}
-/// Returns whether `line` is allowed to be longer than the normal limit.
+/// Returns `true` if `line` is allowed to be longer than the normal limit.
/// Currently there is only one exception, for long URLs, but more
/// may be added in the future.
fn long_line_is_ok(line: &str) -> bool {
.collect()
}
-/// Retrieve file names of all library feature sections in the Unstable Book with:
+/// Retrieves file names of all library feature sections in the Unstable Book with:
///
/// * hyphens replaced by underscores,
/// * the markdown suffix ('.md') removed.