- "**"
defaults:
run:
- shell: "python src/ci/exec-with-shell.py {0}"
+ shell: bash
jobs:
pr:
name: PR
steps:
- name: disable git crlf conversion
run: git config --global core.autocrlf false
- shell: bash
- name: checkout the source code
uses: actions/checkout@v1
with:
steps:
- name: disable git crlf conversion
run: git config --global core.autocrlf false
- shell: bash
- name: checkout the source code
uses: actions/checkout@v1
with:
- name: dist-powerpc64le-linux
os: ubuntu-latest-xl
env: {}
+ - name: dist-riscv64-linux
+ os: ubuntu-latest-xl
+ env: {}
- name: dist-s390x-linux
os: ubuntu-latest-xl
env: {}
steps:
- name: disable git crlf conversion
run: git config --global core.autocrlf false
- shell: bash
- name: checkout the source code
uses: actions/checkout@v1
with:
steps:
- name: disable git crlf conversion
run: git config --global core.autocrlf false
- shell: bash
- name: checkout the source code
uses: actions/checkout@v1
with:
fetch-depth: 2
- name: publish toolstate
run: src/ci/publish_toolstate.sh
+ shell: bash
env:
TOOLSTATE_REPO_ACCESS_TOKEN: "${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}"
if: success() && !env.SKIP_JOB
[submodule "src/tools/rust-analyzer"]
path = src/tools/rust-analyzer
url = https://github.com/rust-analyzer/rust-analyzer.git
+[submodule "src/backtrace"]
+ path = src/backtrace
+ url = https://github.com/rust-lang/backtrace-rs.git
To get started, read the [Getting Started] guide in the [rustc-dev-guide].
+## Bug reports
+
+Did a compiler error message tell you to come here? If you want to create an ICE report,
+refer to [this section][contributing-bug-reports] and [open an issue][issue template].
+
[Getting Started]: https://rustc-dev-guide.rust-lang.org/getting-started.html
[rustc-dev-guide]: https://rustc-dev-guide.rust-lang.org/
+[contributing-bug-reports]: https://rustc-dev-guide.rust-lang.org/contributing.html#bug-reports
+[issue template]: https://github.com/rust-lang/rust/issues/new/choose
# This file is automatically @generated by Cargo.
# It is not intended for manual editing.
[[package]]
-name = "adler32"
-version = "1.0.3"
+name = "addr2line"
+version = "0.13.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1b6a2d3371669ab3ca9797670853d61402b03d0b4b9ebf33d677dfa720203072"
+dependencies = [
+ "compiler_builtins",
+ "gimli",
+ "rustc-std-workspace-alloc",
+ "rustc-std-workspace-core",
+]
+
+[[package]]
+name = "adler"
+version = "0.2.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "7e522997b529f05601e05166c07ed17789691f562762c7f3b987263d2dedee5c"
+checksum = "ccc9a9dd069569f212bc4330af9f17c4afb5e8ce185e83dbb14f1349dda18b10"
+dependencies = [
+ "compiler_builtins",
+ "rustc-std-workspace-core",
+]
[[package]]
name = "aho-corasick"
[[package]]
name = "backtrace"
-version = "0.3.46"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "b1e692897359247cc6bb902933361652380af0f1b7651ae5c5013407f30e109e"
+version = "0.3.50"
dependencies = [
- "backtrace-sys",
+ "addr2line",
"cfg-if",
- "compiler_builtins",
"libc",
+ "miniz_oxide",
+ "object",
"rustc-demangle",
- "rustc-std-workspace-core",
-]
-
-[[package]]
-name = "backtrace-sys"
-version = "0.1.37"
-source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "18fbebbe1c9d1f383a9cc7e8ccdb471b91c8d024ee9c2ca5b5346121fe8b4399"
-dependencies = [
- "cc",
- "compiler_builtins",
- "libc",
- "rustc-std-workspace-core",
]
[[package]]
[[package]]
name = "crc32fast"
-version = "1.1.2"
+version = "1.2.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "e91d5240c6975ef33aeb5f148f35275c25eda8e8a5f95abe421978b05b8bf192"
+checksum = "ba125de2af0df55319f41944744ad91c71113bf74a4646efff39afe1f6842db1"
dependencies = [
"cfg-if",
]
[[package]]
name = "flate2"
-version = "1.0.12"
+version = "1.0.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "ad3c5233c9a940c8719031b423d7e6c16af66e031cb0420b0896f5245bf181d3"
+checksum = "68c90b0fc46cf89d227cc78b40e494ff81287a92dd07631e5af0d06fe3cf885e"
dependencies = [
"cfg-if",
"crc32fast",
"wasi",
]
+[[package]]
+name = "gimli"
+version = "0.22.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "aaf91faf136cb47367fa430cd46e37a788775e7fa104f8b4bcb3861dc389b724"
+dependencies = [
+ "compiler_builtins",
+ "rustc-std-workspace-alloc",
+ "rustc-std-workspace-core",
+]
+
[[package]]
name = "git2"
version = "0.13.5"
[[package]]
name = "miniz_oxide"
-version = "0.3.5"
+version = "0.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
-checksum = "6f3f74f726ae935c3f514300cc6773a0c9492abc5e972d42ba0c0ebb88757625"
+checksum = "be0f75932c1f6cfae3c04000e40114adf955636e19040f9c0a2c380702aa1c7f"
dependencies = [
- "adler32",
+ "adler",
+ "compiler_builtins",
+ "rustc-std-workspace-alloc",
+ "rustc-std-workspace-core",
]
[[package]]
"libc",
]
+[[package]]
+name = "object"
+version = "0.20.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "1ab52be62400ca80aa00285d25253d7f7c437b7375c4de678f5405d3afe82ca5"
+dependencies = [
+ "compiler_builtins",
+ "rustc-std-workspace-alloc",
+ "rustc-std-workspace-core",
+]
+
[[package]]
name = "once_cell"
version = "1.1.0"
name = "panic_abort"
version = "0.0.0"
dependencies = [
+ "cfg-if",
"compiler_builtins",
"core",
"libc",
"winapi 0.3.8",
]
+[[package]]
+name = "pathdiff"
+version = "0.2.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "877630b3de15c0b64cc52f659345724fbf6bdad9bd9566699fc53688f3c34a34"
+
[[package]]
name = "percent-encoding"
version = "1.0.1"
"rls-span",
]
+[[package]]
+name = "rust-demangler"
+version = "0.0.0"
+dependencies = [
+ "rustc-demangle",
+]
+
[[package]]
name = "rustbook"
version = "0.1.0"
"log",
"memmap",
"num_cpus",
+ "pathdiff",
"rustc_apfloat",
"rustc_ast",
"rustc_attr",
name = "std"
version = "0.0.0"
dependencies = [
+ "addr2line",
"alloc",
- "backtrace",
"cfg-if",
"compiler_builtins",
"core",
"hashbrown",
"hermit-abi",
"libc",
+ "miniz_oxide",
+ "object",
"panic_abort",
"panic_unwind",
"profiler_builtins",
"rand 0.7.3",
+ "rustc-demangle",
"unwind",
"wasi",
]
name = "test"
version = "0.0.0"
dependencies = [
+ "cfg-if",
"core",
"getopts",
"libc",
"src/tools/remote-test-client",
"src/tools/remote-test-server",
"src/tools/rust-installer",
+ "src/tools/rust-demangler",
"src/tools/cargo",
"src/tools/rustdoc",
"src/tools/rls",
rustc-std-workspace-alloc = { path = 'src/tools/rustc-std-workspace-alloc' }
rustc-std-workspace-std = { path = 'src/tools/rustc-std-workspace-std' }
+# This crate's integration with libstd is a bit wonky, so we use a submodule
+# instead of a crates.io dependency. Make sure everything else in the repo is
+# also using the submodule, however, so we can avoid duplicate copies of the
+# source code for this crate.
+backtrace = { path = "src/backtrace" }
+
[patch."https://github.com/rust-lang/rust-clippy"]
clippy_lints = { path = "src/tools/clippy/clippy_lints" }
// Prints "abcdefghijklmnopqrstuvwxyz"
```
- [`OsString` now implements `FromStr`.][71662]
-- [The `saturating_neg` method as been added to all signed integer primitive
+- [The `saturating_neg` method has been added to all signed integer primitive
types, and the `saturating_abs` method has been added for all integer
primitive types.][71886]
- [`Arc<T>`, `Rc<T>` now implement `From<Cow<'_, T>>`, and `Box` now
Cargo
-----
+- [Cargo uses the `embed-bitcode` flag to optimize disk usage and build
+ time.][cargo/8066]
+
Misc
----
- [Rustdoc now supports strikethrough text in Markdown.][71928] E.g.
- [Rustdoc's CLI's extra error exit codes have been removed.][71900] These were
previously undocumented and not intended for public use. Rustdoc still provides
a non-zero exit code on errors.
+- [Rustc's `lto` flag is incompatible with the new `embed-bitcode=no`.][71848]
+ This may cause issues if LTO is enabled through `RUSTFLAGS` or `cargo rustc`
+ flags while cargo is adding `embed-bitcode` itself. The recommended way to
+ control LTO is with Cargo profiles, either in `Cargo.toml` or `.cargo/config`,
+ or by setting `CARGO_PROFILE_<name>_LTO` in the environment.
Internals Only
--------------
- [Make clippy a git subtree instead of a git submodule][70655]
- [Unify the undo log of all snapshot types][69464]
+[71848]: https://github.com/rust-lang/rust/issues/71848/
[73420]: https://github.com/rust-lang/rust/issues/73420/
[72324]: https://github.com/rust-lang/rust/pull/72324/
[71843]: https://github.com/rust-lang/rust/pull/71843/
[69813]: https://github.com/rust-lang/rust/pull/69813/
[69464]: https://github.com/rust-lang/rust/pull/69464/
[68717]: https://github.com/rust-lang/rust/pull/68717/
+[cargo/8066]: https://github.com/rust-lang/cargo/pull/8066
[`Arc::as_ptr`]: https://doc.rust-lang.org/stable/std/sync/struct.Arc.html#method.as_ptr
[`BTreeMap::remove_entry`]: https://doc.rust-lang.org/stable/std/collections/struct.BTreeMap.html#method.remove_entry
[`Rc::as_ptr`]: https://doc.rust-lang.org/stable/std/rc/struct.Rc.html#method.as_ptr
"src/tools/rust-analyzer",
"src/tools/rust-installer",
"src/tools/rustfmt",
+ "src/backtrace",
# We do not format this file as it is externally sourced and auto-generated.
"src/libstd/sys/cloudabi/abi/cloudabi.rs",
--- /dev/null
+Subproject commit 8f89434446f72f27f8145d8bbc1a302c6ef29d1e
self.use_vendored_sources = ''
self.verbose = False
self.git_version = None
+ self.nix_deps_dir = None
def download_stage0(self):
"""Fetch the build system for Rust, written in Rust
filename = "rustc-{}-{}{}".format(rustc_channel, self.build,
tarball_suffix)
self._download_stage0_helper(filename, "rustc", tarball_suffix)
- self.fix_executable("{}/bin/rustc".format(self.bin_root()))
- self.fix_executable("{}/bin/rustdoc".format(self.bin_root()))
+ self.fix_bin_or_dylib("{}/bin/rustc".format(self.bin_root()))
+ self.fix_bin_or_dylib("{}/bin/rustdoc".format(self.bin_root()))
+ lib_dir = "{}/lib".format(self.bin_root())
+ for lib in os.listdir(lib_dir):
+ if lib.endswith(".so"):
+ self.fix_bin_or_dylib("{}/{}".format(lib_dir, lib))
with output(self.rustc_stamp()) as rust_stamp:
rust_stamp.write(self.date)
filename = "cargo-{}-{}{}".format(cargo_channel, self.build,
tarball_suffix)
self._download_stage0_helper(filename, "cargo", tarball_suffix)
- self.fix_executable("{}/bin/cargo".format(self.bin_root()))
+ self.fix_bin_or_dylib("{}/bin/cargo".format(self.bin_root()))
with output(self.cargo_stamp()) as cargo_stamp:
cargo_stamp.write(self.date)
[channel, date] = rustfmt_channel.split('-', 1)
filename = "rustfmt-{}-{}{}".format(channel, self.build, tarball_suffix)
self._download_stage0_helper(filename, "rustfmt-preview", tarball_suffix, date)
- self.fix_executable("{}/bin/rustfmt".format(self.bin_root()))
- self.fix_executable("{}/bin/cargo-fmt".format(self.bin_root()))
+ self.fix_bin_or_dylib("{}/bin/rustfmt".format(self.bin_root()))
+ self.fix_bin_or_dylib("{}/bin/cargo-fmt".format(self.bin_root()))
with output(self.rustfmt_stamp()) as rustfmt_stamp:
rustfmt_stamp.write(self.date + self.rustfmt_channel)
get("{}/{}".format(url, filename), tarball, verbose=self.verbose)
unpack(tarball, tarball_suffix, self.bin_root(), match=pattern, verbose=self.verbose)
- @staticmethod
- def fix_executable(fname):
- """Modifies the interpreter section of 'fname' to fix the dynamic linker
+ def fix_bin_or_dylib(self, fname):
+ """Modifies the interpreter section of 'fname' to fix the dynamic linker,
+ or the RPATH section, to fix the dynamic library search path
This method is only required on NixOS and uses the PatchELF utility to
- change the dynamic linker of ELF executables.
+ change the interpreter/RPATH of ELF executables.
Please see https://nixos.org/patchelf.html for more information
"""
nix_os_msg = "info: you seem to be running NixOS. Attempting to patch"
print(nix_os_msg, fname)
- try:
- interpreter = subprocess.check_output(
- ["patchelf", "--print-interpreter", fname])
- interpreter = interpreter.strip().decode(default_encoding)
- except subprocess.CalledProcessError as reason:
- print("warning: failed to call patchelf:", reason)
- return
-
- loader = interpreter.split("/")[-1]
-
- try:
- ldd_output = subprocess.check_output(
- ['ldd', '/run/current-system/sw/bin/sh'])
- ldd_output = ldd_output.strip().decode(default_encoding)
- except subprocess.CalledProcessError as reason:
- print("warning: unable to call ldd:", reason)
- return
-
- for line in ldd_output.splitlines():
- libname = line.split()[0]
- if libname.endswith(loader):
- loader_path = libname[:len(libname) - len(loader)]
- break
+ # Only build `stage0/.nix-deps` once.
+ nix_deps_dir = self.nix_deps_dir
+ if not nix_deps_dir:
+ nix_deps_dir = "{}/.nix-deps".format(self.bin_root())
+ if not os.path.exists(nix_deps_dir):
+ os.makedirs(nix_deps_dir)
+
+ nix_deps = [
+ # Needed for the path of `ld-linux.so` (via `nix-support/dynamic-linker`).
+ "stdenv.cc.bintools",
+
+ # Needed as a system dependency of `libLLVM-*.so`.
+ "zlib",
+
+ # Needed for patching ELF binaries (see doc comment above).
+ "patchelf",
+ ]
+
+ # Run `nix-build` to "build" each dependency (which will likely reuse
+ # the existing `/nix/store` copy, or at most download a pre-built copy).
+ # Importantly, we don't rely on `nix-build` printing the `/nix/store`
+ # path on stdout, but use `-o` to symlink it into `stage0/.nix-deps/$dep`,
+ # ensuring garbage collection will never remove the `/nix/store` path
+ # (which would break our patched binaries that hardcode those paths).
+ for dep in nix_deps:
+ try:
+ subprocess.check_output([
+ "nix-build", "<nixpkgs>",
+ "-A", dep,
+ "-o", "{}/{}".format(nix_deps_dir, dep),
+ ])
+ except subprocess.CalledProcessError as reason:
+ print("warning: failed to call nix-build:", reason)
+ return
+
+ self.nix_deps_dir = nix_deps_dir
+
+ patchelf = "{}/patchelf/bin/patchelf".format(nix_deps_dir)
+
+ if fname.endswith(".so"):
+ # Dynamic library, patch RPATH to point to system dependencies.
+ dylib_deps = ["zlib"]
+ rpath_entries = [
+ # Relative default, all binary and dynamic libraries we ship
+ # appear to have this (even when `../lib` is redundant).
+ "$ORIGIN/../lib",
+ ] + ["{}/{}/lib".format(nix_deps_dir, dep) for dep in dylib_deps]
+ patchelf_args = ["--set-rpath", ":".join(rpath_entries)]
else:
- print("warning: unable to find the path to the dynamic linker")
- return
-
- correct_interpreter = loader_path + loader
+ bintools_dir = "{}/stdenv.cc.bintools".format(nix_deps_dir)
+ with open("{}/nix-support/dynamic-linker".format(bintools_dir)) as dynamic_linker:
+ patchelf_args = ["--set-interpreter", dynamic_linker.read().rstrip()]
try:
- subprocess.check_output(
- ["patchelf", "--set-interpreter", correct_interpreter, fname])
+ subprocess.check_output([patchelf] + patchelf_args + [fname])
except subprocess.CalledProcessError as reason:
print("warning: failed to call patchelf:", reason)
return
use crate::cache::{Cache, Interned, INTERNER};
use crate::check;
use crate::compile;
+use crate::config::TargetSelection;
use crate::dist;
use crate::doc;
use crate::flags::Subcommand;
pub struct RunConfig<'a> {
pub builder: &'a Builder<'a>,
- pub host: Interned<String>,
- pub target: Interned<String>,
+ pub host: TargetSelection,
+ pub target: TargetSelection,
pub path: PathBuf,
}
tool::Cargo,
tool::Rls,
tool::RustAnalyzer,
+ tool::RustDemangler,
tool::Rustdoc,
tool::Clippy,
tool::CargoClippy,
/// not take `Compiler` since all `Compiler` instances are meant to be
/// obtained through this function, since it ensures that they are valid
/// (i.e., built and assembled).
- pub fn compiler(&self, stage: u32, host: Interned<String>) -> Compiler {
+ pub fn compiler(&self, stage: u32, host: TargetSelection) -> Compiler {
self.ensure(compile::Assemble { target_compiler: Compiler { stage, host } })
}
pub fn compiler_for(
&self,
stage: u32,
- host: Interned<String>,
- target: Interned<String>,
+ host: TargetSelection,
+ target: TargetSelection,
) -> Compiler {
if self.build.force_use_stage1(Compiler { stage, host }, target) {
self.compiler(1, self.config.build)
/// Returns the libdir where the standard library and other artifacts are
/// found for a compiler's sysroot.
- pub fn sysroot_libdir(
- &self,
- compiler: Compiler,
- target: Interned<String>,
- ) -> Interned<PathBuf> {
+ pub fn sysroot_libdir(&self, compiler: Compiler, target: TargetSelection) -> Interned<PathBuf> {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
struct Libdir {
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
}
impl Step for Libdir {
type Output = Interned<PathBuf>;
.sysroot(self.compiler)
.join(lib)
.join("rustlib")
- .join(self.target)
+ .join(self.target.triple)
.join("lib");
let _ = fs::remove_dir_all(&sysroot);
t!(fs::create_dir_all(&sysroot));
Some(relative_libdir) if compiler.stage >= 1 => {
self.sysroot(compiler).join(relative_libdir)
}
- _ => self.sysroot(compiler).join(libdir(&compiler.host)),
+ _ => self.sysroot(compiler).join(libdir(compiler.host)),
}
}
}
/// Windows.
pub fn libdir_relative(&self, compiler: Compiler) -> &Path {
if compiler.is_snapshot(self) {
- libdir(&self.config.build).as_ref()
+ libdir(self.config.build).as_ref()
} else {
match self.config.libdir_relative() {
Some(relative_libdir) if compiler.stage >= 1 => relative_libdir,
- _ => libdir(&compiler.host).as_ref(),
+ _ => libdir(compiler.host).as_ref(),
}
}
}
if compiler.is_snapshot(self) {
self.initial_rustc.clone()
} else {
- self.sysroot(compiler).join("bin").join(exe("rustc", &compiler.host))
+ self.sysroot(compiler).join("bin").join(exe("rustc", compiler.host))
}
}
.env("CFG_RELEASE_CHANNEL", &self.config.channel)
.env("RUSTDOC_REAL", self.rustdoc(compiler))
.env("RUSTDOC_CRATE_VERSION", self.rust_version())
- .env("RUSTC_BOOTSTRAP", "1");
+ .env("RUSTC_BOOTSTRAP", "1")
+ .arg("-Winvalid_codeblock_attributes");
+ if self.config.deny_warnings {
+ cmd.arg("-Dwarnings");
+ }
// Remove make-related flags that can cause jobserver problems.
cmd.env_remove("MAKEFLAGS");
///
/// Note that this returns `None` if LLVM is disabled, or if we're in a
/// check build or dry-run, where there's no need to build all of LLVM.
- fn llvm_config(&self, target: Interned<String>) -> Option<PathBuf> {
+ fn llvm_config(&self, target: TargetSelection) -> Option<PathBuf> {
if self.config.llvm_enabled() && self.kind != Kind::Check && !self.config.dry_run {
let llvm_config = self.ensure(native::Llvm { target });
if llvm_config.is_file() {
compiler: Compiler,
mode: Mode,
source_type: SourceType,
- target: Interned<String>,
+ target: TargetSelection,
cmd: &str,
) -> Cargo {
let mut cargo = Command::new(&self.initial_cargo);
let my_out = match mode {
// This is the intended out directory for compiler documentation.
Mode::Rustc | Mode::ToolRustc | Mode::Codegen => self.compiler_doc_out(target),
- _ => self.crate_doc_out(target),
+ Mode::Std => out_dir.join(target.triple).join("doc"),
+ _ => panic!("doc mode {:?} not expected", mode),
};
let rustdoc = self.rustdoc(compiler);
self.clear_if_dirty(&my_out, &rustdoc);
}
if cmd != "install" {
- cargo.arg("--target").arg(target);
+ cargo.arg("--target").arg(target.rustc_target_arg());
} else {
assert_eq!(target, compiler.host);
}
compiler.stage
};
- let mut rustflags = Rustflags::new(&target);
+ let mut rustflags = Rustflags::new(target);
if stage != 0 {
if let Ok(s) = env::var("CARGOFLAGS_NOT_BOOTSTRAP") {
cargo.args(s.split_whitespace());
// FIXME: It might be better to use the same value for both `RUSTFLAGS` and `RUSTDOCFLAGS`,
// but this breaks CI. At the very least, stage0 `rustdoc` needs `--cfg bootstrap`. See
// #71458.
- let rustdocflags = rustflags.clone();
+ let mut rustdocflags = rustflags.clone();
if let Ok(s) = env::var("CARGOFLAGS") {
cargo.args(s.split_whitespace());
// argument manually via `-C link-args=-Wl,-rpath,...`. Plus isn't it
// fun to pass a flag to a tool to pass a flag to pass a flag to a tool
// to change a flag in a binary?
- if self.config.rust_rpath && util::use_host_linker(&target) {
+ if self.config.rust_rpath && util::use_host_linker(target) {
let rpath = if target.contains("apple") {
// Note that we need to take one extra step on macOS to also pass
// `-Wl,-instal_name,@rpath/...` to get things to work right. To
}
if let Some(target_linker) = self.linker(target, can_use_lld) {
- let target = crate::envify(&target);
+ let target = crate::envify(&target.triple);
cargo.env(&format!("CARGO_TARGET_{}_LINKER", target), target_linker);
}
if !(["build", "check", "clippy", "fix", "rustc"].contains(&cmd)) && want_rustdoc {
if self.config.deny_warnings {
lint_flags.push("-Dwarnings");
+ rustdocflags.arg("-Dwarnings");
}
// FIXME(#58633) hide "unused attribute" errors in incremental
// are always ignored in dependencies. Eventually this should be
// fixed via better support from Cargo.
cargo.env("RUSTC_LINT_FLAGS", lint_flags.join(" "));
+
+ rustdocflags.arg("-Winvalid_codeblock_attributes");
}
if let Mode::Rustc | Mode::Codegen = mode {
}
};
let cc = ccacheify(&self.cc(target));
- cargo.env(format!("CC_{}", target), &cc);
+ cargo.env(format!("CC_{}", target.triple), &cc);
let cflags = self.cflags(target, GitRepo::Rustc).join(" ");
- cargo.env(format!("CFLAGS_{}", target), cflags.clone());
+ cargo.env(format!("CFLAGS_{}", target.triple), cflags.clone());
if let Some(ar) = self.ar(target) {
let ranlib = format!("{} s", ar.display());
- cargo.env(format!("AR_{}", target), ar).env(format!("RANLIB_{}", target), ranlib);
+ cargo
+ .env(format!("AR_{}", target.triple), ar)
+ .env(format!("RANLIB_{}", target.triple), ranlib);
}
if let Ok(cxx) = self.cxx(target) {
let cxx = ccacheify(&cxx);
cargo
- .env(format!("CXX_{}", target), &cxx)
- .env(format!("CXXFLAGS_{}", target), cflags);
+ .env(format!("CXX_{}", target.triple), &cxx)
+ .env(format!("CXXFLAGS_{}", target.triple), cflags);
}
}
// Environment variables *required* throughout the build
//
// FIXME: should update code to not require this env var
- cargo.env("CFG_COMPILER_HOST_TRIPLE", target);
+ cargo.env("CFG_COMPILER_HOST_TRIPLE", target.triple);
// Set this for all builds to make sure doc builds also get it.
cargo.env("CFG_RELEASE_CHANNEL", &self.config.channel);
struct Rustflags(String);
impl Rustflags {
- fn new(target: &str) -> Rustflags {
+ fn new(target: TargetSelection) -> Rustflags {
let mut ret = Rustflags(String::new());
// Inherit `RUSTFLAGS` by default ...
// ... and also handle target-specific env RUSTFLAGS if they're
// configured.
- let target_specific = format!("CARGO_TARGET_{}_RUSTFLAGS", crate::envify(target));
+ let target_specific = format!("CARGO_TARGET_{}_RUSTFLAGS", crate::envify(&target.triple));
ret.env(&target_specific);
ret
}
impl Cargo {
+ pub fn rustdocflag(&mut self, arg: &str) -> &mut Cargo {
+ self.rustdocflags.arg(arg);
+ self
+ }
pub fn rustflag(&mut self, arg: &str) -> &mut Cargo {
self.rustflags.arg(arg);
self
}
pub fn env(&mut self, key: impl AsRef<OsStr>, value: impl AsRef<OsStr>) -> &mut Cargo {
+ // These are managed through rustflag/rustdocflag interfaces.
+ assert_ne!(key.as_ref(), "RUSTFLAGS");
+ assert_ne!(key.as_ref(), "RUSTDOCFLAGS");
self.command.env(key.as_ref(), value.as_ref());
self
}
use super::*;
-use crate::config::Config;
+use crate::config::{Config, TargetSelection};
use std::thread;
use pretty_assertions::assert_eq;
.join(&thread::current().name().unwrap_or("unknown").replace(":", "-"));
t!(fs::create_dir_all(&dir));
config.out = dir;
- config.build = INTERNER.intern_str("A");
+ config.build = TargetSelection::from_user("A");
config.hosts = vec![config.build]
.into_iter()
- .chain(host.iter().map(|s| INTERNER.intern_str(s)))
+ .chain(host.iter().map(|s| TargetSelection::from_user(s)))
.collect::<Vec<_>>();
config.targets = config
.hosts
.clone()
.into_iter()
- .chain(target.iter().map(|s| INTERNER.intern_str(s)))
+ .chain(target.iter().map(|s| TargetSelection::from_user(s)))
.collect::<Vec<_>>();
config
}
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
- let a = INTERNER.intern_str("A");
+ let a = TargetSelection::from_user("A");
assert_eq!(first(builder.cache.all::<dist::Docs>()), &[dist::Docs { host: a },]);
assert_eq!(first(builder.cache.all::<dist::Mingw>()), &[dist::Mingw { host: a },]);
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
+ let a = TargetSelection::from_user("A");
+ let b = TargetSelection::from_user("B");
assert_eq!(
first(builder.cache.all::<dist::Docs>()),
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
+ let a = TargetSelection::from_user("A");
+ let b = TargetSelection::from_user("B");
assert_eq!(
first(builder.cache.all::<dist::Docs>()),
#[test]
fn dist_only_cross_host() {
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
+ let a = TargetSelection::from_user("A");
+ let b = TargetSelection::from_user("B");
let mut build = Build::new(configure(&["B"], &[]));
build.config.docs = false;
build.config.extended = true;
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
- let c = INTERNER.intern_str("C");
+ let a = TargetSelection::from_user("A");
+ let b = TargetSelection::from_user("B");
+ let c = TargetSelection::from_user("C");
assert_eq!(
first(builder.cache.all::<dist::Docs>()),
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
- let c = INTERNER.intern_str("C");
+ let a = TargetSelection::from_user("A");
+ let b = TargetSelection::from_user("B");
+ let c = TargetSelection::from_user("C");
assert_eq!(
first(builder.cache.all::<dist::Docs>()),
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Dist), &[]);
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
+ let a = TargetSelection::from_user("A");
+ let b = TargetSelection::from_user("B");
assert_eq!(
first(builder.cache.all::<dist::Docs>()),
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]);
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
- let c = INTERNER.intern_str("C");
+ let a = TargetSelection::from_user("A");
+ let b = TargetSelection::from_user("B");
+ let c = TargetSelection::from_user("C");
assert_eq!(
first(builder.cache.all::<compile::Std>()),
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Build), &[]);
- let a = INTERNER.intern_str("A");
- let b = INTERNER.intern_str("B");
- let c = INTERNER.intern_str("C");
+ let a = TargetSelection::from_user("A");
+ let b = TargetSelection::from_user("B");
+ let c = TargetSelection::from_user("C");
assert_eq!(
first(builder.cache.all::<compile::Std>()),
let build = Build::new(config);
let mut builder = Builder::new(&build);
- let host = INTERNER.intern_str("A");
+ let host = TargetSelection::from_user("A");
builder
.run_step_descriptions(&[StepDescription::from::<test::Crate>()], &["src/libstd".into()]);
let build = Build::new(config);
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Doc), &[]);
- let a = INTERNER.intern_str("A");
+ let a = TargetSelection::from_user("A");
// error_index_generator uses stage 1 to share rustdoc artifacts with the
// rustdoc tool.
let build = Build::new(config);
let mut builder = Builder::new(&build);
builder.run_step_descriptions(&Builder::get_step_descriptions(Kind::Test), &[]);
- let a = INTERNER.intern_str("A");
+ let a = TargetSelection::from_user("A");
// error_index_generator uses stage 1 to share rustdoc artifacts with the
// rustdoc tool.
use build_helper::output;
-use crate::cache::Interned;
-use crate::config::Target;
+use crate::config::{Target, TargetSelection};
use crate::{Build, GitRepo};
// The `cc` crate doesn't provide a way to obtain a path to the detected archiver,
// so use some simplified logic here. First we respect the environment variable `AR`, then
// try to infer the archiver path from the C compiler path.
// In the future this logic should be replaced by calling into the `cc` crate.
-fn cc2ar(cc: &Path, target: &str) -> Option<PathBuf> {
- if let Some(ar) = env::var_os(format!("AR_{}", target.replace("-", "_"))) {
+fn cc2ar(cc: &Path, target: TargetSelection) -> Option<PathBuf> {
+ if let Some(ar) = env::var_os(format!("AR_{}", target.triple.replace("-", "_"))) {
Some(PathBuf::from(ar))
} else if let Some(ar) = env::var_os("AR") {
Some(PathBuf::from(ar))
.opt_level(2)
.warnings(false)
.debug(false)
- .target(&target)
- .host(&build.build);
+ .target(&target.triple)
+ .host(&build.build.triple);
match build.crt_static(target) {
Some(a) => {
cfg.static_crt(a);
let ar = if let ar @ Some(..) = config.and_then(|c| c.ar.clone()) {
ar
} else {
- cc2ar(compiler.path(), &target)
+ cc2ar(compiler.path(), target)
};
- build.cc.insert(target, compiler);
+ build.cc.insert(target, compiler.clone());
let cflags = build.cflags(target, GitRepo::Rustc);
// If we use llvm-libunwind, we will need a C++ compiler as well for all targets
.warnings(false)
.debug(false)
.cpp(true)
- .target(&target)
- .host(&build.build);
+ .target(&target.triple)
+ .host(&build.build.triple);
let cxx_configured = if let Some(cxx) = config.and_then(|c| c.cxx.as_ref()) {
cfg.compiler(cxx);
build.cxx.insert(target, compiler);
}
- build.verbose(&format!("CC_{} = {:?}", &target, build.cc(target)));
- build.verbose(&format!("CFLAGS_{} = {:?}", &target, cflags));
+ build.verbose(&format!("CC_{} = {:?}", &target.triple, build.cc(target)));
+ build.verbose(&format!("CFLAGS_{} = {:?}", &target.triple, cflags));
if let Ok(cxx) = build.cxx(target) {
- build.verbose(&format!("CXX_{} = {:?}", &target, cxx));
- build.verbose(&format!("CXXFLAGS_{} = {:?}", &target, cflags));
+ build.verbose(&format!("CXX_{} = {:?}", &target.triple, cxx));
+ build.verbose(&format!("CXXFLAGS_{} = {:?}", &target.triple, cflags));
}
if let Some(ar) = ar {
- build.verbose(&format!("AR_{} = {:?}", &target, ar));
+ build.verbose(&format!("AR_{} = {:?}", &target.triple, ar));
build.ar.insert(target, ar);
}
}
fn set_compiler(
cfg: &mut cc::Build,
compiler: Language,
- target: Interned<String>,
+ target: TargetSelection,
config: Option<&Target>,
build: &Build,
) {
- match &*target {
+ match &*target.triple {
// When compiling for android we may have the NDK configured in the
// config.toml in which case we look there. Otherwise the default
// compiler already takes into account the triple in question.
t if t.contains("android") => {
if let Some(ndk) = config.and_then(|c| c.ndk.as_ref()) {
let target = target
+ .triple
.replace("armv7neon", "arm")
.replace("armv7", "arm")
.replace("thumbv7neon", "arm")
use crate::Build;
// The version number
-pub const CFG_RELEASE_NUM: &str = "1.46.0";
+pub const CFG_RELEASE_NUM: &str = "1.47.0";
pub struct GitInfo {
inner: Option<Info>,
//! Implementation of compiling the compiler and standard library, in "check"-based modes.
use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
-use crate::cache::Interned;
use crate::compile::{add_to_sysroot, run_cargo, rustc_cargo, std_cargo};
+use crate::config::TargetSelection;
use crate::tool::{prepare_tool_cargo, SourceType};
use crate::{Compiler, Mode};
use std::path::PathBuf;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Std {
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
fn args(kind: Kind) -> Vec<String> {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Rustc {
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Rustc {
($name:ident, $path:expr, $source_type:expr) => {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct $name {
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for $name {
println!(
"Checking {} artifacts ({} -> {})",
stringify!($name).to_lowercase(),
- &compiler.host,
- target
+ &compiler.host.triple,
+ target.triple
);
run_cargo(
builder,
fn stamp(
builder: &Builder<'_>,
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
) -> PathBuf {
builder
.cargo_out(compiler, Mode::ToolRustc, target)
/// Cargo's output path for the standard library in a given stage, compiled
/// by a particular compiler for the specified target.
-fn libstd_stamp(builder: &Builder<'_>, compiler: Compiler, target: Interned<String>) -> PathBuf {
+fn libstd_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf {
builder.cargo_out(compiler, Mode::Std, target).join(".libstd-check.stamp")
}
/// Cargo's output path for librustc in a given stage, compiled by a particular
/// compiler for the specified target.
-fn librustc_stamp(builder: &Builder<'_>, compiler: Compiler, target: Interned<String>) -> PathBuf {
+fn librustc_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf {
builder.cargo_out(compiler, Mode::Rustc, target).join(".librustc-check.stamp")
}
rm_rf(&build.out.join("dist"));
for host in &build.hosts {
- let entries = match build.out.join(host).read_dir() {
+ let entries = match build.out.join(host.triple).read_dir() {
Ok(iter) => iter,
Err(_) => continue,
};
use crate::builder::Cargo;
use crate::builder::{Builder, Kind, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
+use crate::config::TargetSelection;
use crate::dist;
use crate::native;
use crate::tool::SourceType;
#[derive(Debug, PartialOrd, Ord, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Std {
- pub target: Interned<String>,
+ pub target: TargetSelection,
pub compiler: Compiler,
}
fn copy_third_party_objects(
builder: &Builder<'_>,
compiler: &Compiler,
- target: Interned<String>,
+ target: TargetSelection,
) -> Vec<(PathBuf, DependencyType)> {
let mut target_deps = vec![];
fn copy_self_contained_objects(
builder: &Builder<'_>,
compiler: &Compiler,
- target: Interned<String>,
+ target: TargetSelection,
) -> Vec<(PathBuf, DependencyType)> {
- // cfg(bootstrap)
- // Remove when upgrading bootstrap compiler.
- let libdir_self_contained = if compiler.stage == 0 {
- builder.sysroot_libdir(*compiler, target).to_path_buf()
- } else {
- builder.sysroot_libdir(*compiler, target).join("self-contained")
- };
+ let libdir_self_contained = builder.sysroot_libdir(*compiler, target).join("self-contained");
t!(fs::create_dir_all(&libdir_self_contained));
let mut target_deps = vec![];
/// Configure cargo to compile the standard library, adding appropriate env vars
/// and such.
-pub fn std_cargo(builder: &Builder<'_>, target: Interned<String>, stage: u32, cargo: &mut Cargo) {
+pub fn std_cargo(builder: &Builder<'_>, target: TargetSelection, stage: u32, cargo: &mut Cargo) {
if let Some(target) = env::var_os("MACOSX_STD_DEPLOYMENT_TARGET") {
cargo.env("MACOSX_DEPLOYMENT_TARGET", target);
}
struct StdLink {
pub compiler: Compiler,
pub target_compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for StdLink {
fn copy_sanitizers(
builder: &Builder<'_>,
compiler: &Compiler,
- target: Interned<String>,
+ target: TargetSelection,
) -> Vec<PathBuf> {
let runtimes: Vec<native::SanitizerRuntime> = builder.ensure(native::Sanitizers { target });
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct StartupObjects {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for StartupObjects {
.arg("--cfg")
.arg("bootstrap")
.arg("--target")
- .arg(target)
+ .arg(target.rustc_target_arg())
.arg("--emit=obj")
.arg("-o")
.arg(dst_file)
#[derive(Debug, PartialOrd, Ord, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Rustc {
- pub target: Interned<String>,
+ pub target: TargetSelection,
pub compiler: Compiler,
}
}
}
-pub fn rustc_cargo(builder: &Builder<'_>, cargo: &mut Cargo, target: Interned<String>) {
+pub fn rustc_cargo(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection) {
cargo
.arg("--features")
.arg(builder.rustc_features())
rustc_cargo_env(builder, cargo, target);
}
-pub fn rustc_cargo_env(builder: &Builder<'_>, cargo: &mut Cargo, target: Interned<String>) {
+pub fn rustc_cargo_env(builder: &Builder<'_>, cargo: &mut Cargo, target: TargetSelection) {
// Set some configuration variables picked up by build scripts and
// the compiler alike
cargo
struct RustcLink {
pub compiler: Compiler,
pub target_compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for RustcLink {
/// Cargo's output path for the standard library in a given stage, compiled
/// by a particular compiler for the specified target.
-pub fn libstd_stamp(
- builder: &Builder<'_>,
- compiler: Compiler,
- target: Interned<String>,
-) -> PathBuf {
+pub fn libstd_stamp(builder: &Builder<'_>, compiler: Compiler, target: TargetSelection) -> PathBuf {
builder.cargo_out(compiler, Mode::Std, target).join(".libstd.stamp")
}
pub fn librustc_stamp(
builder: &Builder<'_>,
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
) -> PathBuf {
builder.cargo_out(compiler, Mode::Rustc, target).join(".librustc.stamp")
}
pub fn compiler_file(
builder: &Builder<'_>,
compiler: &Path,
- target: Interned<String>,
+ target: TargetSelection,
file: &str,
) -> PathBuf {
let mut cmd = Command::new(compiler);
fn run(self, builder: &Builder<'_>) -> Interned<PathBuf> {
let compiler = self.compiler;
let sysroot = if compiler.stage == 0 {
- builder.out.join(&compiler.host).join("stage0-sysroot")
+ builder.out.join(&compiler.host.triple).join("stage0-sysroot")
} else {
- builder.out.join(&compiler.host).join(format!("stage{}", compiler.stage))
+ builder.out.join(&compiler.host.triple).join(format!("stage{}", compiler.stage))
};
let _ = fs::remove_dir_all(&sysroot);
t!(fs::create_dir_all(&sysroot));
let libdir = builder.sysroot_libdir(target_compiler, target_compiler.host);
if let Some(lld_install) = lld_install {
- let src_exe = exe("lld", &target_compiler.host);
- let dst_exe = exe("rust-lld", &target_compiler.host);
+ let src_exe = exe("lld", target_compiler.host);
+ let dst_exe = exe("rust-lld", target_compiler.host);
// we prepend this bin directory to the user PATH when linking Rust binaries. To
// avoid shadowing the system LLD we rename the LLD we provide to `rust-lld`.
let dst = libdir.parent().unwrap().join("bin");
// Link the compiler binary itself into place
let out_dir = builder.cargo_out(build_compiler, Mode::Rustc, host);
- let rustc = out_dir.join(exe("rustc_binary", &*host));
+ let rustc = out_dir.join(exe("rustc_binary", host));
let bindir = sysroot.join("bin");
t!(fs::create_dir_all(&bindir));
let compiler = builder.rustc(target_compiler);
use std::collections::{HashMap, HashSet};
use std::env;
use std::ffi::OsString;
+use std::fmt;
use std::fs;
use std::path::{Path, PathBuf};
use std::process;
pub docs: bool,
pub locked_deps: bool,
pub vendor: bool,
- pub target_config: HashMap<Interned<String>, Target>,
+ pub target_config: HashMap<TargetSelection, Target>,
pub full_bootstrap: bool,
pub extended: bool,
pub tools: Option<HashSet<String>>,
pub rust_thin_lto_import_instr_limit: Option<u32>,
pub rust_remap_debuginfo: bool,
- pub build: Interned<String>,
- pub hosts: Vec<Interned<String>>,
- pub targets: Vec<Interned<String>>,
+ pub build: TargetSelection,
+ pub hosts: Vec<TargetSelection>,
+ pub targets: Vec<TargetSelection>,
pub local_rebuild: bool,
pub jemalloc: bool,
pub control_flow_guard: bool,
pub out: PathBuf,
}
+#[derive(Debug, Copy, Clone, Default, PartialEq, Eq, PartialOrd, Ord, Hash)]
+pub struct TargetSelection {
+ pub triple: Interned<String>,
+ file: Option<Interned<String>>,
+}
+
+impl TargetSelection {
+ pub fn from_user(selection: &str) -> Self {
+ let path = Path::new(selection);
+
+ let (triple, file) = if path.exists() {
+ let triple = path
+ .file_stem()
+ .expect("Target specification file has no file stem")
+ .to_str()
+ .expect("Target specification file stem is not UTF-8");
+
+ (triple, Some(selection))
+ } else {
+ (selection, None)
+ };
+
+ let triple = INTERNER.intern_str(triple);
+ let file = file.map(|f| INTERNER.intern_str(f));
+
+ Self { triple, file }
+ }
+
+ pub fn rustc_target_arg(&self) -> &str {
+ self.file.as_ref().unwrap_or(&self.triple)
+ }
+
+ pub fn contains(&self, needle: &str) -> bool {
+ self.triple.contains(needle)
+ }
+
+ pub fn starts_with(&self, needle: &str) -> bool {
+ self.triple.starts_with(needle)
+ }
+
+ pub fn ends_with(&self, needle: &str) -> bool {
+ self.triple.ends_with(needle)
+ }
+}
+
+impl fmt::Display for TargetSelection {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ write!(f, "{}", self.triple)?;
+ if let Some(file) = self.file {
+ write!(f, "({})", file)?;
+ }
+ Ok(())
+ }
+}
+
+impl PartialEq<&str> for TargetSelection {
+ fn eq(&self, other: &&str) -> bool {
+ self.triple == *other
+ }
+}
+
/// Per-target configuration stored in the global configuration structure.
#[derive(Default)]
pub struct Target {
config.missing_tools = false;
// set by bootstrap.py
- config.build = INTERNER.intern_str(&env::var("BUILD").expect("'BUILD' to be set"));
+ config.build = TargetSelection::from_user(&env::var("BUILD").expect("'BUILD' to be set"));
config.src = Config::path_from_python("SRC");
config.out = Config::path_from_python("BUILD_DIR");
let build = toml.build.clone().unwrap_or_default();
// set by bootstrap.py
config.hosts.push(config.build.clone());
- for host in build.host.iter() {
- let host = INTERNER.intern_str(host);
+ for host in build.host.iter().map(|h| TargetSelection::from_user(h)) {
if !config.hosts.contains(&host) {
config.hosts.push(host);
}
}
- for target in
- config.hosts.iter().cloned().chain(build.target.iter().map(|s| INTERNER.intern_str(s)))
+ for target in config
+ .hosts
+ .iter()
+ .copied()
+ .chain(build.target.iter().map(|h| TargetSelection::from_user(h)))
{
if !config.targets.contains(&target) {
config.targets.push(target);
target.wasi_root = cfg.wasi_root.clone().map(PathBuf::from);
target.qemu_rootfs = cfg.qemu_rootfs.clone().map(PathBuf::from);
- config.target_config.insert(INTERNER.intern_string(triple.clone()), target);
+ config.target_config.insert(TargetSelection::from_user(triple), target);
}
}
use crate::cache::{Interned, INTERNER};
use crate::channel;
use crate::compile;
+use crate::config::TargetSelection;
use crate::tool::{self, Tool};
use crate::util::{exe, is_dylib, timeit};
use crate::{Compiler, DependencyType, Mode, LLVM_TOOLS};
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Docs {
- pub host: Interned<String>,
+ pub host: TargetSelection,
}
impl Step for Docs {
let name = pkgname(builder, "rust-docs");
if !builder.config.docs {
- return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
+ return distdir(builder).join(format!("{}-{}.tar.gz", name, host.triple));
}
builder.default_doc(None);
builder.info(&format!("Dist docs ({})", host));
let _time = timeit(builder);
- let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, host.triple));
let _ = fs::remove_dir_all(&image);
let dst = image.join("share/doc/rust/html");
.arg(&tmpdir(builder))
.arg("--output-dir")
.arg(&distdir(builder))
- .arg(format!("--package-name={}-{}", name, host))
+ .arg(format!("--package-name={}-{}", name, host.triple))
.arg("--component-name=rust-docs")
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--bulk-dirs=share/doc/rust/html");
builder.run(&mut cmd);
builder.remove_dir(&image);
- distdir(builder).join(format!("{}-{}.tar.gz", name, host))
+ distdir(builder).join(format!("{}-{}.tar.gz", name, host.triple))
}
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustcDocs {
- pub host: Interned<String>,
+ pub host: TargetSelection,
}
impl Step for RustcDocs {
let name = pkgname(builder, "rustc-docs");
if !builder.config.compiler_docs {
- return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
+ return distdir(builder).join(format!("{}-{}.tar.gz", name, host.triple));
}
builder.default_doc(None);
- let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, host.triple));
let _ = fs::remove_dir_all(&image);
let dst = image.join("share/doc/rust/html");
.arg(&tmpdir(builder))
.arg("--output-dir")
.arg(&distdir(builder))
- .arg(format!("--package-name={}-{}", name, host))
+ .arg(format!("--package-name={}-{}", name, host.triple))
.arg("--component-name=rustc-docs")
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--bulk-dirs=share/doc/rust/html");
builder.run(&mut cmd);
builder.remove_dir(&image);
- distdir(builder).join(format!("{}-{}.tar.gz", name, host))
+ distdir(builder).join(format!("{}-{}.tar.gz", name, host.triple))
}
}
fn make_win_dist(
rust_root: &Path,
plat_root: &Path,
- target_triple: Interned<String>,
+ target: TargetSelection,
builder: &Builder<'_>,
) {
//Ask gcc where it keeps its stuff
- let mut cmd = Command::new(builder.cc(target_triple));
+ let mut cmd = Command::new(builder.cc(target));
cmd.arg("-print-search-dirs");
let gcc_out = output(&mut cmd);
}
}
- let compiler = if target_triple == "i686-pc-windows-gnu" {
+ let compiler = if target == "i686-pc-windows-gnu" {
"i686-w64-mingw32-gcc.exe"
- } else if target_triple == "x86_64-pc-windows-gnu" {
+ } else if target == "x86_64-pc-windows-gnu" {
"x86_64-w64-mingw32-gcc.exe"
} else {
"gcc.exe"
};
let target_tools = [compiler, "ld.exe", "dlltool.exe", "libwinpthread-1.dll"];
let mut rustc_dlls = vec!["libwinpthread-1.dll"];
- if target_triple.starts_with("i686-") {
+ if target.starts_with("i686-") {
rustc_dlls.push("libgcc_s_dw2-1.dll");
} else {
rustc_dlls.push("libgcc_s_seh-1.dll");
let target_bin_dir = plat_root
.join("lib")
.join("rustlib")
- .join(target_triple)
+ .join(target.triple)
.join("bin")
.join("self-contained");
fs::create_dir_all(&target_bin_dir).expect("creating target_bin_dir failed");
let target_lib_dir = plat_root
.join("lib")
.join("rustlib")
- .join(target_triple)
+ .join(target.triple)
.join("lib")
.join("self-contained");
fs::create_dir_all(&target_lib_dir).expect("creating target_lib_dir failed");
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Mingw {
- pub host: Interned<String>,
+ pub host: TargetSelection,
}
impl Step for Mingw {
builder.info(&format!("Dist mingw ({})", host));
let _time = timeit(builder);
let name = pkgname(builder, "rust-mingw");
- let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, host.triple));
let _ = fs::remove_dir_all(&image);
t!(fs::create_dir_all(&image));
.arg(&tmpdir(builder))
.arg("--output-dir")
.arg(&distdir(builder))
- .arg(format!("--package-name={}-{}", name, host))
+ .arg(format!("--package-name={}-{}", name, host.triple))
.arg("--component-name=rust-mingw")
.arg("--legacy-manifest-dirs=rustlib,cargo");
builder.run(&mut cmd);
t!(fs::remove_dir_all(&image));
- Some(distdir(builder).join(format!("{}-{}.tar.gz", name, host)))
+ Some(distdir(builder).join(format!("{}-{}.tar.gz", name, host.triple)))
}
}
let host = self.compiler.host;
let name = pkgname(builder, "rustc");
- let image = tmpdir(builder).join(format!("{}-{}-image", name, host));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, host.triple));
let _ = fs::remove_dir_all(&image);
- let overlay = tmpdir(builder).join(format!("{}-{}-overlay", name, host));
+ let overlay = tmpdir(builder).join(format!("{}-{}-overlay", name, host.triple));
let _ = fs::remove_dir_all(&overlay);
// Prepare the rustc "image", what will actually end up getting installed
.arg(&distdir(builder))
.arg("--non-installed-overlay")
.arg(&overlay)
- .arg(format!("--package-name={}-{}", name, host))
+ .arg(format!("--package-name={}-{}", name, host.triple))
.arg("--component-name=rustc")
.arg("--legacy-manifest-dirs=rustlib,cargo");
- builder.info(&format!("Dist rustc stage{} ({})", compiler.stage, host));
+ builder.info(&format!("Dist rustc stage{} ({})", compiler.stage, host.triple));
let _time = timeit(builder);
builder.run(&mut cmd);
builder.remove_dir(&image);
builder.remove_dir(&overlay);
- return distdir(builder).join(format!("{}-{}.tar.gz", name, host));
+ return distdir(builder).join(format!("{}-{}.tar.gz", name, host.triple));
fn prepare_image(builder: &Builder<'_>, compiler: Compiler, image: &Path) {
let host = compiler.host;
// Copy over lld if it's there
if builder.config.lld_enabled {
- let exe = exe("rust-lld", &compiler.host);
+ let exe = exe("rust-lld", compiler.host);
let src =
builder.sysroot_libdir(compiler, host).parent().unwrap().join("bin").join(&exe);
// for the rationale about this rename check `compile::copy_lld_to_sysroot`
- let dst = image.join("lib/rustlib").join(&*host).join("bin").join(&exe);
+ let dst = image.join("lib/rustlib").join(&*host.triple).join("bin").join(&exe);
t!(fs::create_dir_all(&dst.parent().unwrap()));
builder.copy(&src, &dst);
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct DebuggerScripts {
pub sysroot: Interned<PathBuf>,
- pub host: Interned<String>,
+ pub host: TargetSelection,
}
impl Step for DebuggerScripts {
}
/// Copy stamped files into an image's `target/lib` directory.
-fn copy_target_libs(builder: &Builder<'_>, target: &str, image: &Path, stamp: &Path) {
- let dst = image.join("lib/rustlib").join(target).join("lib");
+fn copy_target_libs(builder: &Builder<'_>, target: TargetSelection, image: &Path, stamp: &Path) {
+ let dst = image.join("lib/rustlib").join(target.triple).join("lib");
let self_contained_dst = dst.join("self-contained");
t!(fs::create_dir_all(&dst));
t!(fs::create_dir_all(&self_contained_dst));
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Std {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Std {
let target = self.target;
let name = pkgname(builder, "rust-std");
- let archive = distdir(builder).join(format!("{}-{}.tar.gz", name, target));
+ let archive = distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple));
if skip_host_target_lib(builder, compiler) {
return archive;
}
builder.ensure(compile::Std { compiler, target });
- let image = tmpdir(builder).join(format!("{}-{}-image", name, target));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, target.triple));
let _ = fs::remove_dir_all(&image);
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
let stamp = compile::libstd_stamp(builder, compiler_to_use, target);
- copy_target_libs(builder, &target, &image, &stamp);
+ copy_target_libs(builder, target, &image, &stamp);
let mut cmd = rust_installer(builder);
cmd.arg("generate")
.arg(&tmpdir(builder))
.arg("--output-dir")
.arg(&distdir(builder))
- .arg(format!("--package-name={}-{}", name, target))
- .arg(format!("--component-name=rust-std-{}", target))
+ .arg(format!("--package-name={}-{}", name, target.triple))
+ .arg(format!("--component-name=rust-std-{}", target.triple))
.arg("--legacy-manifest-dirs=rustlib,cargo");
builder
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustcDev {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for RustcDev {
let target = self.target;
let name = pkgname(builder, "rustc-dev");
- let archive = distdir(builder).join(format!("{}-{}.tar.gz", name, target));
+ let archive = distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple));
if skip_host_target_lib(builder, compiler) {
return archive;
}
builder.ensure(compile::Rustc { compiler, target });
- let image = tmpdir(builder).join(format!("{}-{}-image", name, target));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, target.triple));
let _ = fs::remove_dir_all(&image);
let compiler_to_use = builder.compiler_for(compiler.stage, compiler.host, target);
let stamp = compile::librustc_stamp(builder, compiler_to_use, target);
- copy_target_libs(builder, &target, &image, &stamp);
+ copy_target_libs(builder, target, &image, &stamp);
let mut cmd = rust_installer(builder);
cmd.arg("generate")
.arg(&tmpdir(builder))
.arg("--output-dir")
.arg(&distdir(builder))
- .arg(format!("--package-name={}-{}", name, target))
- .arg(format!("--component-name=rustc-dev-{}", target))
+ .arg(format!("--package-name={}-{}", name, target.triple))
+ .arg(format!("--component-name=rustc-dev-{}", target.triple))
.arg("--legacy-manifest-dirs=rustlib,cargo");
builder.info(&format!(
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Analysis {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Analysis {
let name = pkgname(builder, "rust-analysis");
if compiler.host != builder.config.build {
- return distdir(builder).join(format!("{}-{}.tar.gz", name, target));
+ return distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple));
}
builder.ensure(compile::Std { compiler, target });
- let image = tmpdir(builder).join(format!("{}-{}-image", name, target));
+ let image = tmpdir(builder).join(format!("{}-{}-image", name, target.triple));
let src = builder
.stage_out(compiler, Mode::Std)
- .join(target)
+ .join(target.triple)
.join(builder.cargo_dir())
.join("deps");
let image_src = src.join("save-analysis");
- let dst = image.join("lib/rustlib").join(target).join("analysis");
+ let dst = image.join("lib/rustlib").join(target.triple).join("analysis");
t!(fs::create_dir_all(&dst));
builder.info(&format!("image_src: {:?}, dst: {:?}", image_src, dst));
builder.cp_r(&image_src, &dst);
.arg(&tmpdir(builder))
.arg("--output-dir")
.arg(&distdir(builder))
- .arg(format!("--package-name={}-{}", name, target))
- .arg(format!("--component-name=rust-analysis-{}", target))
+ .arg(format!("--package-name={}-{}", name, target.triple))
+ .arg(format!("--component-name=rust-analysis-{}", target.triple))
.arg("--legacy-manifest-dirs=rustlib,cargo");
builder.info("Dist analysis");
let _time = timeit(builder);
builder.run(&mut cmd);
builder.remove_dir(&image);
- distdir(builder).join(format!("{}-{}.tar.gz", name, target))
+ distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple))
}
}
// (essentially libstd and all of its path dependencies)
let std_src_dirs = [
"src/build_helper",
+ "src/backtrace",
"src/liballoc",
"src/libcore",
"src/libpanic_abort",
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Cargo {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Cargo {
.arg(&distdir(builder))
.arg("--non-installed-overlay")
.arg(&overlay)
- .arg(format!("--package-name={}-{}", name, target))
+ .arg(format!("--package-name={}-{}", name, target.triple))
.arg("--component-name=cargo")
.arg("--legacy-manifest-dirs=rustlib,cargo");
builder.info(&format!("Dist cargo stage{} ({})", compiler.stage, target));
let _time = timeit(builder);
builder.run(&mut cmd);
- distdir(builder).join(format!("{}-{}.tar.gz", name, target))
+ distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple))
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rls {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Rls {
.arg(&distdir(builder))
.arg("--non-installed-overlay")
.arg(&overlay)
- .arg(format!("--package-name={}-{}", name, target))
+ .arg(format!("--package-name={}-{}", name, target.triple))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=rls-preview");
- builder.info(&format!("Dist RLS stage{} ({})", compiler.stage, target));
+ builder.info(&format!("Dist RLS stage{} ({})", compiler.stage, target.triple));
let _time = timeit(builder);
builder.run(&mut cmd);
- Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
+ Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple)))
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustAnalyzer {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for RustAnalyzer {
.arg(&distdir(builder))
.arg("--non-installed-overlay")
.arg(&overlay)
- .arg(format!("--package-name={}-{}", name, target))
+ .arg(format!("--package-name={}-{}", name, target.triple))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=rust-analyzer-preview");
builder.info(&format!("Dist rust-analyzer stage{} ({})", compiler.stage, target));
let _time = timeit(builder);
builder.run(&mut cmd);
- distdir(builder).join(format!("{}-{}.tar.gz", name, target))
+ distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple))
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Clippy {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Clippy {
.arg(&distdir(builder))
.arg("--non-installed-overlay")
.arg(&overlay)
- .arg(format!("--package-name={}-{}", name, target))
+ .arg(format!("--package-name={}-{}", name, target.triple))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=clippy-preview");
builder.info(&format!("Dist clippy stage{} ({})", compiler.stage, target));
let _time = timeit(builder);
builder.run(&mut cmd);
- distdir(builder).join(format!("{}-{}.tar.gz", name, target))
+ distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple))
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Miri {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Miri {
.arg(&distdir(builder))
.arg("--non-installed-overlay")
.arg(&overlay)
- .arg(format!("--package-name={}-{}", name, target))
+ .arg(format!("--package-name={}-{}", name, target.triple))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=miri-preview");
builder.info(&format!("Dist miri stage{} ({})", compiler.stage, target));
let _time = timeit(builder);
builder.run(&mut cmd);
- Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
+ Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple)))
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rustfmt {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Rustfmt {
.arg(&distdir(builder))
.arg("--non-installed-overlay")
.arg(&overlay)
- .arg(format!("--package-name={}-{}", name, target))
+ .arg(format!("--package-name={}-{}", name, target.triple))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=rustfmt-preview");
builder.info(&format!("Dist Rustfmt stage{} ({})", compiler.stage, target));
let _time = timeit(builder);
builder.run(&mut cmd);
- Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
+ Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple)))
}
}
#[derive(Debug, PartialOrd, Ord, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Extended {
stage: u32,
- host: Interned<String>,
- target: Interned<String>,
+ host: TargetSelection,
+ target: TargetSelection,
}
impl Step for Extended {
.arg(&work)
.arg("--output-dir")
.arg(&distdir(builder))
- .arg(format!("--package-name={}-{}", pkgname(builder, "rust"), target))
+ .arg(format!("--package-name={}-{}", pkgname(builder, "rust"), target.triple))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--input-tarballs")
.arg(input_tarballs)
let prepare = |name: &str| {
builder.create_dir(&pkg.join(name));
builder.cp_r(
- &work.join(&format!("{}-{}", pkgname(builder, name), target)),
+ &work.join(&format!("{}-{}", pkgname(builder, name), target.triple)),
&pkg.join(name),
);
builder.install(&etc.join("pkg/postinstall"), &pkg.join(name), 0o755);
.arg(xform(&etc.join("pkg/Distribution.xml")))
.arg("--resources")
.arg(pkg.join("res"))
- .arg(distdir(builder).join(format!("{}-{}.pkg", pkgname(builder, "rust"), target)))
+ .arg(distdir(builder).join(format!(
+ "{}-{}.pkg",
+ pkgname(builder, "rust"),
+ target.triple
+ )))
.arg("--package-path")
.arg(&pkg);
let _time = timeit(builder);
let prepare = |name: &str| {
builder.create_dir(&exe.join(name));
let dir = if name == "rust-std" || name == "rust-analysis" {
- format!("{}-{}", name, target)
+ format!("{}-{}", name, target.triple)
} else if name == "rls" {
"rls-preview".to_string()
} else if name == "rust-analyzer" {
name.to_string()
};
builder.cp_r(
- &work.join(&format!("{}-{}", pkgname(builder, name), target)).join(dir),
+ &work.join(&format!("{}-{}", pkgname(builder, name), target.triple)).join(dir),
&exe.join(name),
);
builder.remove(&exe.join(name).join("manifest.in"));
builder.install(&etc.join("gfx/dialogbg.bmp"), &exe, 0o644);
builder.info(&format!("building `msi` installer with {:?}", light));
- let filename = format!("{}-{}.msi", pkgname(builder, "rust"), target);
+ let filename = format!("{}-{}.msi", pkgname(builder, "rust"), target.triple);
let mut cmd = Command::new(&light);
cmd.arg("-nologo")
.arg("-ext")
}
}
-fn add_env(builder: &Builder<'_>, cmd: &mut Command, target: Interned<String>) {
+fn add_env(builder: &Builder<'_>, cmd: &mut Command, target: TargetSelection) {
let mut parts = channel::CFG_RELEASE_NUM.split('.');
cmd.env("CFG_RELEASE_INFO", builder.rust_version())
.env("CFG_RELEASE_NUM", channel::CFG_RELEASE_NUM)
.env("CFG_VER_BUILD", "0") // just needed to build
.env("CFG_PACKAGE_VERS", builder.rust_package_vers())
.env("CFG_PACKAGE_NAME", pkgname(builder, "rust"))
- .env("CFG_BUILD", target)
+ .env("CFG_BUILD", target.triple)
.env("CFG_CHANNEL", &builder.config.channel);
if target.contains("windows-gnu") {
///
/// Note: This function does not yet support Windows, but we also don't support
/// linking LLVM tools dynamically on Windows yet.
-fn maybe_install_llvm(builder: &Builder<'_>, target: Interned<String>, dst_libdir: &Path) {
+fn maybe_install_llvm(builder: &Builder<'_>, target: TargetSelection, dst_libdir: &Path) {
let src_libdir = builder.llvm_out(target).join("lib");
if target.contains("apple-darwin") {
}
/// Maybe add libLLVM.so to the target lib-dir for linking.
-pub fn maybe_install_llvm_target(builder: &Builder<'_>, target: Interned<String>, sysroot: &Path) {
- let dst_libdir = sysroot.join("lib/rustlib").join(&*target).join("lib");
+pub fn maybe_install_llvm_target(builder: &Builder<'_>, target: TargetSelection, sysroot: &Path) {
+ let dst_libdir = sysroot.join("lib/rustlib").join(&*target.triple).join("lib");
maybe_install_llvm(builder, target, &dst_libdir);
}
/// Maybe add libLLVM.so to the runtime lib-dir for rustc itself.
-pub fn maybe_install_llvm_runtime(builder: &Builder<'_>, target: Interned<String>, sysroot: &Path) {
+pub fn maybe_install_llvm_runtime(builder: &Builder<'_>, target: TargetSelection, sysroot: &Path) {
let dst_libdir =
sysroot.join(builder.sysroot_libdir_relative(Compiler { stage: 1, host: target }));
maybe_install_llvm(builder, target, &dst_libdir);
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct LlvmTools {
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for LlvmTools {
// Prepare the image directory
let src_bindir = builder.llvm_out(target).join("bin");
- let dst_bindir = image.join("lib/rustlib").join(&*target).join("bin");
+ let dst_bindir = image.join("lib/rustlib").join(&*target.triple).join("bin");
t!(fs::create_dir_all(&dst_bindir));
for tool in LLVM_TOOLS {
- let exe = src_bindir.join(exe(tool, &target));
+ let exe = src_bindir.join(exe(tool, target));
builder.install(&exe, &dst_bindir, 0o755);
}
.arg(&distdir(builder))
.arg("--non-installed-overlay")
.arg(&overlay)
- .arg(format!("--package-name={}-{}", name, target))
+ .arg(format!("--package-name={}-{}", name, target.triple))
.arg("--legacy-manifest-dirs=rustlib,cargo")
.arg("--component-name=llvm-tools-preview");
builder.run(&mut cmd);
- Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target)))
+ Some(distdir(builder).join(format!("{}-{}.tar.gz", name, target.triple)))
}
}
use crate::builder::{Builder, Compiler, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
use crate::compile;
-use crate::config::Config;
+use crate::config::{Config, TargetSelection};
use crate::tool::{self, prepare_tool_cargo, SourceType, Tool};
use crate::util::symlink_dir;
$(
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct $name {
- target: Interned<String>,
+ target: TargetSelection,
}
impl Step for $name {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct UnstableBook {
- target: Interned<String>,
+ target: TargetSelection,
}
impl Step for UnstableBook {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
struct RustbookSrc {
- target: Interned<String>,
+ target: TargetSelection,
name: Interned<String>,
src: Interned<PathBuf>,
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct TheBook {
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
}
impl Step for TheBook {
fn invoke_rustdoc(
builder: &Builder<'_>,
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
markdown: &str,
) {
let out = builder.doc_out(target);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Standalone {
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
}
impl Step for Standalone {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Std {
pub stage: u32,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Std {
let compiler = builder.compiler(stage, builder.config.build);
builder.ensure(compile::Std { compiler, target });
- let out_dir = builder.stage_out(compiler, Mode::Std).join(target).join("doc");
+ let out_dir = builder.stage_out(compiler, Mode::Std).join(target.triple).join("doc");
- // Here what we're doing is creating a *symlink* (directory junction on
- // Windows) to the final output location. This is not done as an
- // optimization but rather for correctness. We've got three trees of
- // documentation, one for std, one for test, and one for rustc. It's then
- // our job to merge them all together.
- //
- // Unfortunately rustbuild doesn't know nearly as well how to merge doc
- // trees as rustdoc does itself, so instead of actually having three
- // separate trees we just have rustdoc output to the same location across
- // all of them.
- //
- // This way rustdoc generates output directly into the output, and rustdoc
- // will also directly handle merging.
- let my_out = builder.crate_doc_out(target);
- t!(symlink_dir_force(&builder.config, &my_out, &out_dir));
t!(fs::copy(builder.src.join("src/doc/rust.css"), out.join("rust.css")));
let run_cargo_rustdoc_for = |package: &str| {
builder.cargo(compiler, Mode::Std, SourceType::InTree, target, "rustdoc");
compile::std_cargo(builder, target, compiler.stage, &mut cargo);
- cargo.arg("-p").arg(package);
- // Create all crate output directories first to make sure rustdoc uses
- // relative links.
- // FIXME: Cargo should probably do this itself.
- t!(fs::create_dir_all(out_dir.join(package)));
cargo
+ .arg("-p")
+ .arg(package)
.arg("--")
.arg("--markdown-css")
.arg("rust.css")
// folder structure, that would also build internal crates that we do
// not want to show in documentation. These crates will later be visited
// by the rustc step, so internal documentation will show them.
- let krates = ["alloc", "core", "std", "proc_macro", "test"];
+ //
+ // Note that the order here is important! The crates need to be
+ // processed starting from the leaves, otherwise rustdoc will not
+ // create correct links between crates because rustdoc depends on the
+ // existence of the output directories to know if it should be a local
+ // or remote link.
+ let krates = ["core", "alloc", "std", "proc_macro", "test"];
for krate in &krates {
run_cargo_rustdoc_for(krate);
}
- builder.cp_r(&my_out, &out);
+ builder.cp_r(&out_dir, &out);
// Look for src/libstd, src/libcore etc in the `x.py doc` arguments and
// open the corresponding rendered docs.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rustc {
stage: u32,
- target: Interned<String>,
+ target: TargetSelection,
}
impl Step for Rustc {
// Build rustc.
builder.ensure(compile::Rustc { compiler, target });
- // We do not symlink to the same shared folder that already contains std library
- // documentation from previous steps as we do not want to include that.
- let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target).join("doc");
+ // This uses a shared directory so that librustdoc documentation gets
+ // correctly built and merged with the rustc documentation. This is
+ // needed because rustdoc is built in a different directory from
+ // rustc. rustdoc needs to be able to see everything, for example when
+ // merging the search index, or generating local (relative) links.
+ let out_dir = builder.stage_out(compiler, Mode::Rustc).join(target.triple).join("doc");
t!(symlink_dir_force(&builder.config, &out, &out_dir));
// Build cargo command.
let mut cargo = builder.cargo(compiler, Mode::Rustc, SourceType::InTree, target, "doc");
- cargo.env(
- "RUSTDOCFLAGS",
- "--document-private-items \
- --enable-index-page -Zunstable-options",
- );
+ cargo.rustdocflag("--document-private-items");
+ cargo.rustdocflag("--enable-index-page");
+ cargo.rustdocflag("-Zunstable-options");
compile::rustc_cargo(builder, &mut cargo, target);
// Only include compiler crates, no dependencies of those, such as `libc`.
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Rustdoc {
stage: u32,
- target: Interned<String>,
+ target: TargetSelection,
}
impl Step for Rustdoc {
builder.ensure(tool::Rustdoc { compiler });
// Symlink compiler docs to the output directory of rustdoc documentation.
- let out_dir = builder.stage_out(compiler, Mode::ToolRustc).join(target).join("doc");
+ let out_dir = builder.stage_out(compiler, Mode::ToolRustc).join(target.triple).join("doc");
t!(fs::create_dir_all(&out_dir));
t!(symlink_dir_force(&builder.config, &out, &out_dir));
cargo.arg("--no-deps");
cargo.arg("-p").arg("rustdoc");
- cargo.env("RUSTDOCFLAGS", "--document-private-items");
+ cargo.rustdocflag("--document-private-items");
builder.run(&mut cargo.into());
}
}
#[derive(Ord, PartialOrd, Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct ErrorIndex {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for ErrorIndex {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct UnstableBookGen {
- target: Interned<String>,
+ target: TargetSelection,
}
impl Step for UnstableBookGen {
use getopts::Options;
use crate::builder::Builder;
-use crate::cache::{Interned, INTERNER};
-use crate::config::Config;
+use crate::config::{Config, TargetSelection};
use crate::{Build, DocTests};
/// Deserialized version of all flags for this compile.
pub stage: Option<u32>,
pub keep_stage: Vec<u32>,
- pub host: Vec<Interned<String>>,
- pub target: Vec<Interned<String>>,
+ pub host: Vec<TargetSelection>,
+ pub target: Vec<TargetSelection>,
pub config: Option<PathBuf>,
pub jobs: Option<u32>,
pub cmd: Subcommand,
.collect(),
host: split(&matches.opt_strs("host"))
.into_iter()
- .map(|x| INTERNER.intern_string(x))
+ .map(|x| TargetSelection::from_user(&x))
.collect::<Vec<_>>(),
target: split(&matches.opt_strs("target"))
.into_iter()
- .map(|x| INTERNER.intern_string(x))
+ .map(|x| TargetSelection::from_user(&x))
.collect::<Vec<_>>(),
config: cfg_file,
jobs: matches.opt_str("jobs").map(|j| j.parse().expect("`jobs` should be a number")),
use crate::Compiler;
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
-use crate::cache::Interned;
-use crate::config::Config;
+use crate::config::{Config, TargetSelection};
-pub fn install_docs(builder: &Builder<'_>, stage: u32, host: Interned<String>) {
+pub fn install_docs(builder: &Builder<'_>, stage: u32, host: TargetSelection) {
install_sh(builder, "docs", "rust-docs", stage, Some(host));
}
-pub fn install_std(builder: &Builder<'_>, stage: u32, target: Interned<String>) {
+pub fn install_std(builder: &Builder<'_>, stage: u32, target: TargetSelection) {
install_sh(builder, "std", "rust-std", stage, Some(target));
}
-pub fn install_cargo(builder: &Builder<'_>, stage: u32, host: Interned<String>) {
+pub fn install_cargo(builder: &Builder<'_>, stage: u32, host: TargetSelection) {
install_sh(builder, "cargo", "cargo", stage, Some(host));
}
-pub fn install_rls(builder: &Builder<'_>, stage: u32, host: Interned<String>) {
+pub fn install_rls(builder: &Builder<'_>, stage: u32, host: TargetSelection) {
install_sh(builder, "rls", "rls", stage, Some(host));
}
-pub fn install_rust_analyzer(builder: &Builder<'_>, stage: u32, host: Interned<String>) {
+
+pub fn install_rust_analyzer(builder: &Builder<'_>, stage: u32, host: TargetSelection) {
install_sh(builder, "rust-analyzer", "rust-analyzer", stage, Some(host));
}
-pub fn install_clippy(builder: &Builder<'_>, stage: u32, host: Interned<String>) {
+
+pub fn install_clippy(builder: &Builder<'_>, stage: u32, host: TargetSelection) {
install_sh(builder, "clippy", "clippy", stage, Some(host));
}
-pub fn install_miri(builder: &Builder<'_>, stage: u32, host: Interned<String>) {
+pub fn install_miri(builder: &Builder<'_>, stage: u32, host: TargetSelection) {
install_sh(builder, "miri", "miri", stage, Some(host));
}
-pub fn install_rustfmt(builder: &Builder<'_>, stage: u32, host: Interned<String>) {
+pub fn install_rustfmt(builder: &Builder<'_>, stage: u32, host: TargetSelection) {
install_sh(builder, "rustfmt", "rustfmt", stage, Some(host));
}
-pub fn install_analysis(builder: &Builder<'_>, stage: u32, host: Interned<String>) {
+pub fn install_analysis(builder: &Builder<'_>, stage: u32, host: TargetSelection) {
install_sh(builder, "analysis", "rust-analysis", stage, Some(host));
}
pub fn install_src(builder: &Builder<'_>, stage: u32) {
install_sh(builder, "src", "rust-src", stage, None);
}
-pub fn install_rustc(builder: &Builder<'_>, stage: u32, host: Interned<String>) {
+pub fn install_rustc(builder: &Builder<'_>, stage: u32, host: TargetSelection) {
install_sh(builder, "rustc", "rustc", stage, Some(host));
}
package: &str,
name: &str,
stage: u32,
- host: Option<Interned<String>>,
+ host: Option<TargetSelection>,
) {
builder.info(&format!("Install {} stage{} ({:?})", package, stage, host));
t!(fs::create_dir_all(&empty_dir));
let package_name = if let Some(host) = host {
- format!("{}-{}", pkgname(builder, name), host)
+ format!("{}-{}", pkgname(builder, name), host.triple)
} else {
pkgname(builder, name)
};
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct $name {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl $name {
config.extended && config.tools.as_ref()
.map_or(true, |t| t.contains($path))
}
-
- #[allow(dead_code)]
- fn should_install(builder: &Builder<'_>) -> bool {
- builder.config.tools.as_ref().map_or(false, |t| t.contains($path))
- }
}
impl Step for $name {
install_cargo(builder, self.compiler.stage, self.target);
};
Rls, "rls", Self::should_build(_config), only_hosts: true, {
- if builder.ensure(dist::Rls { compiler: self.compiler, target: self.target }).is_some() ||
- Self::should_install(builder) {
+ if builder.ensure(dist::Rls { compiler: self.compiler, target: self.target }).is_some() {
install_rls(builder, self.compiler.stage, self.target);
} else {
builder.info(
};
RustAnalyzer, "rust-analyzer", Self::should_build(_config), only_hosts: true, {
builder.ensure(dist::RustAnalyzer { compiler: self.compiler, target: self.target });
- if Self::should_install(builder) {
- install_rust_analyzer(builder, self.compiler.stage, self.target);
- } else {
- builder.info(
- &format!("skipping Install rust-analyzer stage{} ({})", self.compiler.stage, self.target),
- );
- }
+ install_rust_analyzer(builder, self.compiler.stage, self.target);
};
Clippy, "clippy", Self::should_build(_config), only_hosts: true, {
builder.ensure(dist::Clippy { compiler: self.compiler, target: self.target });
- if Self::should_install(builder) {
- install_clippy(builder, self.compiler.stage, self.target);
- } else {
- builder.info(
- &format!("skipping Install clippy stage{} ({})", self.compiler.stage, self.target),
- );
- }
+ install_clippy(builder, self.compiler.stage, self.target);
};
Miri, "miri", Self::should_build(_config), only_hosts: true, {
- if builder.ensure(dist::Miri { compiler: self.compiler, target: self.target }).is_some() ||
- Self::should_install(builder) {
+ if builder.ensure(dist::Miri { compiler: self.compiler, target: self.target }).is_some() {
install_miri(builder, self.compiler.stage, self.target);
} else {
builder.info(
if builder.ensure(dist::Rustfmt {
compiler: self.compiler,
target: self.target
- }).is_some() || Self::should_install(builder) {
+ }).is_some() {
install_rustfmt(builder, self.compiler.stage, self.target);
} else {
builder.info(
use build_helper::{mtime, output, run, run_suppressed, t, try_run, try_run_suppressed};
use filetime::FileTime;
+use crate::config::TargetSelection;
use crate::util::{exe, libdir, CiEnv};
mod builder;
#[derive(Eq, PartialOrd, Ord, PartialEq, Clone, Copy, Hash, Debug)]
pub struct Compiler {
stage: u32,
- host: Interned<String>,
+ host: TargetSelection,
}
#[derive(PartialEq, Eq, Copy, Clone, Debug)]
verbosity: usize,
// Targets for which to build
- build: Interned<String>,
- hosts: Vec<Interned<String>>,
- targets: Vec<Interned<String>>,
+ build: TargetSelection,
+ hosts: Vec<TargetSelection>,
+ targets: Vec<TargetSelection>,
// Stage 0 (downloaded) compiler, lld and cargo or their local rust equivalents
initial_rustc: PathBuf,
// Runtime state filled in later on
// C/C++ compilers and archiver for all targets
- cc: HashMap<Interned<String>, cc::Tool>,
- cxx: HashMap<Interned<String>, cc::Tool>,
- ar: HashMap<Interned<String>, PathBuf>,
- ranlib: HashMap<Interned<String>, PathBuf>,
+ cc: HashMap<TargetSelection, cc::Tool>,
+ cxx: HashMap<TargetSelection, cc::Tool>,
+ ar: HashMap<TargetSelection, PathBuf>,
+ ranlib: HashMap<TargetSelection, PathBuf>,
// Miscellaneous
crates: HashMap<Interned<String>, Crate>,
is_sudo: bool,
delayed_failures: RefCell<Vec<String>>,
prerelease_version: Cell<Option<u32>>,
tool_artifacts:
- RefCell<HashMap<Interned<String>, HashMap<String, (&'static str, PathBuf, Vec<String>)>>>,
+ RefCell<HashMap<TargetSelection, HashMap<String, (&'static str, PathBuf, Vec<String>)>>>,
}
#[derive(Debug)]
output(
Command::new(&config.initial_rustc)
.arg("--target")
- .arg(config.build)
+ .arg(config.build.rustc_target_arg())
.arg("--print")
.arg("target-libdir"),
)
}
pub fn build_triple(&self) -> &[Interned<String>] {
- unsafe { slice::from_raw_parts(&self.build, 1) }
+ slice::from_ref(&self.build.triple)
}
/// Executes the entire build, as configured by the flags and configuration.
}
fn tools_dir(&self, compiler: Compiler) -> PathBuf {
- let out = self.out.join(&*compiler.host).join(format!("stage{}-tools-bin", compiler.stage));
+ let out = self
+ .out
+ .join(&*compiler.host.triple)
+ .join(format!("stage{}-tools-bin", compiler.stage));
t!(fs::create_dir_all(&out));
out
}
Mode::ToolBootstrap => "-bootstrap-tools",
Mode::ToolStd | Mode::ToolRustc => "-tools",
};
- self.out.join(&*compiler.host).join(format!("stage{}{}", compiler.stage, suffix))
+ self.out.join(&*compiler.host.triple).join(format!("stage{}{}", compiler.stage, suffix))
}
/// Returns the root output directory for all Cargo output in a given stage,
/// running a particular compiler, whether or not we're building the
/// standard library, and targeting the specified architecture.
- fn cargo_out(&self, compiler: Compiler, mode: Mode, target: Interned<String>) -> PathBuf {
- self.stage_out(compiler, mode).join(&*target).join(self.cargo_dir())
+ fn cargo_out(&self, compiler: Compiler, mode: Mode, target: TargetSelection) -> PathBuf {
+ self.stage_out(compiler, mode).join(&*target.triple).join(self.cargo_dir())
}
/// Root output directory for LLVM compiled for `target`
///
/// Note that if LLVM is configured externally then the directory returned
/// will likely be empty.
- fn llvm_out(&self, target: Interned<String>) -> PathBuf {
- self.out.join(&*target).join("llvm")
+ fn llvm_out(&self, target: TargetSelection) -> PathBuf {
+ self.out.join(&*target.triple).join("llvm")
}
- fn lld_out(&self, target: Interned<String>) -> PathBuf {
- self.out.join(&*target).join("lld")
+ fn lld_out(&self, target: TargetSelection) -> PathBuf {
+ self.out.join(&*target.triple).join("lld")
}
/// Output directory for all documentation for a target
- fn doc_out(&self, target: Interned<String>) -> PathBuf {
- self.out.join(&*target).join("doc")
+ fn doc_out(&self, target: TargetSelection) -> PathBuf {
+ self.out.join(&*target.triple).join("doc")
}
/// Output directory for all documentation for a target
- fn compiler_doc_out(&self, target: Interned<String>) -> PathBuf {
- self.out.join(&*target).join("compiler-doc")
+ fn compiler_doc_out(&self, target: TargetSelection) -> PathBuf {
+ self.out.join(&*target.triple).join("compiler-doc")
}
/// Output directory for some generated md crate documentation for a target (temporary)
- fn md_doc_out(&self, target: Interned<String>) -> Interned<PathBuf> {
- INTERNER.intern_path(self.out.join(&*target).join("md-doc"))
- }
-
- /// Output directory for all crate documentation for a target (temporary)
- ///
- /// The artifacts here are then copied into `doc_out` above.
- fn crate_doc_out(&self, target: Interned<String>) -> PathBuf {
- self.out.join(&*target).join("crate-docs")
+ fn md_doc_out(&self, target: TargetSelection) -> Interned<PathBuf> {
+ INTERNER.intern_path(self.out.join(&*target.triple).join("md-doc"))
}
/// Returns `true` if no custom `llvm-config` is set for the specified target.
///
/// If no custom `llvm-config` was specified then Rust's llvm will be used.
- fn is_rust_llvm(&self, target: Interned<String>) -> bool {
+ fn is_rust_llvm(&self, target: TargetSelection) -> bool {
match self.config.target_config.get(&target) {
Some(ref c) => c.llvm_config.is_none(),
None => true,
}
/// Returns the path to `FileCheck` binary for the specified target
- fn llvm_filecheck(&self, target: Interned<String>) -> PathBuf {
+ fn llvm_filecheck(&self, target: TargetSelection) -> PathBuf {
let target_config = self.config.target_config.get(&target);
if let Some(s) = target_config.and_then(|c| c.llvm_filecheck.as_ref()) {
s.to_path_buf()
} else if let Some(s) = target_config.and_then(|c| c.llvm_config.as_ref()) {
let llvm_bindir = output(Command::new(s).arg("--bindir"));
- let filecheck = Path::new(llvm_bindir.trim()).join(exe("FileCheck", &*target));
+ let filecheck = Path::new(llvm_bindir.trim()).join(exe("FileCheck", target));
if filecheck.exists() {
filecheck
} else {
// llvm subdirectory of the libdir.
let llvm_libdir = output(Command::new(s).arg("--libdir"));
let lib_filecheck =
- Path::new(llvm_libdir.trim()).join("llvm").join(exe("FileCheck", &*target));
+ Path::new(llvm_libdir.trim()).join("llvm").join(exe("FileCheck", target));
if lib_filecheck.exists() {
lib_filecheck
} else {
} else {
base
};
- base.join("bin").join(exe("FileCheck", &*target))
+ base.join("bin").join(exe("FileCheck", target))
}
}
/// Directory for libraries built from C/C++ code and shared between stages.
- fn native_dir(&self, target: Interned<String>) -> PathBuf {
- self.out.join(&*target).join("native")
+ fn native_dir(&self, target: TargetSelection) -> PathBuf {
+ self.out.join(&*target.triple).join("native")
}
/// Root output directory for rust_test_helpers library compiled for
/// `target`
- fn test_helpers_out(&self, target: Interned<String>) -> PathBuf {
+ fn test_helpers_out(&self, target: TargetSelection) -> PathBuf {
self.native_dir(target).join("rust-test-helpers")
}
/// Returns the libdir of the snapshot compiler.
fn rustc_snapshot_libdir(&self) -> PathBuf {
- self.rustc_snapshot_sysroot().join(libdir(&self.config.build))
+ self.rustc_snapshot_sysroot().join(libdir(self.config.build))
}
/// Returns the sysroot of the snapshot compiler.
}
/// Returns the path to the C compiler for the target specified.
- fn cc(&self, target: Interned<String>) -> &Path {
+ fn cc(&self, target: TargetSelection) -> &Path {
self.cc[&target].path()
}
/// Returns a list of flags to pass to the C compiler for the target
/// specified.
- fn cflags(&self, target: Interned<String>, which: GitRepo) -> Vec<String> {
+ fn cflags(&self, target: TargetSelection, which: GitRepo) -> Vec<String> {
// Filter out -O and /O (the optimization flags) that we picked up from
// cc-rs because the build scripts will determine that for themselves.
let mut base = self.cc[&target]
// Work around an apparently bad MinGW / GCC optimization,
// See: http://lists.llvm.org/pipermail/cfe-dev/2016-December/051980.html
// See: https://gcc.gnu.org/bugzilla/show_bug.cgi?id=78936
- if &*target == "i686-pc-windows-gnu" {
+ if &*target.triple == "i686-pc-windows-gnu" {
base.push("-fno-omit-frame-pointer".into());
}
}
/// Returns the path to the `ar` archive utility for the target specified.
- fn ar(&self, target: Interned<String>) -> Option<&Path> {
+ fn ar(&self, target: TargetSelection) -> Option<&Path> {
self.ar.get(&target).map(|p| &**p)
}
/// Returns the path to the `ranlib` utility for the target specified.
- fn ranlib(&self, target: Interned<String>) -> Option<&Path> {
+ fn ranlib(&self, target: TargetSelection) -> Option<&Path> {
self.ranlib.get(&target).map(|p| &**p)
}
/// Returns the path to the C++ compiler for the target specified.
- fn cxx(&self, target: Interned<String>) -> Result<&Path, String> {
+ fn cxx(&self, target: TargetSelection) -> Result<&Path, String> {
match self.cxx.get(&target) {
Some(p) => Ok(p.path()),
None => {
}
/// Returns the path to the linker for the given target if it needs to be overridden.
- fn linker(&self, target: Interned<String>, can_use_lld: bool) -> Option<&Path> {
+ fn linker(&self, target: TargetSelection, can_use_lld: bool) -> Option<&Path> {
if let Some(linker) = self.config.target_config.get(&target).and_then(|c| c.linker.as_ref())
{
Some(linker)
} else if target != self.config.build
- && util::use_host_linker(&target)
+ && util::use_host_linker(target)
&& !target.contains("msvc")
{
Some(self.cc(target))
}
/// Returns if this target should statically link the C runtime, if specified
- fn crt_static(&self, target: Interned<String>) -> Option<bool> {
+ fn crt_static(&self, target: TargetSelection) -> Option<bool> {
if target.contains("pc-windows-msvc") {
Some(true)
} else {
}
/// Returns the "musl root" for this `target`, if defined
- fn musl_root(&self, target: Interned<String>) -> Option<&Path> {
+ fn musl_root(&self, target: TargetSelection) -> Option<&Path> {
self.config
.target_config
.get(&target)
}
/// Returns the "musl libdir" for this `target`.
- fn musl_libdir(&self, target: Interned<String>) -> Option<PathBuf> {
+ fn musl_libdir(&self, target: TargetSelection) -> Option<PathBuf> {
let t = self.config.target_config.get(&target)?;
if let libdir @ Some(_) = &t.musl_libdir {
return libdir.clone();
}
/// Returns the sysroot for the wasi target, if defined
- fn wasi_root(&self, target: Interned<String>) -> Option<&Path> {
+ fn wasi_root(&self, target: TargetSelection) -> Option<&Path> {
self.config.target_config.get(&target).and_then(|t| t.wasi_root.as_ref()).map(|p| &**p)
}
/// Returns `true` if this is a no-std `target`, if defined
- fn no_std(&self, target: Interned<String>) -> Option<bool> {
+ fn no_std(&self, target: TargetSelection) -> Option<bool> {
self.config.target_config.get(&target).map(|t| t.no_std)
}
/// Returns `true` if the target will be tested using the `remote-test-client`
/// and `remote-test-server` binaries.
- fn remote_tested(&self, target: Interned<String>) -> bool {
+ fn remote_tested(&self, target: TargetSelection) -> bool {
self.qemu_rootfs(target).is_some()
|| target.contains("android")
|| env::var_os("TEST_DEVICE_ADDR").is_some()
///
/// If `Some` is returned then that means that tests for this target are
/// emulated with QEMU and binaries will need to be shipped to the emulator.
- fn qemu_rootfs(&self, target: Interned<String>) -> Option<&Path> {
+ fn qemu_rootfs(&self, target: TargetSelection) -> Option<&Path> {
self.config.target_config.get(&target).and_then(|t| t.qemu_rootfs.as_ref()).map(|p| &**p)
}
///
/// When all of these conditions are met the build will lift artifacts from
/// the previous stage forward.
- fn force_use_stage1(&self, compiler: Compiler, target: Interned<String>) -> bool {
+ fn force_use_stage1(&self, compiler: Compiler, target: TargetSelection) -> bool {
!self.config.full_bootstrap
&& compiler.stage >= 2
&& (self.hosts.iter().any(|h| *h == target) || target == self.build)
self.rust_version()
}
- fn llvm_link_tools_dynamically(&self, target: Interned<String>) -> bool {
+ fn llvm_link_tools_dynamically(&self, target: TargetSelection) -> bool {
target.contains("linux-gnu") || target.contains("apple-darwin")
}
use build_helper::{output, t};
use crate::builder::{Builder, RunConfig, ShouldRun, Step};
-use crate::cache::Interned;
use crate::channel;
+use crate::config::TargetSelection;
use crate::util::{self, exe};
use crate::GitRepo;
use build_helper::up_to_date;
// if not).
pub fn prebuilt_llvm_config(
builder: &Builder<'_>,
- target: Interned<String>,
+ target: TargetSelection,
) -> Result<PathBuf, Meta> {
// If we're using a custom LLVM bail out here, but we can only use a
// custom LLVM for the build triple.
let root = "src/llvm-project/llvm";
let out_dir = builder.llvm_out(target);
+
let mut llvm_config_ret_dir = builder.llvm_out(builder.config.build);
if !builder.config.build.contains("msvc") || builder.config.ninja {
llvm_config_ret_dir.push("build");
}
llvm_config_ret_dir.push("bin");
- let build_llvm_config = llvm_config_ret_dir.join(exe("llvm-config", &*builder.config.build));
+ let build_llvm_config = llvm_config_ret_dir.join(exe("llvm-config", builder.config.build));
let stamp = out_dir.join("llvm-finished-building");
let stamp = HashStamp::new(stamp, builder.in_tree_llvm_info.sha());
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Llvm {
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Llvm {
/// Compile LLVM for `target`.
fn run(self, builder: &Builder<'_>) -> PathBuf {
let target = self.target;
+ let target_native = if self.target.starts_with("riscv") {
+ // RISC-V target triples in Rust are not named the same as C compiler target triples.
+ // This converts Rust RISC-V target triples to C compiler triples.
+ let idx = target.triple.find('-').unwrap();
+
+ format!("riscv{}{}", &target.triple[5..7], &target.triple[idx..])
+ } else {
+ target.to_string()
+ };
let Meta { stamp, build_llvm_config, out_dir, root } =
match prebuilt_llvm_config(builder, target) {
.define("LLVM_ENABLE_BINDINGS", "OFF")
.define("LLVM_ENABLE_Z3_SOLVER", "OFF")
.define("LLVM_PARALLEL_COMPILE_JOBS", builder.jobs().to_string())
- .define("LLVM_TARGET_ARCH", target.split('-').next().unwrap())
- .define("LLVM_DEFAULT_TARGET_TRIPLE", target);
+ .define("LLVM_TARGET_ARCH", target_native.split('-').next().unwrap())
+ .define("LLVM_DEFAULT_TARGET_TRIPLE", target_native);
if !target.contains("netbsd") {
cfg.define("LLVM_ENABLE_ZLIB", "ON");
}
}
+ if target.starts_with("riscv") {
+ // In RISC-V, using C++ atomics requires linking to `libatomic` but the LLVM build
+ // system check cannot detect this. Therefore it is set manually here.
+ if !builder.config.llvm_tools_enabled {
+ cfg.define("CMAKE_EXE_LINKER_FLAGS", "-latomic");
+ } else {
+ cfg.define("CMAKE_EXE_LINKER_FLAGS", "-latomic -static-libstdc++");
+ }
+ cfg.define("CMAKE_SHARED_LINKER_FLAGS", "-latomic");
+ }
+
if target.contains("msvc") {
cfg.define("LLVM_USE_CRT_DEBUG", "MT");
cfg.define("LLVM_USE_CRT_RELEASE", "MT");
fn configure_cmake(
builder: &Builder<'_>,
- target: Interned<String>,
+ target: TargetSelection,
cfg: &mut cmake::Config,
use_compiler_launcher: bool,
) {
if builder.config.ninja {
cfg.generator("Ninja");
}
- cfg.target(&target).host(&builder.config.build);
+ cfg.target(&target.triple).host(&builder.config.build.triple);
let sanitize_cc = |cc: &Path| {
if target.contains("msvc") {
cfg.define("CMAKE_C_COMPILER", sanitize_cc(&wrap_cc))
.define("CMAKE_CXX_COMPILER", sanitize_cc(&wrap_cc));
cfg.env("SCCACHE_PATH", builder.config.ccache.as_ref().unwrap())
- .env("SCCACHE_TARGET", target)
+ .env("SCCACHE_TARGET", target.triple)
.env("SCCACHE_CC", &cc)
.env("SCCACHE_CXX", &cxx);
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Lld {
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Lld {
// brittle and will break over time. If anyone knows better how to
// cross-compile LLD it would be much appreciated to fix this!
if target != builder.config.build {
- cfg.env("LLVM_CONFIG_SHIM_REPLACE", &builder.config.build)
- .env("LLVM_CONFIG_SHIM_REPLACE_WITH", &target)
+ cfg.env("LLVM_CONFIG_SHIM_REPLACE", &builder.config.build.triple)
+ .env("LLVM_CONFIG_SHIM_REPLACE_WITH", &target.triple)
.define(
"LLVM_TABLEGEN_EXE",
llvm_config.with_file_name("llvm-tblgen").with_extension(EXE_EXTENSION),
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct TestHelpers {
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for TestHelpers {
cfg.cargo_metadata(false)
.out_dir(&dst)
- .target(&target)
- .host(&builder.config.build)
+ .target(&target.triple)
+ .host(&builder.config.build.triple)
.opt_level(0)
.warnings(false)
.debug(false)
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Sanitizers {
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Sanitizers {
let mut cfg = cmake::Config::new(&compiler_rt_dir);
cfg.profile("Release");
- cfg.define("CMAKE_C_COMPILER_TARGET", self.target);
+ cfg.define("CMAKE_C_COMPILER_TARGET", self.target.triple);
cfg.define("COMPILER_RT_BUILD_BUILTINS", "OFF");
cfg.define("COMPILER_RT_BUILD_CRT", "OFF");
cfg.define("COMPILER_RT_BUILD_LIBFUZZER", "OFF");
/// Returns sanitizers available on a given target.
fn supported_sanitizers(
out_dir: &Path,
- target: Interned<String>,
+ target: TargetSelection,
channel: &str,
) -> Vec<SanitizerRuntime> {
let darwin_libs = |os: &str, components: &[&str]| -> Vec<SanitizerRuntime> {
.collect()
};
- match &*target {
+ match &*target.triple {
"aarch64-fuchsia" => common_libs("fuchsia", "aarch64", &["asan"]),
"aarch64-unknown-linux-gnu" => {
common_libs("linux", "aarch64", &["asan", "lsan", "msan", "tsan"])
panic!("the iOS target is only supported on macOS");
}
- build.config.target_config.entry(target.clone()).or_insert(Target::from_triple(target));
+ build
+ .config
+ .target_config
+ .entry(target.clone())
+ .or_insert(Target::from_triple(&target.triple));
if target.contains("-none-") || target.contains("nvptx") {
if build.no_std(*target) == Some(false) {
use crate::builder::{Builder, Compiler, Kind, RunConfig, ShouldRun, Step};
use crate::cache::{Interned, INTERNER};
use crate::compile;
+use crate::config::TargetSelection;
use crate::dist;
use crate::flags::Subcommand;
use crate::native;
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Linkcheck {
- host: Interned<String>,
+ host: TargetSelection,
}
impl Step for Linkcheck {
let _time = util::timeit(&builder);
try_run(
builder,
- builder.tool_cmd(Tool::Linkchecker).arg(builder.out.join(host).join("doc")),
+ builder.tool_cmd(Tool::Linkchecker).arg(builder.out.join(host.triple).join("doc")),
);
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Cargotest {
stage: u32,
- host: Interned<String>,
+ host: TargetSelection,
}
impl Step for Cargotest {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Cargo {
stage: u32,
- host: Interned<String>,
+ host: TargetSelection,
}
impl Step for Cargo {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Rls {
stage: u32,
- host: Interned<String>,
+ host: TargetSelection,
}
impl Step for Rls {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Rustfmt {
stage: u32,
- host: Interned<String>,
+ host: TargetSelection,
}
impl Step for Rustfmt {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Miri {
stage: u32,
- host: Interned<String>,
+ host: TargetSelection,
}
impl Step for Miri {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct CompiletestTest {
- host: Interned<String>,
+ host: TargetSelection,
}
impl Step for CompiletestTest {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct Clippy {
stage: u32,
- host: Interned<String>,
+ host: TargetSelection,
}
impl Step for Clippy {
cargo.env("RUSTC_TEST_SUITE", builder.rustc(compiler));
cargo.env("RUSTC_LIB_PATH", builder.rustc_libdir(compiler));
let host_libs = builder.stage_out(compiler, Mode::ToolRustc).join(builder.cargo_dir());
- let target_libs =
- builder.stage_out(compiler, Mode::ToolRustc).join(&self.host).join(builder.cargo_dir());
+ let target_libs = builder
+ .stage_out(compiler, Mode::ToolRustc)
+ .join(&self.host.triple)
+ .join(builder.cargo_dir());
cargo.env("HOST_LIBS", host_libs);
cargo.env("TARGET_LIBS", target_libs);
// clippy tests need to find the driver
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustdocJSStd {
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for RustdocJSStd {
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustdocJSNotStd {
- pub host: Interned<String>,
- pub target: Interned<String>,
+ pub host: TargetSelection,
+ pub target: TargetSelection,
pub compiler: Compiler,
}
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RustdocUi {
- pub host: Interned<String>,
- pub target: Interned<String>,
+ pub host: TargetSelection,
+ pub target: TargetSelection,
pub compiler: Compiler,
}
}
}
-fn testdir(builder: &Builder<'_>, host: Interned<String>) -> PathBuf {
- builder.out.join(host).join("test")
+fn testdir(builder: &Builder<'_>, host: TargetSelection) -> PathBuf {
+ builder.out.join(host.triple).join("test")
}
macro_rules! default_test {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct $name {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for $name {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
struct Compiletest {
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
mode: &'static str,
suite: &'static str,
path: &'static str,
cmd.arg("--rustdoc-path").arg(builder.rustdoc(compiler));
}
+ if mode == "run-make" && suite.ends_with("fulldeps") {
+ cmd.arg("--rust-demangler-path").arg(builder.tool_exe(Tool::RustDemangler));
+ }
+
cmd.arg("--src-base").arg(builder.src.join("src/test").join(suite));
cmd.arg("--build-base").arg(testdir(builder, compiler.host).join(suite));
cmd.arg("--stage-id").arg(format!("stage{}-{}", compiler.stage, target));
cmd.arg("--mode").arg(mode);
- cmd.arg("--target").arg(target);
- cmd.arg("--host").arg(&*compiler.host);
+ cmd.arg("--target").arg(target.rustc_target_arg());
+ cmd.arg("--host").arg(&*compiler.host.triple);
cmd.arg("--llvm-filecheck").arg(builder.llvm_filecheck(builder.config.build));
if builder.config.cmd.bless() {
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct CrateLibrustc {
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
test_kind: TestKind,
krate: Interned<String>,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct CrateNotDefault {
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
test_kind: TestKind,
krate: &'static str,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub struct Crate {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
pub mode: Mode,
pub test_kind: TestKind,
pub krate: Interned<String>,
if target.contains("emscripten") {
cargo.env(
- format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
+ format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)),
builder.config.nodejs.as_ref().expect("nodejs not configured"),
);
} else if target.starts_with("wasm32") {
let node = builder.config.nodejs.as_ref().expect("nodejs not configured");
let runner =
format!("{} {}/src/etc/wasm32-shim.js", node.display(), builder.src.display());
- cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target)), &runner);
+ cargo.env(format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)), &runner);
} else if builder.remote_tested(target) {
cargo.env(
- format!("CARGO_TARGET_{}_RUNNER", envify(&target)),
+ format!("CARGO_TARGET_{}_RUNNER", envify(&target.triple)),
format!("{} run 0", builder.tool_exe(Tool::RemoteTestClient).display()),
);
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct CrateRustdoc {
- host: Interned<String>,
+ host: TargetSelection,
test_kind: TestKind,
}
#[derive(Debug, Copy, Clone, PartialEq, Eq, Hash)]
pub struct RemoteCopyLibs {
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
}
impl Step for RemoteCopyLibs {
// Spawn the emulator and wait for it to come online
let tool = builder.tool_exe(Tool::RemoteTestClient);
let mut cmd = Command::new(&tool);
- cmd.arg("spawn-emulator").arg(target).arg(&server).arg(builder.out.join("tmp"));
+ cmd.arg("spawn-emulator").arg(target.triple).arg(&server).arg(builder.out.join("tmp"));
if let Some(rootfs) = builder.qemu_rootfs(target) {
cmd.arg(rootfs);
}
.current_dir(&dir),
);
builder.run(
- Command::new(build_helper::make(&builder.config.build)).arg("check").current_dir(&dir),
+ Command::new(build_helper::make(&builder.config.build.triple))
+ .arg("check")
+ .current_dir(&dir),
);
// Now make sure that rust-src has all of libstd's dependencies
use build_helper::t;
use crate::builder::{Builder, Cargo as CargoCommand, RunConfig, ShouldRun, Step};
-use crate::cache::Interned;
use crate::channel;
use crate::channel::GitInfo;
use crate::compile;
+use crate::config::TargetSelection;
use crate::toolstate::ToolState;
use crate::util::{add_dylib_path, exe};
use crate::Compiler;
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
struct ToolBuild {
compiler: Compiler,
- target: Interned<String>,
+ target: TargetSelection,
tool: &'static str,
path: &'static str,
mode: Mode,
.and_then(|p| p.file_name())
.and_then(|p| p.to_str())
.unwrap();
- if maybe_target != &*target {
+ if maybe_target != &*target.triple {
continue;
}
}
}
} else {
let cargo_out =
- builder.cargo_out(compiler, self.mode, target).join(exe(tool, &compiler.host));
- let bin = builder.tools_dir(compiler).join(exe(tool, &compiler.host));
+ builder.cargo_out(compiler, self.mode, target).join(exe(tool, compiler.host));
+ let bin = builder.tools_dir(compiler).join(exe(tool, compiler.host));
builder.copy(&cargo_out, &bin);
Some(bin)
}
builder: &Builder<'_>,
compiler: Compiler,
mode: Mode,
- target: Interned<String>,
+ target: TargetSelection,
command: &'static str,
path: &'static str,
source_type: SourceType,
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct $name {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for $name {
Compiletest, "src/tools/compiletest", "compiletest", is_unstable_tool = true;
BuildManifest, "src/tools/build-manifest", "build-manifest";
RemoteTestClient, "src/tools/remote-test-client", "remote-test-client";
+ RustDemangler, "src/tools/rust-demangler", "rust-demangler";
RustInstaller, "src/tools/rust-installer", "fabricate", is_external_tool = true;
RustdocTheme, "src/tools/rustdoc-themes", "rustdoc-themes";
ExpandYamlAnchors, "src/tools/expand-yaml-anchors", "expand-yaml-anchors";
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct RemoteTestServer {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for RemoteTestServer {
if !target_compiler.is_snapshot(builder) {
panic!("rustdoc in stage 0 must be snapshot rustdoc");
}
- return builder.initial_rustc.with_file_name(exe("rustdoc", &target_compiler.host));
+ return builder.initial_rustc.with_file_name(exe("rustdoc", target_compiler.host));
}
let target = target_compiler.host;
// Similar to `compile::Assemble`, build with the previous stage's compiler. Otherwise
// rustdoc a different name.
let tool_rustdoc = builder
.cargo_out(build_compiler, Mode::ToolRustc, target)
- .join(exe("rustdoc_tool_binary", &target_compiler.host));
+ .join(exe("rustdoc_tool_binary", target_compiler.host));
// don't create a stage0-sysroot/bin directory.
if target_compiler.stage > 0 {
let sysroot = builder.sysroot(target_compiler);
let bindir = sysroot.join("bin");
t!(fs::create_dir_all(&bindir));
- let bin_rustdoc = bindir.join(exe("rustdoc", &*target_compiler.host));
+ let bin_rustdoc = bindir.join(exe("rustdoc", target_compiler.host));
let _ = fs::remove_file(&bin_rustdoc);
builder.copy(&tool_rustdoc, &bin_rustdoc);
bin_rustdoc
#[derive(Debug, Copy, Clone, Hash, PartialEq, Eq)]
pub struct Cargo {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
}
impl Step for Cargo {
#[derive(Debug, Clone, Hash, PartialEq, Eq)]
pub struct $name {
pub compiler: Compiler,
- pub target: Interned<String>,
+ pub target: TargetSelection,
pub extra_features: Vec<String>,
}
use build_helper::t;
use crate::builder::Builder;
-use crate::cache::Interned;
-use crate::config::Config;
+use crate::config::{Config, TargetSelection};
/// Returns the `name` as the filename of a static library for `target`.
-pub fn staticlib(name: &str, target: &str) -> String {
+pub fn staticlib(name: &str, target: TargetSelection) -> String {
if target.contains("windows") { format!("{}.lib", name) } else { format!("lib{}.a", name) }
}
/// Given an executable called `name`, return the filename for the
/// executable for a particular target.
-pub fn exe(name: &str, target: &str) -> String {
+pub fn exe(name: &str, target: TargetSelection) -> String {
if target.contains("windows") { format!("{}.exe", name) } else { name.to_string() }
}
/// Returns the corresponding relative library directory that the compiler's
/// dylibs will be found in.
-pub fn libdir(target: &str) -> &'static str {
+pub fn libdir(target: TargetSelection) -> &'static str {
if target.contains("windows") { "bin" } else { "lib" }
}
}
}
-pub fn use_host_linker(target: &Interned<String>) -> bool {
+pub fn use_host_linker(target: TargetSelection) -> bool {
// FIXME: this information should be gotten by checking the linker flavor
// of the rustc target
!(target.contains("emscripten")
dist-powerpc-linux: {}
dist-powerpc64-linux: {}
dist-powerpc64le-linux: {}
+ dist-riscv64-linux: {}
dist-s390x-linux: {}
dist-x86_64-freebsd: {}
dist-x86_64-illumos: {}
Images will output artifacts in an `obj` dir at the root of a repository.
+To match conditions in Rust's CI, also set the environment variable `DEPLOY=1`, e.g.:
+```
+DEPLOY=1 ./src/ci/docker/run.sh x86_64-gnu
+```
+
**NOTE**: Re-using the same `obj` dir with different docker images with
the same target triple (e.g. `dist-x86_64-linux` and `dist-various-1`)
may result in strange linker errors, due to shared library versions differing between platforms.
### Generating a `.config` file
+**NOTE:** Existing Dockerfiles can also be a good guide for the process and order
+of script execution.
+
If you have a `linux-cross` image lying around you can use that and skip the
next two steps.
-- First we spin up a container and copy `build_toolchain_root.sh` into it. All
+- First we spin up a container and copy all scripts into it. All
these steps are outside the container:
```
-# Note: We use ubuntu:15.10 because that's the "base" of linux-cross Docker
-# image
-$ docker run -it ubuntu:15.10 bash
+# Note: We use ubuntu:16.04 because that's the "base" of linux-cross Docker
+# image, or simply run ./src/ci/docker/run.sh once, which will download the correct
+# one and you can check it out with `docker images`
+$ docker run -it ubuntu:16.04 bash
+# in another terminal:
$ docker ps
CONTAINER ID IMAGE COMMAND CREATED STATUS PORTS NAMES
-cfbec05ed730 ubuntu:15.10 "bash" 16 seconds ago Up 15 seconds drunk_murdock
-$ docker cp build_toolchain_root.sh drunk_murdock:/
+cfbec05ed730 ubuntu:16.04 "bash" 16 seconds ago Up 15 seconds drunk_murdock
+$ docker cp src/ci/docker/scripts drunk_murdock:/tmp/
```
- Then inside the container we build crosstool-ng by simply calling the bash
script we copied in the previous step:
```
-$ bash build_toolchain_root.sh
+$ cd /tmp/scripts
+# Download packages necessary for building
+$ bash ./cross-apt-packages.sh
+# Download and build crosstool-ng
+$ bash ./crosstool-ng.sh
+```
+
+- In case you want to adjust or start from an existing config, copy that
+ to the container. `crosstool-ng` will automatically load `./.config` if
+ present. Otherwise one can use the TUI to load any config-file.
+
+```
+$ docker cp arm-linux-gnueabi.config drunk_murdock:/tmp/.config
```
- Now, inside the container run the following command to configure the
section and come back.
```
+$ cd /tmp/
$ ct-ng menuconfig
```
meaningful name. This is done outside the container.
```
-$ docker drunk_murdock:/.config arm-linux-gnueabi.config
+$ docker cp drunk_murdock:/tmp/.config arm-linux-gnueabi.config
```
- Now you can shutdown the container or repeat the two last steps to generate a
-FROM centos:5
+# We use Debian 6 (glibc 2.11, kernel 2.6.32) as a common base for other
+# distros that still need Rust support: RHEL 6 (glibc 2.12, kernel 2.6.32) and
+# SLES 11 SP4 (glibc 2.11, kernel 3.0).
+FROM debian:6
WORKDIR /build
-# Centos 5 is EOL and is no longer available from the usual mirrors, so switch
-# to http://vault.centos.org/
-RUN sed -i 's/enabled=1/enabled=0/' /etc/yum/pluginconf.d/fastestmirror.conf
-RUN sed -i 's/mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo
-RUN sed -i 's|#\(baseurl.*\)mirror.centos.org/centos/$releasever|\1vault.centos.org/5.11|' /etc/yum.repos.d/*.repo
+# Debian 6 is EOL and no longer available from the usual mirrors,
+# so we'll need to switch to http://archive.debian.org/
+RUN sed -i '/updates/d' /etc/apt/sources.list && \
+ sed -i 's/httpredir/archive/' /etc/apt/sources.list
-RUN yum upgrade -y && yum install -y \
- curl \
+RUN apt-get update && \
+ apt-get install --allow-unauthenticated -y --no-install-recommends \
+ automake \
bzip2 \
+ ca-certificates \
+ curl \
+ file \
+ g++ \
+ g++-multilib \
gcc \
- gcc-c++ \
+ gcc-multilib \
+ git \
+ lib32z1-dev \
+ libedit-dev \
+ libncurses-dev \
make \
- glibc-devel \
+ patch \
perl \
- zlib-devel \
- file \
- xz \
- which \
- pkgconfig \
+ pkg-config \
+ unzip \
wget \
- autoconf \
- gettext
+ xz-utils \
+ zlib1g-dev
ENV PATH=/rustroot/bin:$PATH
-ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
+ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib32:/rustroot/lib
ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
WORKDIR /tmp
+RUN mkdir /home/user
COPY host-x86_64/dist-x86_64-linux/shared.sh /tmp/
# We need a build of openssl which supports SNI to download artifacts from
COPY host-x86_64/dist-x86_64-linux/build-openssl.sh /tmp/
RUN ./build-openssl.sh
-# The `curl` binary on CentOS doesn't support SNI which is needed for fetching
+# The `curl` binary on Debian 6 doesn't support SNI which is needed for fetching
# some https urls we have, so install a new version of libcurl + curl which is
# using the openssl we just built previously.
#
# Note that we also disable a bunch of optional features of curl that we don't
# really need.
COPY host-x86_64/dist-x86_64-linux/build-curl.sh /tmp/
-RUN ./build-curl.sh
+RUN ./build-curl.sh && apt-get remove -y curl
# binutils < 2.22 has a bug where the 32-bit executables it generates
# immediately segfault in Rust, so we need to install our own binutils.
COPY host-x86_64/dist-x86_64-linux/build-binutils.sh /tmp/
RUN ./build-binutils.sh
-# libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
-# only has 2.6.4, so build our own
-COPY host-x86_64/dist-x86_64-linux/build-cmake.sh /tmp/
-RUN ./build-cmake.sh
-
-# Need a newer version of gcc than centos has to compile LLVM nowadays
+# Need at least GCC 5.1 to compile LLVM nowadays
COPY host-x86_64/dist-x86_64-linux/build-gcc.sh /tmp/
-RUN ./build-gcc.sh
+RUN ./build-gcc.sh && apt-get remove -y gcc g++
-# CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
+# Debian 6 has Python 2.6 by default, but LLVM needs 2.7+
COPY host-x86_64/dist-x86_64-linux/build-python.sh /tmp/
RUN ./build-python.sh
-# Now build LLVM+Clang 7, afterwards configuring further compilations to use the
+# LLVM needs cmake 3.4.3 or higher, and is planning to raise to 3.13.4.
+COPY host-x86_64/dist-x86_64-linux/build-cmake.sh /tmp/
+RUN ./build-cmake.sh
+
+# Now build LLVM+Clang, afterwards configuring further compilations to use the
# clang/clang++ compilers.
-COPY host-x86_64/dist-x86_64-linux/build-clang.sh host-x86_64/dist-x86_64-linux/llvm-project-centos.patch /tmp/
+COPY host-x86_64/dist-x86_64-linux/build-clang.sh /tmp/
RUN ./build-clang.sh
ENV CC=clang CXX=clang++
-# Apparently CentOS 5.5 desn't have `git` in yum, but we're gonna need it for
-# cloning, so download and build it here.
-COPY host-x86_64/dist-x86_64-linux/build-git.sh /tmp/
-RUN ./build-git.sh
-
-# for sanitizers, we need kernel headers files newer than the ones CentOS ships
-# with so we install newer ones here
-COPY host-x86_64/dist-x86_64-linux/build-headers.sh /tmp/
-RUN ./build-headers.sh
-
-# OpenSSL requires a more recent version of perl
-# with so we install newer ones here
-COPY host-x86_64/dist-x86_64-linux/build-perl.sh /tmp/
-RUN ./build-perl.sh
-
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
# libcurl, instead it should compile its own.
ENV LIBCURL_NO_PKG_CONFIG 1
+# There was a bad interaction between "old" 32-bit binaries on current 64-bit
+# kernels with selinux enabled, where ASLR mmap would sometimes choose a low
+# address and then block it for being below `vm.mmap_min_addr` -> `EACCES`.
+# This is probably a kernel bug, but setting `ulimit -Hs` works around it.
+# See also `src/ci/run.sh` where this takes effect.
+ENV SET_HARD_RLIMIT_STACK 1
+
ENV DIST_REQUIRE_ALL_TOOLS 1
--- /dev/null
+# Dist builder for the riscv64gc-unknown-linux-gnu host: builds a
+# riscv64 glibc cross-toolchain with crosstool-ng, then uses it to
+# produce release artifacts via `x.py dist`.
+FROM ubuntu:18.04
+
+# Common packages needed for cross-compilation builders.
+COPY scripts/cross-apt-packages.sh /scripts/
+RUN sh /scripts/cross-apt-packages.sh
+
+# Build and install crosstool-ng, used below to build the cross-toolchain.
+COPY host-x86_64/dist-riscv64-linux/crosstool-ng.sh /scripts/
+RUN sh /scripts/crosstool-ng.sh
+
+# Create and switch to an unprivileged user for the toolchain build
+# (crosstool-ng is configured to not run as root).
+COPY scripts/rustbuild-setup.sh /scripts/
+RUN sh /scripts/rustbuild-setup.sh
+USER rustbuild
+WORKDIR /tmp
+
+# Build the riscv64 cross-toolchain from the checked-in crosstool-ng config.
+COPY host-x86_64/dist-riscv64-linux/build-toolchains.sh host-x86_64/dist-riscv64-linux/riscv64-unknown-linux-gnu.config /tmp/
+RUN ./build-toolchains.sh
+
+USER root
+
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# Expose the freshly built cross tools (gcc, ar, g++, ...) on PATH.
+ENV PATH=$PATH:/x-tools/riscv64-unknown-linux-gnu/bin
+
+# Point the build at the cross compilers for the riscv64gc target.
+ENV CC_riscv64gc_unknown_linux_gnu=riscv64-unknown-linux-gnu-gcc \
+    AR_riscv64gc_unknown_linux_gnu=riscv64-unknown-linux-gnu-ar \
+    CXX_riscv64gc_unknown_linux_gnu=riscv64-unknown-linux-gnu-g++
+
+ENV HOSTS=riscv64gc-unknown-linux-gnu
+
+ENV RUST_CONFIGURE_ARGS --enable-extended --disable-docs
+ENV SCRIPT python3 ../x.py dist --target $HOSTS --host $HOSTS
--- /dev/null
+#!/usr/bin/env bash
+
+set -ex
+
+hide_output() {
+ set +x
+ on_err="
+echo ERROR: An error was encountered with the build.
+cat /tmp/build.log
+exit 1
+"
+ trap "$on_err" ERR
+ bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
+ PING_LOOP_PID=$!
+ $@ &> /tmp/build.log
+ rm /tmp/build.log
+ trap - ERR
+ kill $PING_LOOP_PID
+ set -x
+}
+
+mkdir build
+cd build
+cp ../riscv64-unknown-linux-gnu.config .config
+hide_output ct-ng build
+cd ..
+rm -rf build
--- /dev/null
+# Download, build and install crosstool-ng 1.24.0, the tool later used
+# to construct the riscv64 cross-compilation toolchain.
+set -ex
+
+# Mirrored from https://github.com/crosstool-ng/crosstool-ng/archive/crosstool-ng-1.24.0.tar.gz
+url="https://ci-mirrors.rust-lang.org/rustc/crosstool-ng-1.24.0.tar.gz"
+curl -Lf $url | tar xzf -
+cd crosstool-ng-crosstool-ng-1.24.0
+# Standard autotools build; installs the `ct-ng` binary under /usr/local.
+./bootstrap
+./configure --prefix=/usr/local
+make -j$(nproc)
+make install
+cd ..
+# Drop the source tree to keep the image small.
+rm -rf crosstool-ng-crosstool-ng-1.24.0
--- /dev/null
+#
+# Automatically generated file; DO NOT EDIT.
+# crosstool-NG Configuration
+#
+CT_CONFIGURE_has_static_link=y
+CT_CONFIGURE_has_cxx11=y
+CT_CONFIGURE_has_wget=y
+CT_CONFIGURE_has_curl=y
+CT_CONFIGURE_has_make_3_81_or_newer=y
+CT_CONFIGURE_has_make_4_0_or_newer=y
+CT_CONFIGURE_has_libtool_2_4_or_newer=y
+CT_CONFIGURE_has_libtoolize_2_4_or_newer=y
+CT_CONFIGURE_has_autoconf_2_65_or_newer=y
+CT_CONFIGURE_has_autoreconf_2_65_or_newer=y
+CT_CONFIGURE_has_automake_1_15_or_newer=y
+CT_CONFIGURE_has_gnu_m4_1_4_12_or_newer=y
+CT_CONFIGURE_has_python_3_4_or_newer=y
+CT_CONFIGURE_has_bison_2_7_or_newer=y
+CT_CONFIGURE_has_python=y
+CT_CONFIGURE_has_git=y
+CT_CONFIGURE_has_md5sum=y
+CT_CONFIGURE_has_sha1sum=y
+CT_CONFIGURE_has_sha256sum=y
+CT_CONFIGURE_has_sha512sum=y
+CT_CONFIGURE_has_install_with_strip_program=y
+CT_CONFIG_VERSION_CURRENT="3"
+CT_CONFIG_VERSION="3"
+CT_MODULES=y
+
+#
+# Paths and misc options
+#
+
+#
+# crosstool-NG behavior
+#
+# CT_OBSOLETE is not set
+CT_EXPERIMENTAL=y
+# CT_ALLOW_BUILD_AS_ROOT is not set
+# CT_DEBUG_CT is not set
+
+#
+# Paths
+#
+CT_LOCAL_TARBALLS_DIR="${HOME}/src"
+CT_SAVE_TARBALLS=y
+# CT_TARBALLS_BUILDROOT_LAYOUT is not set
+CT_WORK_DIR="${CT_TOP_DIR}/.build"
+CT_BUILD_TOP_DIR="${CT_WORK_DIR:-${CT_TOP_DIR}/.build}/${CT_HOST:+HOST-${CT_HOST}/}${CT_TARGET}"
+CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
+CT_RM_RF_PREFIX_DIR=y
+CT_REMOVE_DOCS=y
+CT_INSTALL_LICENSES=y
+CT_PREFIX_DIR_RO=y
+CT_STRIP_HOST_TOOLCHAIN_EXECUTABLES=y
+# CT_STRIP_TARGET_TOOLCHAIN_EXECUTABLES is not set
+
+#
+# Downloading
+#
+CT_DOWNLOAD_AGENT_WGET=y
+# CT_DOWNLOAD_AGENT_CURL is not set
+# CT_DOWNLOAD_AGENT_NONE is not set
+# CT_FORBID_DOWNLOAD is not set
+# CT_FORCE_DOWNLOAD is not set
+CT_CONNECT_TIMEOUT=10
+CT_DOWNLOAD_WGET_OPTIONS="--passive-ftp --tries=3 -nc --progress=dot:binary"
+# CT_ONLY_DOWNLOAD is not set
+# CT_USE_MIRROR is not set
+CT_VERIFY_DOWNLOAD_DIGEST=y
+CT_VERIFY_DOWNLOAD_DIGEST_SHA512=y
+# CT_VERIFY_DOWNLOAD_DIGEST_SHA256 is not set
+# CT_VERIFY_DOWNLOAD_DIGEST_SHA1 is not set
+# CT_VERIFY_DOWNLOAD_DIGEST_MD5 is not set
+CT_VERIFY_DOWNLOAD_DIGEST_ALG="sha512"
+# CT_VERIFY_DOWNLOAD_SIGNATURE is not set
+
+#
+# Extracting
+#
+# CT_FORCE_EXTRACT is not set
+CT_OVERRIDE_CONFIG_GUESS_SUB=y
+# CT_ONLY_EXTRACT is not set
+CT_PATCH_BUNDLED=y
+# CT_PATCH_LOCAL is not set
+# CT_PATCH_BUNDLED_LOCAL is not set
+# CT_PATCH_LOCAL_BUNDLED is not set
+# CT_PATCH_NONE is not set
+CT_PATCH_ORDER="bundled"
+
+#
+# Build behavior
+#
+CT_PARALLEL_JOBS=0
+CT_LOAD=""
+CT_USE_PIPES=y
+CT_EXTRA_CFLAGS_FOR_BUILD=""
+CT_EXTRA_LDFLAGS_FOR_BUILD=""
+CT_EXTRA_CFLAGS_FOR_HOST=""
+CT_EXTRA_LDFLAGS_FOR_HOST=""
+# CT_CONFIG_SHELL_SH is not set
+# CT_CONFIG_SHELL_ASH is not set
+CT_CONFIG_SHELL_BASH=y
+# CT_CONFIG_SHELL_CUSTOM is not set
+CT_CONFIG_SHELL="${bash}"
+
+#
+# Logging
+#
+# CT_LOG_ERROR is not set
+# CT_LOG_WARN is not set
+# CT_LOG_INFO is not set
+# CT_LOG_EXTRA is not set
+CT_LOG_ALL=y
+# CT_LOG_DEBUG is not set
+CT_LOG_LEVEL_MAX="ALL"
+# CT_LOG_SEE_TOOLS_WARN is not set
+CT_LOG_TO_FILE=y
+CT_LOG_FILE_COMPRESS=y
+
+#
+# Target options
+#
+# CT_ARCH_ALPHA is not set
+# CT_ARCH_ARC is not set
+# CT_ARCH_ARM is not set
+# CT_ARCH_AVR is not set
+# CT_ARCH_M68K is not set
+# CT_ARCH_MICROBLAZE is not set
+# CT_ARCH_MIPS is not set
+# CT_ARCH_MOXIE is not set
+# CT_ARCH_MSP430 is not set
+# CT_ARCH_NIOS2 is not set
+# CT_ARCH_POWERPC is not set
+CT_ARCH_RISCV=y
+# CT_ARCH_S390 is not set
+# CT_ARCH_SH is not set
+# CT_ARCH_SPARC is not set
+# CT_ARCH_X86 is not set
+# CT_ARCH_XTENSA is not set
+CT_ARCH="riscv"
+CT_ARCH_CHOICE_KSYM="RISCV"
+CT_ARCH_TUNE=""
+CT_ARCH_RISCV_SHOW=y
+
+#
+# Options for riscv
+#
+CT_ARCH_RISCV_PKG_KSYM=""
+CT_ALL_ARCH_CHOICES="ALPHA ARC ARM AVR M68K MICROBLAZE MIPS MOXIE MSP430 NIOS2 POWERPC RISCV S390 SH SPARC X86 XTENSA"
+CT_ARCH_SUFFIX=""
+# CT_OMIT_TARGET_VENDOR is not set
+
+#
+# Generic target options
+#
+# CT_MULTILIB is not set
+# CT_DEMULTILIB is not set
+CT_ARCH_SUPPORTS_BOTH_MMU=y
+CT_ARCH_USE_MMU=y
+CT_ARCH_SUPPORTS_32=y
+CT_ARCH_SUPPORTS_64=y
+CT_ARCH_DEFAULT_32=y
+CT_ARCH_BITNESS=64
+# CT_ARCH_32 is not set
+CT_ARCH_64=y
+
+#
+# Target optimisations
+#
+CT_ARCH_SUPPORTS_WITH_ARCH=y
+CT_ARCH_SUPPORTS_WITH_ABI=y
+CT_ARCH_SUPPORTS_WITH_TUNE=y
+CT_ARCH_ARCH="rv64gc"
+CT_ARCH_ABI=""
+CT_TARGET_CFLAGS=""
+CT_TARGET_LDFLAGS=""
+
+#
+# Toolchain options
+#
+
+#
+# General toolchain options
+#
+CT_FORCE_SYSROOT=y
+CT_USE_SYSROOT=y
+CT_SYSROOT_NAME="sysroot"
+CT_SYSROOT_DIR_PREFIX=""
+CT_WANTS_STATIC_LINK=y
+CT_WANTS_STATIC_LINK_CXX=y
+# CT_STATIC_TOOLCHAIN is not set
+CT_SHOW_CT_VERSION=y
+CT_TOOLCHAIN_PKGVERSION=""
+CT_TOOLCHAIN_BUGURL=""
+
+#
+# Tuple completion and aliasing
+#
+CT_TARGET_VENDOR="unknown"
+CT_TARGET_ALIAS_SED_EXPR=""
+CT_TARGET_ALIAS=""
+
+#
+# Toolchain type
+#
+# CT_NATIVE is not set
+CT_CROSS=y
+# CT_CROSS_NATIVE is not set
+# CT_CANADIAN is not set
+CT_TOOLCHAIN_TYPE="cross"
+
+#
+# Build system
+#
+CT_BUILD=""
+CT_BUILD_PREFIX=""
+CT_BUILD_SUFFIX=""
+
+#
+# Misc options
+#
+# CT_TOOLCHAIN_ENABLE_NLS is not set
+
+#
+# Operating System
+#
+CT_KERNEL_SUPPORTS_SHARED_LIBS=y
+# CT_KERNEL_BARE_METAL is not set
+CT_KERNEL_LINUX=y
+CT_KERNEL="linux"
+CT_KERNEL_CHOICE_KSYM="LINUX"
+CT_KERNEL_LINUX_SHOW=y
+
+#
+# Options for linux
+#
+CT_KERNEL_LINUX_PKG_KSYM="LINUX"
+CT_LINUX_DIR_NAME="linux"
+CT_LINUX_PKG_NAME="linux"
+CT_LINUX_SRC_RELEASE=y
+# CT_LINUX_SRC_DEVEL is not set
+# CT_LINUX_SRC_CUSTOM is not set
+CT_LINUX_PATCH_GLOBAL=y
+# CT_LINUX_PATCH_BUNDLED is not set
+# CT_LINUX_PATCH_LOCAL is not set
+# CT_LINUX_PATCH_BUNDLED_LOCAL is not set
+# CT_LINUX_PATCH_LOCAL_BUNDLED is not set
+# CT_LINUX_PATCH_NONE is not set
+CT_LINUX_PATCH_ORDER="global"
+CT_LINUX_V_4_20=y
+# CT_LINUX_V_4_19 is not set
+# CT_LINUX_V_4_18 is not set
+# CT_LINUX_V_4_17 is not set
+# CT_LINUX_V_4_16 is not set
+# CT_LINUX_V_4_15 is not set
+# CT_LINUX_V_4_14 is not set
+# CT_LINUX_V_4_13 is not set
+# CT_LINUX_V_4_12 is not set
+# CT_LINUX_V_4_11 is not set
+# CT_LINUX_V_4_10 is not set
+# CT_LINUX_V_4_9 is not set
+# CT_LINUX_V_4_4 is not set
+# CT_LINUX_V_4_1 is not set
+# CT_LINUX_V_3_16 is not set
+# CT_LINUX_V_3_13 is not set
+# CT_LINUX_V_3_12 is not set
+# CT_LINUX_V_3_10 is not set
+# CT_LINUX_V_3_4 is not set
+# CT_LINUX_V_3_2 is not set
+# CT_LINUX_NO_VERSIONS is not set
+CT_LINUX_VERSION="4.20.8"
+CT_LINUX_MIRRORS="$(CT_Mirrors kernel.org linux ${CT_LINUX_VERSION})"
+CT_LINUX_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_LINUX_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_LINUX_ARCHIVE_FORMATS=".tar.xz .tar.gz"
+CT_LINUX_SIGNATURE_FORMAT="unpacked/.sign"
+CT_LINUX_later_than_4_8=y
+CT_LINUX_4_8_or_later=y
+CT_LINUX_later_than_3_7=y
+CT_LINUX_3_7_or_later=y
+CT_LINUX_later_than_3_2=y
+CT_LINUX_3_2_or_later=y
+CT_LINUX_REQUIRE_3_2_or_later=y
+CT_KERNEL_LINUX_VERBOSITY_0=y
+# CT_KERNEL_LINUX_VERBOSITY_1 is not set
+# CT_KERNEL_LINUX_VERBOSITY_2 is not set
+CT_KERNEL_LINUX_VERBOSE_LEVEL=0
+CT_KERNEL_LINUX_INSTALL_CHECK=y
+CT_ALL_KERNEL_CHOICES="BARE_METAL LINUX WINDOWS"
+
+#
+# Common kernel options
+#
+CT_SHARED_LIBS=y
+
+#
+# Binary utilities
+#
+CT_ARCH_BINFMT_ELF=y
+CT_BINUTILS_BINUTILS=y
+CT_BINUTILS="binutils"
+CT_BINUTILS_CHOICE_KSYM="BINUTILS"
+CT_BINUTILS_BINUTILS_SHOW=y
+
+#
+# Options for binutils
+#
+CT_BINUTILS_BINUTILS_PKG_KSYM="BINUTILS"
+CT_BINUTILS_DIR_NAME="binutils"
+CT_BINUTILS_USE_GNU=y
+CT_BINUTILS_USE="BINUTILS"
+CT_BINUTILS_PKG_NAME="binutils"
+CT_BINUTILS_SRC_RELEASE=y
+# CT_BINUTILS_SRC_DEVEL is not set
+# CT_BINUTILS_SRC_CUSTOM is not set
+CT_BINUTILS_PATCH_GLOBAL=y
+# CT_BINUTILS_PATCH_BUNDLED is not set
+# CT_BINUTILS_PATCH_LOCAL is not set
+# CT_BINUTILS_PATCH_BUNDLED_LOCAL is not set
+# CT_BINUTILS_PATCH_LOCAL_BUNDLED is not set
+# CT_BINUTILS_PATCH_NONE is not set
+CT_BINUTILS_PATCH_ORDER="global"
+CT_BINUTILS_V_2_32=y
+# CT_BINUTILS_V_2_31 is not set
+# CT_BINUTILS_V_2_30 is not set
+# CT_BINUTILS_V_2_29 is not set
+# CT_BINUTILS_V_2_28 is not set
+# CT_BINUTILS_V_2_27 is not set
+# CT_BINUTILS_V_2_26 is not set
+# CT_BINUTILS_NO_VERSIONS is not set
+CT_BINUTILS_VERSION="2.32"
+CT_BINUTILS_MIRRORS="$(CT_Mirrors GNU binutils) $(CT_Mirrors sourceware binutils/releases)"
+CT_BINUTILS_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_BINUTILS_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_BINUTILS_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz"
+CT_BINUTILS_SIGNATURE_FORMAT="packed/.sig"
+CT_BINUTILS_later_than_2_30=y
+CT_BINUTILS_2_30_or_later=y
+CT_BINUTILS_later_than_2_27=y
+CT_BINUTILS_2_27_or_later=y
+CT_BINUTILS_later_than_2_25=y
+CT_BINUTILS_2_25_or_later=y
+CT_BINUTILS_REQUIRE_2_25_or_later=y
+CT_BINUTILS_later_than_2_23=y
+CT_BINUTILS_2_23_or_later=y
+
+#
+# GNU binutils
+#
+CT_BINUTILS_HAS_HASH_STYLE=y
+CT_BINUTILS_HAS_GOLD=y
+CT_BINUTILS_HAS_PLUGINS=y
+CT_BINUTILS_HAS_PKGVERSION_BUGURL=y
+CT_BINUTILS_FORCE_LD_BFD_DEFAULT=y
+CT_BINUTILS_LINKER_LD=y
+CT_BINUTILS_LINKERS_LIST="ld"
+CT_BINUTILS_LINKER_DEFAULT="bfd"
+# CT_BINUTILS_PLUGINS is not set
+CT_BINUTILS_RELRO=m
+CT_BINUTILS_EXTRA_CONFIG_ARRAY=""
+# CT_BINUTILS_FOR_TARGET is not set
+CT_ALL_BINUTILS_CHOICES="BINUTILS"
+
+#
+# C-library
+#
+CT_LIBC_GLIBC=y
+# CT_LIBC_MUSL is not set
+# CT_LIBC_UCLIBC is not set
+CT_LIBC="glibc"
+CT_LIBC_CHOICE_KSYM="GLIBC"
+CT_THREADS="nptl"
+CT_LIBC_GLIBC_SHOW=y
+
+#
+# Options for glibc
+#
+CT_LIBC_GLIBC_PKG_KSYM="GLIBC"
+CT_GLIBC_DIR_NAME="glibc"
+CT_GLIBC_USE_GNU=y
+CT_GLIBC_USE="GLIBC"
+CT_GLIBC_PKG_NAME="glibc"
+CT_GLIBC_SRC_RELEASE=y
+# CT_GLIBC_SRC_DEVEL is not set
+# CT_GLIBC_SRC_CUSTOM is not set
+CT_GLIBC_PATCH_GLOBAL=y
+# CT_GLIBC_PATCH_BUNDLED is not set
+# CT_GLIBC_PATCH_LOCAL is not set
+# CT_GLIBC_PATCH_BUNDLED_LOCAL is not set
+# CT_GLIBC_PATCH_LOCAL_BUNDLED is not set
+# CT_GLIBC_PATCH_NONE is not set
+CT_GLIBC_PATCH_ORDER="global"
+CT_GLIBC_V_2_29=y
+# CT_GLIBC_NO_VERSIONS is not set
+CT_GLIBC_VERSION="2.29"
+CT_GLIBC_MIRRORS="$(CT_Mirrors GNU glibc)"
+CT_GLIBC_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_GLIBC_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_GLIBC_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz"
+CT_GLIBC_SIGNATURE_FORMAT="packed/.sig"
+CT_GLIBC_2_29_or_later=y
+CT_GLIBC_2_29_or_older=y
+CT_GLIBC_REQUIRE_2_29_or_later=y
+CT_GLIBC_later_than_2_27=y
+CT_GLIBC_2_27_or_later=y
+CT_GLIBC_later_than_2_26=y
+CT_GLIBC_2_26_or_later=y
+CT_GLIBC_later_than_2_25=y
+CT_GLIBC_2_25_or_later=y
+CT_GLIBC_later_than_2_24=y
+CT_GLIBC_2_24_or_later=y
+CT_GLIBC_later_than_2_23=y
+CT_GLIBC_2_23_or_later=y
+CT_GLIBC_later_than_2_20=y
+CT_GLIBC_2_20_or_later=y
+CT_GLIBC_later_than_2_17=y
+CT_GLIBC_2_17_or_later=y
+CT_GLIBC_later_than_2_14=y
+CT_GLIBC_2_14_or_later=y
+CT_GLIBC_DEP_KERNEL_HEADERS_VERSION=y
+CT_GLIBC_DEP_BINUTILS=y
+CT_GLIBC_DEP_GCC=y
+CT_GLIBC_DEP_PYTHON=y
+CT_GLIBC_BUILD_SSP=y
+CT_GLIBC_HAS_LIBIDN_ADDON=y
+# CT_GLIBC_USE_LIBIDN_ADDON is not set
+CT_GLIBC_NO_SPARC_V8=y
+CT_GLIBC_HAS_OBSOLETE_RPC=y
+CT_GLIBC_EXTRA_CONFIG_ARRAY=""
+CT_GLIBC_CONFIGPARMS=""
+CT_GLIBC_EXTRA_CFLAGS=""
+CT_GLIBC_ENABLE_OBSOLETE_RPC=y
+# CT_GLIBC_ENABLE_FORTIFIED_BUILD is not set
+# CT_GLIBC_DISABLE_VERSIONING is not set
+CT_GLIBC_OLDEST_ABI=""
+CT_GLIBC_FORCE_UNWIND=y
+# CT_GLIBC_LOCALES is not set
+CT_GLIBC_KERNEL_VERSION_NONE=y
+# CT_GLIBC_KERNEL_VERSION_AS_HEADERS is not set
+# CT_GLIBC_KERNEL_VERSION_CHOSEN is not set
+CT_GLIBC_MIN_KERNEL=""
+CT_GLIBC_SSP_DEFAULT=y
+# CT_GLIBC_SSP_NO is not set
+# CT_GLIBC_SSP_YES is not set
+# CT_GLIBC_SSP_ALL is not set
+# CT_GLIBC_SSP_STRONG is not set
+# CT_GLIBC_ENABLE_WERROR is not set
+CT_ALL_LIBC_CHOICES="AVR_LIBC BIONIC GLIBC MINGW_W64 MOXIEBOX MUSL NEWLIB NONE UCLIBC"
+CT_LIBC_SUPPORT_THREADS_ANY=y
+CT_LIBC_SUPPORT_THREADS_NATIVE=y
+
+#
+# Common C library options
+#
+CT_THREADS_NATIVE=y
+# CT_CREATE_LDSO_CONF is not set
+CT_LIBC_XLDD=y
+
+#
+# C compiler
+#
+CT_CC_CORE_PASSES_NEEDED=y
+CT_CC_CORE_PASS_1_NEEDED=y
+CT_CC_CORE_PASS_2_NEEDED=y
+CT_CC_SUPPORT_CXX=y
+CT_CC_SUPPORT_FORTRAN=y
+CT_CC_SUPPORT_ADA=y
+CT_CC_SUPPORT_OBJC=y
+CT_CC_SUPPORT_OBJCXX=y
+CT_CC_SUPPORT_GOLANG=y
+CT_CC_GCC=y
+CT_CC="gcc"
+CT_CC_CHOICE_KSYM="GCC"
+CT_CC_GCC_SHOW=y
+
+#
+# Options for gcc
+#
+CT_CC_GCC_PKG_KSYM="GCC"
+CT_GCC_DIR_NAME="gcc"
+CT_GCC_USE_GNU=y
+# CT_GCC_USE_LINARO is not set
+CT_GCC_USE="GCC"
+CT_GCC_PKG_NAME="gcc"
+CT_GCC_SRC_RELEASE=y
+# CT_GCC_SRC_DEVEL is not set
+# CT_GCC_SRC_CUSTOM is not set
+CT_GCC_PATCH_GLOBAL=y
+# CT_GCC_PATCH_BUNDLED is not set
+# CT_GCC_PATCH_LOCAL is not set
+# CT_GCC_PATCH_BUNDLED_LOCAL is not set
+# CT_GCC_PATCH_LOCAL_BUNDLED is not set
+# CT_GCC_PATCH_NONE is not set
+CT_GCC_PATCH_ORDER="global"
+CT_GCC_V_8=y
+# CT_GCC_V_7 is not set
+# CT_GCC_NO_VERSIONS is not set
+CT_GCC_VERSION="8.3.0"
+CT_GCC_MIRRORS="$(CT_Mirrors GNU gcc/gcc-${CT_GCC_VERSION}) $(CT_Mirrors sourceware gcc/releases/gcc-${CT_GCC_VERSION})"
+CT_GCC_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_GCC_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_GCC_ARCHIVE_FORMATS=".tar.xz .tar.gz"
+CT_GCC_SIGNATURE_FORMAT=""
+CT_GCC_later_than_7=y
+CT_GCC_7_or_later=y
+CT_GCC_REQUIRE_7_or_later=y
+CT_GCC_later_than_6=y
+CT_GCC_6_or_later=y
+CT_GCC_later_than_5=y
+CT_GCC_5_or_later=y
+CT_GCC_REQUIRE_5_or_later=y
+CT_GCC_later_than_4_9=y
+CT_GCC_4_9_or_later=y
+CT_GCC_REQUIRE_4_9_or_later=y
+CT_GCC_later_than_4_8=y
+CT_GCC_4_8_or_later=y
+CT_CC_GCC_HAS_LIBMPX=y
+CT_CC_GCC_ENABLE_CXX_FLAGS=""
+CT_CC_GCC_CORE_EXTRA_CONFIG_ARRAY=""
+CT_CC_GCC_EXTRA_CONFIG_ARRAY=""
+CT_CC_GCC_STATIC_LIBSTDCXX=y
+# CT_CC_GCC_SYSTEM_ZLIB is not set
+CT_CC_GCC_CONFIG_TLS=m
+
+#
+# Optimisation features
+#
+CT_CC_GCC_USE_GRAPHITE=y
+CT_CC_GCC_USE_LTO=y
+
+#
+# Settings for libraries running on target
+#
+CT_CC_GCC_ENABLE_TARGET_OPTSPACE=y
+# CT_CC_GCC_LIBMUDFLAP is not set
+# CT_CC_GCC_LIBGOMP is not set
+# CT_CC_GCC_LIBSSP is not set
+# CT_CC_GCC_LIBQUADMATH is not set
+# CT_CC_GCC_LIBSANITIZER is not set
+
+#
+# Misc. obscure options.
+#
+CT_CC_CXA_ATEXIT=y
+# CT_CC_GCC_DISABLE_PCH is not set
+CT_CC_GCC_SJLJ_EXCEPTIONS=m
+CT_CC_GCC_LDBL_128=m
+# CT_CC_GCC_BUILD_ID is not set
+CT_CC_GCC_LNK_HASH_STYLE_DEFAULT=y
+# CT_CC_GCC_LNK_HASH_STYLE_SYSV is not set
+# CT_CC_GCC_LNK_HASH_STYLE_GNU is not set
+# CT_CC_GCC_LNK_HASH_STYLE_BOTH is not set
+CT_CC_GCC_LNK_HASH_STYLE=""
+CT_CC_GCC_DEC_FLOAT_AUTO=y
+# CT_CC_GCC_DEC_FLOAT_BID is not set
+# CT_CC_GCC_DEC_FLOAT_DPD is not set
+# CT_CC_GCC_DEC_FLOATS_NO is not set
+CT_ALL_CC_CHOICES="GCC"
+
+#
+# Additional supported languages:
+#
+CT_CC_LANG_CXX=y
+# CT_CC_LANG_FORTRAN is not set
+# CT_CC_LANG_ADA is not set
+# CT_CC_LANG_OBJC is not set
+# CT_CC_LANG_OBJCXX is not set
+# CT_CC_LANG_GOLANG is not set
+CT_CC_LANG_OTHERS=""
+
+#
+# Debug facilities
+#
+# CT_DEBUG_DUMA is not set
+CT_DEBUG_GDB=y
+CT_DEBUG_GDB_PKG_KSYM="GDB"
+CT_GDB_DIR_NAME="gdb"
+CT_GDB_USE_GNU=y
+CT_GDB_USE="GDB"
+CT_GDB_PKG_NAME="gdb"
+CT_GDB_SRC_RELEASE=y
+# CT_GDB_SRC_DEVEL is not set
+# CT_GDB_SRC_CUSTOM is not set
+CT_GDB_PATCH_GLOBAL=y
+# CT_GDB_PATCH_BUNDLED is not set
+# CT_GDB_PATCH_LOCAL is not set
+# CT_GDB_PATCH_BUNDLED_LOCAL is not set
+# CT_GDB_PATCH_LOCAL_BUNDLED is not set
+# CT_GDB_PATCH_NONE is not set
+CT_GDB_PATCH_ORDER="global"
+CT_GDB_V_8_2=y
+# CT_GDB_V_8_1 is not set
+# CT_GDB_V_8_0 is not set
+# CT_GDB_NO_VERSIONS is not set
+CT_GDB_VERSION="8.2.1"
+CT_GDB_MIRRORS="$(CT_Mirrors GNU gdb) $(CT_Mirrors sourceware gdb/releases)"
+CT_GDB_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_GDB_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_GDB_ARCHIVE_FORMATS=".tar.xz .tar.gz"
+CT_GDB_SIGNATURE_FORMAT=""
+CT_GDB_later_than_8_0=y
+CT_GDB_8_0_or_later=y
+CT_GDB_REQUIRE_8_0_or_later=y
+CT_GDB_later_than_7_12=y
+CT_GDB_7_12_or_later=y
+CT_GDB_later_than_7_2=y
+CT_GDB_7_2_or_later=y
+CT_GDB_later_than_7_0=y
+CT_GDB_7_0_or_later=y
+CT_GDB_CROSS=y
+# CT_GDB_CROSS_STATIC is not set
+# CT_GDB_CROSS_SIM is not set
+# CT_GDB_CROSS_PYTHON is not set
+CT_GDB_CROSS_EXTRA_CONFIG_ARRAY=""
+# CT_GDB_NATIVE is not set
+# CT_GDB_GDBSERVER is not set
+CT_GDB_HAS_PKGVERSION_BUGURL=y
+CT_GDB_HAS_PYTHON=y
+CT_GDB_INSTALL_GDBINIT=y
+CT_GDB_HAS_IPA_LIB=y
+# CT_DEBUG_LTRACE is not set
+# CT_DEBUG_STRACE is not set
+CT_ALL_DEBUG_CHOICES="DUMA GDB LTRACE STRACE"
+
+#
+# Companion libraries
+#
+# CT_COMPLIBS_CHECK is not set
+# CT_COMP_LIBS_CLOOG is not set
+CT_COMP_LIBS_EXPAT=y
+CT_COMP_LIBS_EXPAT_PKG_KSYM="EXPAT"
+CT_EXPAT_DIR_NAME="expat"
+CT_EXPAT_PKG_NAME="expat"
+CT_EXPAT_SRC_RELEASE=y
+# CT_EXPAT_SRC_DEVEL is not set
+# CT_EXPAT_SRC_CUSTOM is not set
+CT_EXPAT_PATCH_GLOBAL=y
+# CT_EXPAT_PATCH_BUNDLED is not set
+# CT_EXPAT_PATCH_LOCAL is not set
+# CT_EXPAT_PATCH_BUNDLED_LOCAL is not set
+# CT_EXPAT_PATCH_LOCAL_BUNDLED is not set
+# CT_EXPAT_PATCH_NONE is not set
+CT_EXPAT_PATCH_ORDER="global"
+CT_EXPAT_V_2_2=y
+# CT_EXPAT_NO_VERSIONS is not set
+CT_EXPAT_VERSION="2.2.6"
+CT_EXPAT_MIRRORS="http://downloads.sourceforge.net/project/expat/expat/${CT_EXPAT_VERSION}"
+CT_EXPAT_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_EXPAT_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_EXPAT_ARCHIVE_FORMATS=".tar.bz2"
+CT_EXPAT_SIGNATURE_FORMAT=""
+CT_COMP_LIBS_GETTEXT=y
+CT_COMP_LIBS_GETTEXT_PKG_KSYM="GETTEXT"
+CT_GETTEXT_DIR_NAME="gettext"
+CT_GETTEXT_PKG_NAME="gettext"
+CT_GETTEXT_SRC_RELEASE=y
+# CT_GETTEXT_SRC_DEVEL is not set
+# CT_GETTEXT_SRC_CUSTOM is not set
+CT_GETTEXT_PATCH_GLOBAL=y
+# CT_GETTEXT_PATCH_BUNDLED is not set
+# CT_GETTEXT_PATCH_LOCAL is not set
+# CT_GETTEXT_PATCH_BUNDLED_LOCAL is not set
+# CT_GETTEXT_PATCH_LOCAL_BUNDLED is not set
+# CT_GETTEXT_PATCH_NONE is not set
+CT_GETTEXT_PATCH_ORDER="global"
+CT_GETTEXT_V_0_19_8_1=y
+# CT_GETTEXT_NO_VERSIONS is not set
+CT_GETTEXT_VERSION="0.19.8.1"
+CT_GETTEXT_MIRRORS="$(CT_Mirrors GNU gettext)"
+CT_GETTEXT_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_GETTEXT_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_GETTEXT_ARCHIVE_FORMATS=".tar.xz .tar.lz .tar.gz"
+CT_GETTEXT_SIGNATURE_FORMAT="packed/.sig"
+CT_COMP_LIBS_GMP=y
+CT_COMP_LIBS_GMP_PKG_KSYM="GMP"
+CT_GMP_DIR_NAME="gmp"
+CT_GMP_PKG_NAME="gmp"
+CT_GMP_SRC_RELEASE=y
+# CT_GMP_SRC_DEVEL is not set
+# CT_GMP_SRC_CUSTOM is not set
+CT_GMP_PATCH_GLOBAL=y
+# CT_GMP_PATCH_BUNDLED is not set
+# CT_GMP_PATCH_LOCAL is not set
+# CT_GMP_PATCH_BUNDLED_LOCAL is not set
+# CT_GMP_PATCH_LOCAL_BUNDLED is not set
+# CT_GMP_PATCH_NONE is not set
+CT_GMP_PATCH_ORDER="global"
+CT_GMP_V_6_1=y
+# CT_GMP_NO_VERSIONS is not set
+CT_GMP_VERSION="6.1.2"
+CT_GMP_MIRRORS="https://gmplib.org/download/gmp https://gmplib.org/download/gmp/archive $(CT_Mirrors GNU gmp)"
+CT_GMP_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_GMP_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_GMP_ARCHIVE_FORMATS=".tar.xz .tar.lz .tar.bz2"
+CT_GMP_SIGNATURE_FORMAT="packed/.sig"
+CT_GMP_later_than_5_1_0=y
+CT_GMP_5_1_0_or_later=y
+CT_GMP_later_than_5_0_0=y
+CT_GMP_5_0_0_or_later=y
+CT_GMP_REQUIRE_5_0_0_or_later=y
+CT_COMP_LIBS_ISL=y
+CT_COMP_LIBS_ISL_PKG_KSYM="ISL"
+CT_ISL_DIR_NAME="isl"
+CT_ISL_PKG_NAME="isl"
+CT_ISL_SRC_RELEASE=y
+# CT_ISL_SRC_DEVEL is not set
+# CT_ISL_SRC_CUSTOM is not set
+CT_ISL_PATCH_GLOBAL=y
+# CT_ISL_PATCH_BUNDLED is not set
+# CT_ISL_PATCH_LOCAL is not set
+# CT_ISL_PATCH_BUNDLED_LOCAL is not set
+# CT_ISL_PATCH_LOCAL_BUNDLED is not set
+# CT_ISL_PATCH_NONE is not set
+CT_ISL_PATCH_ORDER="global"
+CT_ISL_V_0_20=y
+# CT_ISL_V_0_19 is not set
+# CT_ISL_V_0_18 is not set
+# CT_ISL_V_0_17 is not set
+# CT_ISL_V_0_16 is not set
+# CT_ISL_V_0_15 is not set
+# CT_ISL_NO_VERSIONS is not set
+CT_ISL_VERSION="0.20"
+CT_ISL_MIRRORS="http://isl.gforge.inria.fr"
+CT_ISL_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_ISL_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_ISL_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz"
+CT_ISL_SIGNATURE_FORMAT=""
+CT_ISL_later_than_0_18=y
+CT_ISL_0_18_or_later=y
+CT_ISL_later_than_0_15=y
+CT_ISL_0_15_or_later=y
+CT_ISL_REQUIRE_0_15_or_later=y
+CT_ISL_later_than_0_14=y
+CT_ISL_0_14_or_later=y
+CT_ISL_REQUIRE_0_14_or_later=y
+CT_ISL_later_than_0_13=y
+CT_ISL_0_13_or_later=y
+CT_ISL_later_than_0_12=y
+CT_ISL_0_12_or_later=y
+CT_ISL_REQUIRE_0_12_or_later=y
+# CT_COMP_LIBS_LIBELF is not set
+CT_COMP_LIBS_LIBICONV=y
+CT_COMP_LIBS_LIBICONV_PKG_KSYM="LIBICONV"
+CT_LIBICONV_DIR_NAME="libiconv"
+CT_LIBICONV_PKG_NAME="libiconv"
+CT_LIBICONV_SRC_RELEASE=y
+# CT_LIBICONV_SRC_DEVEL is not set
+# CT_LIBICONV_SRC_CUSTOM is not set
+CT_LIBICONV_PATCH_GLOBAL=y
+# CT_LIBICONV_PATCH_BUNDLED is not set
+# CT_LIBICONV_PATCH_LOCAL is not set
+# CT_LIBICONV_PATCH_BUNDLED_LOCAL is not set
+# CT_LIBICONV_PATCH_LOCAL_BUNDLED is not set
+# CT_LIBICONV_PATCH_NONE is not set
+CT_LIBICONV_PATCH_ORDER="global"
+CT_LIBICONV_V_1_15=y
+# CT_LIBICONV_NO_VERSIONS is not set
+CT_LIBICONV_VERSION="1.15"
+CT_LIBICONV_MIRRORS="$(CT_Mirrors GNU libiconv)"
+CT_LIBICONV_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_LIBICONV_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_LIBICONV_ARCHIVE_FORMATS=".tar.gz"
+CT_LIBICONV_SIGNATURE_FORMAT="packed/.sig"
+CT_COMP_LIBS_MPC=y
+CT_COMP_LIBS_MPC_PKG_KSYM="MPC"
+CT_MPC_DIR_NAME="mpc"
+CT_MPC_PKG_NAME="mpc"
+CT_MPC_SRC_RELEASE=y
+# CT_MPC_SRC_DEVEL is not set
+# CT_MPC_SRC_CUSTOM is not set
+CT_MPC_PATCH_GLOBAL=y
+# CT_MPC_PATCH_BUNDLED is not set
+# CT_MPC_PATCH_LOCAL is not set
+# CT_MPC_PATCH_BUNDLED_LOCAL is not set
+# CT_MPC_PATCH_LOCAL_BUNDLED is not set
+# CT_MPC_PATCH_NONE is not set
+CT_MPC_PATCH_ORDER="global"
+CT_MPC_V_1_1=y
+# CT_MPC_V_1_0 is not set
+# CT_MPC_NO_VERSIONS is not set
+CT_MPC_VERSION="1.1.0"
+CT_MPC_MIRRORS="http://www.multiprecision.org/downloads $(CT_Mirrors GNU mpc)"
+CT_MPC_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_MPC_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_MPC_ARCHIVE_FORMATS=".tar.gz"
+CT_MPC_SIGNATURE_FORMAT="packed/.sig"
+CT_MPC_1_1_0_or_later=y
+CT_MPC_1_1_0_or_older=y
+CT_COMP_LIBS_MPFR=y
+CT_COMP_LIBS_MPFR_PKG_KSYM="MPFR"
+CT_MPFR_DIR_NAME="mpfr"
+CT_MPFR_PKG_NAME="mpfr"
+CT_MPFR_SRC_RELEASE=y
+# CT_MPFR_SRC_DEVEL is not set
+# CT_MPFR_SRC_CUSTOM is not set
+CT_MPFR_PATCH_GLOBAL=y
+# CT_MPFR_PATCH_BUNDLED is not set
+# CT_MPFR_PATCH_LOCAL is not set
+# CT_MPFR_PATCH_BUNDLED_LOCAL is not set
+# CT_MPFR_PATCH_LOCAL_BUNDLED is not set
+# CT_MPFR_PATCH_NONE is not set
+CT_MPFR_PATCH_ORDER="global"
+CT_MPFR_V_4_0=y
+# CT_MPFR_V_3_1 is not set
+# CT_MPFR_NO_VERSIONS is not set
+CT_MPFR_VERSION="4.0.2"
+CT_MPFR_MIRRORS="http://www.mpfr.org/mpfr-${CT_MPFR_VERSION} $(CT_Mirrors GNU mpfr)"
+CT_MPFR_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_MPFR_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_MPFR_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz .zip"
+CT_MPFR_SIGNATURE_FORMAT="packed/.asc"
+CT_MPFR_later_than_4_0_0=y
+CT_MPFR_4_0_0_or_later=y
+CT_MPFR_later_than_3_0_0=y
+CT_MPFR_3_0_0_or_later=y
+CT_MPFR_REQUIRE_3_0_0_or_later=y
+CT_COMP_LIBS_NCURSES=y
+CT_COMP_LIBS_NCURSES_PKG_KSYM="NCURSES"
+CT_NCURSES_DIR_NAME="ncurses"
+CT_NCURSES_PKG_NAME="ncurses"
+CT_NCURSES_SRC_RELEASE=y
+# CT_NCURSES_SRC_DEVEL is not set
+# CT_NCURSES_SRC_CUSTOM is not set
+CT_NCURSES_PATCH_GLOBAL=y
+# CT_NCURSES_PATCH_BUNDLED is not set
+# CT_NCURSES_PATCH_LOCAL is not set
+# CT_NCURSES_PATCH_BUNDLED_LOCAL is not set
+# CT_NCURSES_PATCH_LOCAL_BUNDLED is not set
+# CT_NCURSES_PATCH_NONE is not set
+CT_NCURSES_PATCH_ORDER="global"
+CT_NCURSES_V_6_1=y
+# CT_NCURSES_V_6_0 is not set
+# CT_NCURSES_NO_VERSIONS is not set
+CT_NCURSES_VERSION="6.1"
+CT_NCURSES_MIRRORS="ftp://invisible-island.net/ncurses $(CT_Mirrors GNU ncurses)"
+CT_NCURSES_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_NCURSES_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_NCURSES_ARCHIVE_FORMATS=".tar.gz"
+CT_NCURSES_SIGNATURE_FORMAT="packed/.sig"
+# CT_NCURSES_NEW_ABI is not set
+CT_NCURSES_HOST_CONFIG_ARGS=""
+CT_NCURSES_HOST_DISABLE_DB=y
+CT_NCURSES_HOST_FALLBACKS="linux,xterm,xterm-color,xterm-256color,vt100"
+CT_NCURSES_TARGET_CONFIG_ARGS=""
+# CT_NCURSES_TARGET_DISABLE_DB is not set
+CT_NCURSES_TARGET_FALLBACKS=""
+CT_COMP_LIBS_ZLIB=y
+CT_COMP_LIBS_ZLIB_PKG_KSYM="ZLIB"
+CT_ZLIB_DIR_NAME="zlib"
+CT_ZLIB_PKG_NAME="zlib"
+CT_ZLIB_SRC_RELEASE=y
+# CT_ZLIB_SRC_DEVEL is not set
+# CT_ZLIB_SRC_CUSTOM is not set
+CT_ZLIB_PATCH_GLOBAL=y
+# CT_ZLIB_PATCH_BUNDLED is not set
+# CT_ZLIB_PATCH_LOCAL is not set
+# CT_ZLIB_PATCH_BUNDLED_LOCAL is not set
+# CT_ZLIB_PATCH_LOCAL_BUNDLED is not set
+# CT_ZLIB_PATCH_NONE is not set
+CT_ZLIB_PATCH_ORDER="global"
+CT_ZLIB_V_1_2_11=y
+# CT_ZLIB_NO_VERSIONS is not set
+CT_ZLIB_VERSION="1.2.11"
+CT_ZLIB_MIRRORS="http://downloads.sourceforge.net/project/libpng/zlib/${CT_ZLIB_VERSION}"
+CT_ZLIB_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
+CT_ZLIB_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
+CT_ZLIB_ARCHIVE_FORMATS=".tar.xz .tar.gz"
+CT_ZLIB_SIGNATURE_FORMAT="packed/.asc"
+CT_ALL_COMP_LIBS_CHOICES="CLOOG EXPAT GETTEXT GMP ISL LIBELF LIBICONV MPC MPFR NCURSES ZLIB"
+CT_LIBICONV_NEEDED=y
+CT_GETTEXT_NEEDED=y
+CT_GMP_NEEDED=y
+CT_MPFR_NEEDED=y
+CT_ISL_NEEDED=y
+CT_MPC_NEEDED=y
+CT_EXPAT_NEEDED=y
+CT_NCURSES_NEEDED=y
+CT_ZLIB_NEEDED=y
+CT_LIBICONV=y
+CT_GETTEXT=y
+CT_GMP=y
+CT_MPFR=y
+CT_ISL=y
+CT_MPC=y
+CT_EXPAT=y
+CT_NCURSES=y
+CT_ZLIB=y
+
+#
+# Companion tools
+#
+# CT_COMP_TOOLS_FOR_HOST is not set
+# CT_COMP_TOOLS_AUTOCONF is not set
+# CT_COMP_TOOLS_AUTOMAKE is not set
+# CT_COMP_TOOLS_BISON is not set
+# CT_COMP_TOOLS_DTC is not set
+# CT_COMP_TOOLS_LIBTOOL is not set
+# CT_COMP_TOOLS_M4 is not set
+# CT_COMP_TOOLS_MAKE is not set
+CT_ALL_COMP_TOOLS_CHOICES="AUTOCONF AUTOMAKE BISON DTC LIBTOOL M4 MAKE"
+
+#
+# Test suite
+#
+# CT_TEST_SUITE_GCC is not set
apt-get update && \
apt-get install -y --no-install-recommends gcc-arm-embedded
-COPY scripts/rustbuild-setup.sh host-x86_64/dist-various-1/build-riscv-toolchain.sh host-x86_64/dist-various-1/riscv64-unknown-linux-gnu.config host-x86_64/dist-various-1/crosstool-ng.sh /build/
-RUN ./crosstool-ng.sh
-
-# Crosstool-ng will refuse to build as root
-RUN sh ./rustbuild-setup.sh
-USER rustbuild
-
-RUN ./build-riscv-toolchain.sh
-
-USER root
-ENV PATH=/x-tools/riscv64-unknown-linux-gnu/bin:$PATH
-
COPY host-x86_64/dist-various-1/build-rumprun.sh /build
RUN ./build-rumprun.sh
ENV TARGETS=$TARGETS,riscv32imac-unknown-none-elf
ENV TARGETS=$TARGETS,riscv64imac-unknown-none-elf
ENV TARGETS=$TARGETS,riscv64gc-unknown-none-elf
-ENV TARGETS=$TARGETS,riscv64gc-unknown-linux-gnu
ENV TARGETS=$TARGETS,armebv7r-none-eabi
ENV TARGETS=$TARGETS,armebv7r-none-eabihf
ENV TARGETS=$TARGETS,armv7r-none-eabi
CFLAGS_aarch64_unknown_none_softfloat=-mstrict-align -march=armv8-a+nofp+nosimd \
CC_aarch64_unknown_none=aarch64-none-elf-gcc \
CFLAGS_aarch64_unknown_none=-mstrict-align -march=armv8-a+fp+simd \
- CC_riscv64gc_unknown_linux_gnu=riscv64-unknown-linux-gnu-gcc \
- AR_riscv64gc_unknown_linux_gnu=riscv64-unknown-linux-gnu-ar \
- CXX_riscv64gc_unknown_linux_gnu=riscv64-unknown-linux-gnu-g++ \
CC_riscv32i_unknown_none_elf=false \
CC_riscv32imc_unknown_none_elf=false \
CC_riscv32imac_unknown_none_elf=false \
+++ /dev/null
-#!/usr/bin/env bash
-
-set -ex
-
-hide_output() {
- set +x
- on_err="
-echo ERROR: An error was encountered with the build.
-cat /tmp/build.log
-exit 1
-"
- trap "$on_err" ERR
- bash -c "while true; do sleep 30; echo \$(date) - building ...; done" &
- PING_LOOP_PID=$!
- $@ &> /tmp/build.log
- rm /tmp/build.log
- trap - ERR
- kill $PING_LOOP_PID
- set -x
-}
-
-mkdir -p /tmp/build-riscv
-cp riscv64-unknown-linux-gnu.config /tmp/build-riscv/.config
-cd /tmp/build-riscv
-hide_output ct-ng build
-cd ..
-rm -rf build-riscv
+++ /dev/null
-#!/bin/bash
-set -ex
-
-# Mirrored from https://github.com/crosstool-ng/crosstool-ng/archive/crosstool-ng-1.24.0.tar.gz
-url="https://ci-mirrors.rust-lang.org/rustc/crosstool-ng-1.24.0.tar.gz"
-curl -Lf $url | tar xzf -
-cd crosstool-ng-crosstool-ng-1.24.0
-./bootstrap
-./configure --prefix=/usr/local
-make -j$(nproc)
-make install
-cd ..
-rm -rf crosstool-ng-crosstool-ng-1.24.0
+++ /dev/null
-#
-# Automatically generated file; DO NOT EDIT.
-# crosstool-NG Configuration
-#
-CT_CONFIGURE_has_static_link=y
-CT_CONFIGURE_has_cxx11=y
-CT_CONFIGURE_has_wget=y
-CT_CONFIGURE_has_curl=y
-CT_CONFIGURE_has_make_3_81_or_newer=y
-CT_CONFIGURE_has_make_4_0_or_newer=y
-CT_CONFIGURE_has_libtool_2_4_or_newer=y
-CT_CONFIGURE_has_libtoolize_2_4_or_newer=y
-CT_CONFIGURE_has_autoconf_2_65_or_newer=y
-CT_CONFIGURE_has_autoreconf_2_65_or_newer=y
-CT_CONFIGURE_has_automake_1_15_or_newer=y
-CT_CONFIGURE_has_gnu_m4_1_4_12_or_newer=y
-CT_CONFIGURE_has_python_3_4_or_newer=y
-CT_CONFIGURE_has_bison_2_7_or_newer=y
-CT_CONFIGURE_has_python=y
-CT_CONFIGURE_has_dtc=y
-CT_CONFIGURE_has_svn=y
-CT_CONFIGURE_has_git=y
-CT_CONFIGURE_has_md5sum=y
-CT_CONFIGURE_has_sha1sum=y
-CT_CONFIGURE_has_sha256sum=y
-CT_CONFIGURE_has_sha512sum=y
-CT_CONFIGURE_has_install_with_strip_program=y
-CT_CONFIG_VERSION_CURRENT="3"
-CT_CONFIG_VERSION="3"
-CT_MODULES=y
-
-#
-# Paths and misc options
-#
-
-#
-# crosstool-NG behavior
-#
-# CT_OBSOLETE is not set
-CT_EXPERIMENTAL=y
-# CT_ALLOW_BUILD_AS_ROOT is not set
-# CT_DEBUG_CT is not set
-
-#
-# Paths
-#
-CT_LOCAL_TARBALLS_DIR="${HOME}/src"
-CT_SAVE_TARBALLS=y
-# CT_TARBALLS_BUILDROOT_LAYOUT is not set
-CT_WORK_DIR="${CT_TOP_DIR}/.build"
-CT_BUILD_TOP_DIR="${CT_WORK_DIR:-${CT_TOP_DIR}/.build}/${CT_HOST:+HOST-${CT_HOST}/}${CT_TARGET}"
-CT_PREFIX_DIR="/x-tools/${CT_TARGET}"
-CT_RM_RF_PREFIX_DIR=y
-CT_REMOVE_DOCS=y
-CT_INSTALL_LICENSES=y
-CT_PREFIX_DIR_RO=y
-CT_STRIP_HOST_TOOLCHAIN_EXECUTABLES=y
-# CT_STRIP_TARGET_TOOLCHAIN_EXECUTABLES is not set
-
-#
-# Downloading
-#
-CT_DOWNLOAD_AGENT_WGET=y
-# CT_DOWNLOAD_AGENT_CURL is not set
-# CT_DOWNLOAD_AGENT_NONE is not set
-# CT_FORBID_DOWNLOAD is not set
-# CT_FORCE_DOWNLOAD is not set
-CT_CONNECT_TIMEOUT=10
-CT_DOWNLOAD_WGET_OPTIONS="--passive-ftp --tries=3 -nc --progress=dot:binary"
-# CT_ONLY_DOWNLOAD is not set
-# CT_USE_MIRROR is not set
-CT_VERIFY_DOWNLOAD_DIGEST=y
-CT_VERIFY_DOWNLOAD_DIGEST_SHA512=y
-# CT_VERIFY_DOWNLOAD_DIGEST_SHA256 is not set
-# CT_VERIFY_DOWNLOAD_DIGEST_SHA1 is not set
-# CT_VERIFY_DOWNLOAD_DIGEST_MD5 is not set
-CT_VERIFY_DOWNLOAD_DIGEST_ALG="sha512"
-# CT_VERIFY_DOWNLOAD_SIGNATURE is not set
-
-#
-# Extracting
-#
-# CT_FORCE_EXTRACT is not set
-CT_OVERRIDE_CONFIG_GUESS_SUB=y
-# CT_ONLY_EXTRACT is not set
-CT_PATCH_BUNDLED=y
-# CT_PATCH_LOCAL is not set
-# CT_PATCH_BUNDLED_LOCAL is not set
-# CT_PATCH_LOCAL_BUNDLED is not set
-# CT_PATCH_NONE is not set
-CT_PATCH_ORDER="bundled"
-
-#
-# Build behavior
-#
-CT_PARALLEL_JOBS=0
-CT_LOAD=""
-CT_USE_PIPES=y
-CT_EXTRA_CFLAGS_FOR_BUILD=""
-CT_EXTRA_LDFLAGS_FOR_BUILD=""
-CT_EXTRA_CFLAGS_FOR_HOST=""
-CT_EXTRA_LDFLAGS_FOR_HOST=""
-# CT_CONFIG_SHELL_SH is not set
-# CT_CONFIG_SHELL_ASH is not set
-CT_CONFIG_SHELL_BASH=y
-# CT_CONFIG_SHELL_CUSTOM is not set
-CT_CONFIG_SHELL="${bash}"
-
-#
-# Logging
-#
-# CT_LOG_ERROR is not set
-# CT_LOG_WARN is not set
-# CT_LOG_INFO is not set
-# CT_LOG_EXTRA is not set
-CT_LOG_ALL=y
-# CT_LOG_DEBUG is not set
-CT_LOG_LEVEL_MAX="ALL"
-# CT_LOG_SEE_TOOLS_WARN is not set
-CT_LOG_TO_FILE=y
-CT_LOG_FILE_COMPRESS=y
-
-#
-# Target options
-#
-# CT_ARCH_ALPHA is not set
-# CT_ARCH_ARC is not set
-# CT_ARCH_ARM is not set
-# CT_ARCH_AVR is not set
-# CT_ARCH_M68K is not set
-# CT_ARCH_MICROBLAZE is not set
-# CT_ARCH_MIPS is not set
-# CT_ARCH_MOXIE is not set
-# CT_ARCH_MSP430 is not set
-# CT_ARCH_NIOS2 is not set
-# CT_ARCH_POWERPC is not set
-CT_ARCH_RISCV=y
-# CT_ARCH_S390 is not set
-# CT_ARCH_SH is not set
-# CT_ARCH_SPARC is not set
-# CT_ARCH_X86 is not set
-# CT_ARCH_XTENSA is not set
-CT_ARCH="riscv"
-CT_ARCH_CHOICE_KSYM="RISCV"
-CT_ARCH_TUNE=""
-CT_ARCH_RISCV_SHOW=y
-
-#
-# Options for riscv
-#
-CT_ARCH_RISCV_PKG_KSYM=""
-CT_ALL_ARCH_CHOICES="ALPHA ARC ARM AVR M68K MICROBLAZE MIPS MOXIE MSP430 NIOS2 POWERPC RISCV S390 SH SPARC X86 XTENSA"
-CT_ARCH_SUFFIX=""
-# CT_OMIT_TARGET_VENDOR is not set
-
-#
-# Generic target options
-#
-# CT_MULTILIB is not set
-# CT_DEMULTILIB is not set
-CT_ARCH_SUPPORTS_BOTH_MMU=y
-CT_ARCH_USE_MMU=y
-CT_ARCH_SUPPORTS_32=y
-CT_ARCH_SUPPORTS_64=y
-CT_ARCH_DEFAULT_32=y
-CT_ARCH_BITNESS=64
-# CT_ARCH_32 is not set
-CT_ARCH_64=y
-
-#
-# Target optimisations
-#
-CT_ARCH_SUPPORTS_WITH_ARCH=y
-CT_ARCH_SUPPORTS_WITH_ABI=y
-CT_ARCH_SUPPORTS_WITH_TUNE=y
-CT_ARCH_ARCH="rv64gc"
-CT_ARCH_ABI=""
-CT_TARGET_CFLAGS=""
-CT_TARGET_LDFLAGS=""
-
-#
-# Toolchain options
-#
-
-#
-# General toolchain options
-#
-CT_FORCE_SYSROOT=y
-CT_USE_SYSROOT=y
-CT_SYSROOT_NAME="sysroot"
-CT_SYSROOT_DIR_PREFIX=""
-CT_WANTS_STATIC_LINK=y
-CT_WANTS_STATIC_LINK_CXX=y
-# CT_STATIC_TOOLCHAIN is not set
-CT_SHOW_CT_VERSION=y
-CT_TOOLCHAIN_PKGVERSION=""
-CT_TOOLCHAIN_BUGURL=""
-
-#
-# Tuple completion and aliasing
-#
-CT_TARGET_VENDOR="unknown"
-CT_TARGET_ALIAS_SED_EXPR=""
-CT_TARGET_ALIAS=""
-
-#
-# Toolchain type
-#
-# CT_NATIVE is not set
-CT_CROSS=y
-# CT_CROSS_NATIVE is not set
-# CT_CANADIAN is not set
-CT_TOOLCHAIN_TYPE="cross"
-
-#
-# Build system
-#
-CT_BUILD=""
-CT_BUILD_PREFIX=""
-CT_BUILD_SUFFIX=""
-
-#
-# Misc options
-#
-# CT_TOOLCHAIN_ENABLE_NLS is not set
-
-#
-# Operating System
-#
-CT_KERNEL_SUPPORTS_SHARED_LIBS=y
-# CT_KERNEL_BARE_METAL is not set
-CT_KERNEL_LINUX=y
-CT_KERNEL="linux"
-CT_KERNEL_CHOICE_KSYM="LINUX"
-CT_KERNEL_LINUX_SHOW=y
-
-#
-# Options for linux
-#
-CT_KERNEL_LINUX_PKG_KSYM="LINUX"
-CT_LINUX_DIR_NAME="linux"
-CT_LINUX_PKG_NAME="linux"
-CT_LINUX_SRC_RELEASE=y
-# CT_LINUX_SRC_DEVEL is not set
-# CT_LINUX_SRC_CUSTOM is not set
-CT_LINUX_PATCH_GLOBAL=y
-# CT_LINUX_PATCH_BUNDLED is not set
-# CT_LINUX_PATCH_LOCAL is not set
-# CT_LINUX_PATCH_BUNDLED_LOCAL is not set
-# CT_LINUX_PATCH_LOCAL_BUNDLED is not set
-# CT_LINUX_PATCH_NONE is not set
-CT_LINUX_PATCH_ORDER="global"
-CT_LINUX_V_4_20=y
-# CT_LINUX_V_4_19 is not set
-# CT_LINUX_V_4_18 is not set
-# CT_LINUX_V_4_17 is not set
-# CT_LINUX_V_4_16 is not set
-# CT_LINUX_V_4_15 is not set
-# CT_LINUX_V_4_14 is not set
-# CT_LINUX_V_4_13 is not set
-# CT_LINUX_V_4_12 is not set
-# CT_LINUX_V_4_11 is not set
-# CT_LINUX_V_4_10 is not set
-# CT_LINUX_V_4_9 is not set
-# CT_LINUX_V_4_4 is not set
-# CT_LINUX_V_4_1 is not set
-# CT_LINUX_V_3_16 is not set
-# CT_LINUX_V_3_13 is not set
-# CT_LINUX_V_3_12 is not set
-# CT_LINUX_V_3_10 is not set
-# CT_LINUX_V_3_4 is not set
-# CT_LINUX_V_3_2 is not set
-# CT_LINUX_NO_VERSIONS is not set
-CT_LINUX_VERSION="4.20.8"
-CT_LINUX_MIRRORS="$(CT_Mirrors kernel.org linux ${CT_LINUX_VERSION})"
-CT_LINUX_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_LINUX_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_LINUX_ARCHIVE_FORMATS=".tar.xz .tar.gz"
-CT_LINUX_SIGNATURE_FORMAT="unpacked/.sign"
-CT_LINUX_later_than_4_8=y
-CT_LINUX_4_8_or_later=y
-CT_LINUX_later_than_3_7=y
-CT_LINUX_3_7_or_later=y
-CT_LINUX_later_than_3_2=y
-CT_LINUX_3_2_or_later=y
-CT_LINUX_REQUIRE_3_2_or_later=y
-CT_KERNEL_LINUX_VERBOSITY_0=y
-# CT_KERNEL_LINUX_VERBOSITY_1 is not set
-# CT_KERNEL_LINUX_VERBOSITY_2 is not set
-CT_KERNEL_LINUX_VERBOSE_LEVEL=0
-CT_KERNEL_LINUX_INSTALL_CHECK=y
-CT_ALL_KERNEL_CHOICES="BARE_METAL LINUX WINDOWS"
-
-#
-# Common kernel options
-#
-CT_SHARED_LIBS=y
-
-#
-# Binary utilities
-#
-CT_ARCH_BINFMT_ELF=y
-CT_BINUTILS_BINUTILS=y
-CT_BINUTILS="binutils"
-CT_BINUTILS_CHOICE_KSYM="BINUTILS"
-CT_BINUTILS_BINUTILS_SHOW=y
-
-#
-# Options for binutils
-#
-CT_BINUTILS_BINUTILS_PKG_KSYM="BINUTILS"
-CT_BINUTILS_DIR_NAME="binutils"
-CT_BINUTILS_USE_GNU=y
-CT_BINUTILS_USE="BINUTILS"
-CT_BINUTILS_PKG_NAME="binutils"
-CT_BINUTILS_SRC_RELEASE=y
-# CT_BINUTILS_SRC_DEVEL is not set
-# CT_BINUTILS_SRC_CUSTOM is not set
-CT_BINUTILS_PATCH_GLOBAL=y
-# CT_BINUTILS_PATCH_BUNDLED is not set
-# CT_BINUTILS_PATCH_LOCAL is not set
-# CT_BINUTILS_PATCH_BUNDLED_LOCAL is not set
-# CT_BINUTILS_PATCH_LOCAL_BUNDLED is not set
-# CT_BINUTILS_PATCH_NONE is not set
-CT_BINUTILS_PATCH_ORDER="global"
-CT_BINUTILS_V_2_32=y
-# CT_BINUTILS_V_2_31 is not set
-# CT_BINUTILS_V_2_30 is not set
-# CT_BINUTILS_V_2_29 is not set
-# CT_BINUTILS_V_2_28 is not set
-# CT_BINUTILS_V_2_27 is not set
-# CT_BINUTILS_V_2_26 is not set
-# CT_BINUTILS_NO_VERSIONS is not set
-CT_BINUTILS_VERSION="2.32"
-CT_BINUTILS_MIRRORS="$(CT_Mirrors GNU binutils) $(CT_Mirrors sourceware binutils/releases)"
-CT_BINUTILS_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_BINUTILS_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_BINUTILS_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz"
-CT_BINUTILS_SIGNATURE_FORMAT="packed/.sig"
-CT_BINUTILS_later_than_2_30=y
-CT_BINUTILS_2_30_or_later=y
-CT_BINUTILS_later_than_2_27=y
-CT_BINUTILS_2_27_or_later=y
-CT_BINUTILS_later_than_2_25=y
-CT_BINUTILS_2_25_or_later=y
-CT_BINUTILS_REQUIRE_2_25_or_later=y
-CT_BINUTILS_later_than_2_23=y
-CT_BINUTILS_2_23_or_later=y
-
-#
-# GNU binutils
-#
-CT_BINUTILS_HAS_HASH_STYLE=y
-CT_BINUTILS_HAS_GOLD=y
-CT_BINUTILS_HAS_PLUGINS=y
-CT_BINUTILS_HAS_PKGVERSION_BUGURL=y
-CT_BINUTILS_FORCE_LD_BFD_DEFAULT=y
-CT_BINUTILS_LINKER_LD=y
-CT_BINUTILS_LINKERS_LIST="ld"
-CT_BINUTILS_LINKER_DEFAULT="bfd"
-# CT_BINUTILS_PLUGINS is not set
-CT_BINUTILS_RELRO=m
-CT_BINUTILS_EXTRA_CONFIG_ARRAY=""
-# CT_BINUTILS_FOR_TARGET is not set
-CT_ALL_BINUTILS_CHOICES="BINUTILS"
-
-#
-# C-library
-#
-CT_LIBC_GLIBC=y
-# CT_LIBC_MUSL is not set
-# CT_LIBC_UCLIBC is not set
-CT_LIBC="glibc"
-CT_LIBC_CHOICE_KSYM="GLIBC"
-CT_THREADS="nptl"
-CT_LIBC_GLIBC_SHOW=y
-
-#
-# Options for glibc
-#
-CT_LIBC_GLIBC_PKG_KSYM="GLIBC"
-CT_GLIBC_DIR_NAME="glibc"
-CT_GLIBC_USE_GNU=y
-CT_GLIBC_USE="GLIBC"
-CT_GLIBC_PKG_NAME="glibc"
-CT_GLIBC_SRC_RELEASE=y
-# CT_GLIBC_SRC_DEVEL is not set
-# CT_GLIBC_SRC_CUSTOM is not set
-CT_GLIBC_PATCH_GLOBAL=y
-# CT_GLIBC_PATCH_BUNDLED is not set
-# CT_GLIBC_PATCH_LOCAL is not set
-# CT_GLIBC_PATCH_BUNDLED_LOCAL is not set
-# CT_GLIBC_PATCH_LOCAL_BUNDLED is not set
-# CT_GLIBC_PATCH_NONE is not set
-CT_GLIBC_PATCH_ORDER="global"
-CT_GLIBC_V_2_29=y
-# CT_GLIBC_NO_VERSIONS is not set
-CT_GLIBC_VERSION="2.29"
-CT_GLIBC_MIRRORS="$(CT_Mirrors GNU glibc)"
-CT_GLIBC_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_GLIBC_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_GLIBC_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz"
-CT_GLIBC_SIGNATURE_FORMAT="packed/.sig"
-CT_GLIBC_2_29_or_later=y
-CT_GLIBC_2_29_or_older=y
-CT_GLIBC_REQUIRE_2_29_or_later=y
-CT_GLIBC_later_than_2_27=y
-CT_GLIBC_2_27_or_later=y
-CT_GLIBC_later_than_2_26=y
-CT_GLIBC_2_26_or_later=y
-CT_GLIBC_later_than_2_25=y
-CT_GLIBC_2_25_or_later=y
-CT_GLIBC_later_than_2_24=y
-CT_GLIBC_2_24_or_later=y
-CT_GLIBC_later_than_2_23=y
-CT_GLIBC_2_23_or_later=y
-CT_GLIBC_later_than_2_20=y
-CT_GLIBC_2_20_or_later=y
-CT_GLIBC_later_than_2_17=y
-CT_GLIBC_2_17_or_later=y
-CT_GLIBC_later_than_2_14=y
-CT_GLIBC_2_14_or_later=y
-CT_GLIBC_DEP_KERNEL_HEADERS_VERSION=y
-CT_GLIBC_DEP_BINUTILS=y
-CT_GLIBC_DEP_GCC=y
-CT_GLIBC_DEP_PYTHON=y
-CT_GLIBC_BUILD_SSP=y
-CT_GLIBC_HAS_LIBIDN_ADDON=y
-# CT_GLIBC_USE_LIBIDN_ADDON is not set
-CT_GLIBC_NO_SPARC_V8=y
-CT_GLIBC_HAS_OBSOLETE_RPC=y
-CT_GLIBC_EXTRA_CONFIG_ARRAY=""
-CT_GLIBC_CONFIGPARMS=""
-CT_GLIBC_EXTRA_CFLAGS=""
-CT_GLIBC_ENABLE_OBSOLETE_RPC=y
-# CT_GLIBC_ENABLE_FORTIFIED_BUILD is not set
-# CT_GLIBC_DISABLE_VERSIONING is not set
-CT_GLIBC_OLDEST_ABI=""
-CT_GLIBC_FORCE_UNWIND=y
-# CT_GLIBC_LOCALES is not set
-CT_GLIBC_KERNEL_VERSION_NONE=y
-# CT_GLIBC_KERNEL_VERSION_AS_HEADERS is not set
-# CT_GLIBC_KERNEL_VERSION_CHOSEN is not set
-CT_GLIBC_MIN_KERNEL=""
-CT_GLIBC_SSP_DEFAULT=y
-# CT_GLIBC_SSP_NO is not set
-# CT_GLIBC_SSP_YES is not set
-# CT_GLIBC_SSP_ALL is not set
-# CT_GLIBC_SSP_STRONG is not set
-# CT_GLIBC_ENABLE_WERROR is not set
-CT_ALL_LIBC_CHOICES="AVR_LIBC BIONIC GLIBC MINGW_W64 MOXIEBOX MUSL NEWLIB NONE UCLIBC"
-CT_LIBC_SUPPORT_THREADS_ANY=y
-CT_LIBC_SUPPORT_THREADS_NATIVE=y
-
-#
-# Common C library options
-#
-CT_THREADS_NATIVE=y
-# CT_CREATE_LDSO_CONF is not set
-CT_LIBC_XLDD=y
-
-#
-# C compiler
-#
-CT_CC_CORE_PASSES_NEEDED=y
-CT_CC_CORE_PASS_1_NEEDED=y
-CT_CC_CORE_PASS_2_NEEDED=y
-CT_CC_SUPPORT_CXX=y
-CT_CC_SUPPORT_FORTRAN=y
-CT_CC_SUPPORT_ADA=y
-CT_CC_SUPPORT_OBJC=y
-CT_CC_SUPPORT_OBJCXX=y
-CT_CC_SUPPORT_GOLANG=y
-CT_CC_GCC=y
-CT_CC="gcc"
-CT_CC_CHOICE_KSYM="GCC"
-CT_CC_GCC_SHOW=y
-
-#
-# Options for gcc
-#
-CT_CC_GCC_PKG_KSYM="GCC"
-CT_GCC_DIR_NAME="gcc"
-CT_GCC_USE_GNU=y
-# CT_GCC_USE_LINARO is not set
-CT_GCC_USE="GCC"
-CT_GCC_PKG_NAME="gcc"
-CT_GCC_SRC_RELEASE=y
-# CT_GCC_SRC_DEVEL is not set
-# CT_GCC_SRC_CUSTOM is not set
-CT_GCC_PATCH_GLOBAL=y
-# CT_GCC_PATCH_BUNDLED is not set
-# CT_GCC_PATCH_LOCAL is not set
-# CT_GCC_PATCH_BUNDLED_LOCAL is not set
-# CT_GCC_PATCH_LOCAL_BUNDLED is not set
-# CT_GCC_PATCH_NONE is not set
-CT_GCC_PATCH_ORDER="global"
-CT_GCC_V_8=y
-# CT_GCC_V_7 is not set
-# CT_GCC_NO_VERSIONS is not set
-CT_GCC_VERSION="8.3.0"
-CT_GCC_MIRRORS="$(CT_Mirrors GNU gcc/gcc-${CT_GCC_VERSION}) $(CT_Mirrors sourceware gcc/releases/gcc-${CT_GCC_VERSION})"
-CT_GCC_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_GCC_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_GCC_ARCHIVE_FORMATS=".tar.xz .tar.gz"
-CT_GCC_SIGNATURE_FORMAT=""
-CT_GCC_later_than_7=y
-CT_GCC_7_or_later=y
-CT_GCC_REQUIRE_7_or_later=y
-CT_GCC_later_than_6=y
-CT_GCC_6_or_later=y
-CT_GCC_later_than_5=y
-CT_GCC_5_or_later=y
-CT_GCC_REQUIRE_5_or_later=y
-CT_GCC_later_than_4_9=y
-CT_GCC_4_9_or_later=y
-CT_GCC_REQUIRE_4_9_or_later=y
-CT_GCC_later_than_4_8=y
-CT_GCC_4_8_or_later=y
-CT_CC_GCC_HAS_LIBMPX=y
-CT_CC_GCC_ENABLE_CXX_FLAGS=""
-CT_CC_GCC_CORE_EXTRA_CONFIG_ARRAY=""
-CT_CC_GCC_EXTRA_CONFIG_ARRAY=""
-CT_CC_GCC_STATIC_LIBSTDCXX=y
-# CT_CC_GCC_SYSTEM_ZLIB is not set
-CT_CC_GCC_CONFIG_TLS=m
-
-#
-# Optimisation features
-#
-CT_CC_GCC_USE_GRAPHITE=y
-CT_CC_GCC_USE_LTO=y
-
-#
-# Settings for libraries running on target
-#
-CT_CC_GCC_ENABLE_TARGET_OPTSPACE=y
-# CT_CC_GCC_LIBMUDFLAP is not set
-# CT_CC_GCC_LIBGOMP is not set
-# CT_CC_GCC_LIBSSP is not set
-# CT_CC_GCC_LIBQUADMATH is not set
-# CT_CC_GCC_LIBSANITIZER is not set
-
-#
-# Misc. obscure options.
-#
-CT_CC_CXA_ATEXIT=y
-# CT_CC_GCC_DISABLE_PCH is not set
-CT_CC_GCC_SJLJ_EXCEPTIONS=m
-CT_CC_GCC_LDBL_128=m
-# CT_CC_GCC_BUILD_ID is not set
-CT_CC_GCC_LNK_HASH_STYLE_DEFAULT=y
-# CT_CC_GCC_LNK_HASH_STYLE_SYSV is not set
-# CT_CC_GCC_LNK_HASH_STYLE_GNU is not set
-# CT_CC_GCC_LNK_HASH_STYLE_BOTH is not set
-CT_CC_GCC_LNK_HASH_STYLE=""
-CT_CC_GCC_DEC_FLOAT_AUTO=y
-# CT_CC_GCC_DEC_FLOAT_BID is not set
-# CT_CC_GCC_DEC_FLOAT_DPD is not set
-# CT_CC_GCC_DEC_FLOATS_NO is not set
-CT_ALL_CC_CHOICES="GCC"
-
-#
-# Additional supported languages:
-#
-CT_CC_LANG_CXX=y
-# CT_CC_LANG_FORTRAN is not set
-# CT_CC_LANG_ADA is not set
-# CT_CC_LANG_OBJC is not set
-# CT_CC_LANG_OBJCXX is not set
-# CT_CC_LANG_GOLANG is not set
-CT_CC_LANG_OTHERS=""
-
-#
-# Debug facilities
-#
-# CT_DEBUG_DUMA is not set
-CT_DEBUG_GDB=y
-CT_DEBUG_GDB_PKG_KSYM="GDB"
-CT_GDB_DIR_NAME="gdb"
-CT_GDB_USE_GNU=y
-CT_GDB_USE="GDB"
-CT_GDB_PKG_NAME="gdb"
-CT_GDB_SRC_RELEASE=y
-# CT_GDB_SRC_DEVEL is not set
-# CT_GDB_SRC_CUSTOM is not set
-CT_GDB_PATCH_GLOBAL=y
-# CT_GDB_PATCH_BUNDLED is not set
-# CT_GDB_PATCH_LOCAL is not set
-# CT_GDB_PATCH_BUNDLED_LOCAL is not set
-# CT_GDB_PATCH_LOCAL_BUNDLED is not set
-# CT_GDB_PATCH_NONE is not set
-CT_GDB_PATCH_ORDER="global"
-CT_GDB_V_8_2=y
-# CT_GDB_V_8_1 is not set
-# CT_GDB_V_8_0 is not set
-# CT_GDB_NO_VERSIONS is not set
-CT_GDB_VERSION="8.2.1"
-CT_GDB_MIRRORS="$(CT_Mirrors GNU gdb) $(CT_Mirrors sourceware gdb/releases)"
-CT_GDB_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_GDB_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_GDB_ARCHIVE_FORMATS=".tar.xz .tar.gz"
-CT_GDB_SIGNATURE_FORMAT=""
-CT_GDB_later_than_8_0=y
-CT_GDB_8_0_or_later=y
-CT_GDB_REQUIRE_8_0_or_later=y
-CT_GDB_later_than_7_12=y
-CT_GDB_7_12_or_later=y
-CT_GDB_later_than_7_2=y
-CT_GDB_7_2_or_later=y
-CT_GDB_later_than_7_0=y
-CT_GDB_7_0_or_later=y
-CT_GDB_CROSS=y
-# CT_GDB_CROSS_STATIC is not set
-# CT_GDB_CROSS_SIM is not set
-# CT_GDB_CROSS_PYTHON is not set
-CT_GDB_CROSS_EXTRA_CONFIG_ARRAY=""
-# CT_GDB_NATIVE is not set
-# CT_GDB_GDBSERVER is not set
-CT_GDB_HAS_PKGVERSION_BUGURL=y
-CT_GDB_HAS_PYTHON=y
-CT_GDB_INSTALL_GDBINIT=y
-CT_GDB_HAS_IPA_LIB=y
-# CT_DEBUG_LTRACE is not set
-# CT_DEBUG_STRACE is not set
-CT_ALL_DEBUG_CHOICES="DUMA GDB LTRACE STRACE"
-
-#
-# Companion libraries
-#
-# CT_COMPLIBS_CHECK is not set
-# CT_COMP_LIBS_CLOOG is not set
-CT_COMP_LIBS_EXPAT=y
-CT_COMP_LIBS_EXPAT_PKG_KSYM="EXPAT"
-CT_EXPAT_DIR_NAME="expat"
-CT_EXPAT_PKG_NAME="expat"
-CT_EXPAT_SRC_RELEASE=y
-# CT_EXPAT_SRC_DEVEL is not set
-# CT_EXPAT_SRC_CUSTOM is not set
-CT_EXPAT_PATCH_GLOBAL=y
-# CT_EXPAT_PATCH_BUNDLED is not set
-# CT_EXPAT_PATCH_LOCAL is not set
-# CT_EXPAT_PATCH_BUNDLED_LOCAL is not set
-# CT_EXPAT_PATCH_LOCAL_BUNDLED is not set
-# CT_EXPAT_PATCH_NONE is not set
-CT_EXPAT_PATCH_ORDER="global"
-CT_EXPAT_V_2_2=y
-# CT_EXPAT_NO_VERSIONS is not set
-CT_EXPAT_VERSION="2.2.6"
-CT_EXPAT_MIRRORS="http://downloads.sourceforge.net/project/expat/expat/${CT_EXPAT_VERSION}"
-CT_EXPAT_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_EXPAT_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_EXPAT_ARCHIVE_FORMATS=".tar.bz2"
-CT_EXPAT_SIGNATURE_FORMAT=""
-CT_COMP_LIBS_GETTEXT=y
-CT_COMP_LIBS_GETTEXT_PKG_KSYM="GETTEXT"
-CT_GETTEXT_DIR_NAME="gettext"
-CT_GETTEXT_PKG_NAME="gettext"
-CT_GETTEXT_SRC_RELEASE=y
-# CT_GETTEXT_SRC_DEVEL is not set
-# CT_GETTEXT_SRC_CUSTOM is not set
-CT_GETTEXT_PATCH_GLOBAL=y
-# CT_GETTEXT_PATCH_BUNDLED is not set
-# CT_GETTEXT_PATCH_LOCAL is not set
-# CT_GETTEXT_PATCH_BUNDLED_LOCAL is not set
-# CT_GETTEXT_PATCH_LOCAL_BUNDLED is not set
-# CT_GETTEXT_PATCH_NONE is not set
-CT_GETTEXT_PATCH_ORDER="global"
-CT_GETTEXT_V_0_19_8_1=y
-# CT_GETTEXT_NO_VERSIONS is not set
-CT_GETTEXT_VERSION="0.19.8.1"
-CT_GETTEXT_MIRRORS="$(CT_Mirrors GNU gettext)"
-CT_GETTEXT_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_GETTEXT_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_GETTEXT_ARCHIVE_FORMATS=".tar.xz .tar.lz .tar.gz"
-CT_GETTEXT_SIGNATURE_FORMAT="packed/.sig"
-CT_COMP_LIBS_GMP=y
-CT_COMP_LIBS_GMP_PKG_KSYM="GMP"
-CT_GMP_DIR_NAME="gmp"
-CT_GMP_PKG_NAME="gmp"
-CT_GMP_SRC_RELEASE=y
-# CT_GMP_SRC_DEVEL is not set
-# CT_GMP_SRC_CUSTOM is not set
-CT_GMP_PATCH_GLOBAL=y
-# CT_GMP_PATCH_BUNDLED is not set
-# CT_GMP_PATCH_LOCAL is not set
-# CT_GMP_PATCH_BUNDLED_LOCAL is not set
-# CT_GMP_PATCH_LOCAL_BUNDLED is not set
-# CT_GMP_PATCH_NONE is not set
-CT_GMP_PATCH_ORDER="global"
-CT_GMP_V_6_1=y
-# CT_GMP_NO_VERSIONS is not set
-CT_GMP_VERSION="6.1.2"
-CT_GMP_MIRRORS="https://gmplib.org/download/gmp https://gmplib.org/download/gmp/archive $(CT_Mirrors GNU gmp)"
-CT_GMP_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_GMP_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_GMP_ARCHIVE_FORMATS=".tar.xz .tar.lz .tar.bz2"
-CT_GMP_SIGNATURE_FORMAT="packed/.sig"
-CT_GMP_later_than_5_1_0=y
-CT_GMP_5_1_0_or_later=y
-CT_GMP_later_than_5_0_0=y
-CT_GMP_5_0_0_or_later=y
-CT_GMP_REQUIRE_5_0_0_or_later=y
-CT_COMP_LIBS_ISL=y
-CT_COMP_LIBS_ISL_PKG_KSYM="ISL"
-CT_ISL_DIR_NAME="isl"
-CT_ISL_PKG_NAME="isl"
-CT_ISL_SRC_RELEASE=y
-# CT_ISL_SRC_DEVEL is not set
-# CT_ISL_SRC_CUSTOM is not set
-CT_ISL_PATCH_GLOBAL=y
-# CT_ISL_PATCH_BUNDLED is not set
-# CT_ISL_PATCH_LOCAL is not set
-# CT_ISL_PATCH_BUNDLED_LOCAL is not set
-# CT_ISL_PATCH_LOCAL_BUNDLED is not set
-# CT_ISL_PATCH_NONE is not set
-CT_ISL_PATCH_ORDER="global"
-CT_ISL_V_0_20=y
-# CT_ISL_V_0_19 is not set
-# CT_ISL_V_0_18 is not set
-# CT_ISL_V_0_17 is not set
-# CT_ISL_V_0_16 is not set
-# CT_ISL_V_0_15 is not set
-# CT_ISL_NO_VERSIONS is not set
-CT_ISL_VERSION="0.20"
-CT_ISL_MIRRORS="http://isl.gforge.inria.fr"
-CT_ISL_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_ISL_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_ISL_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz"
-CT_ISL_SIGNATURE_FORMAT=""
-CT_ISL_later_than_0_18=y
-CT_ISL_0_18_or_later=y
-CT_ISL_later_than_0_15=y
-CT_ISL_0_15_or_later=y
-CT_ISL_REQUIRE_0_15_or_later=y
-CT_ISL_later_than_0_14=y
-CT_ISL_0_14_or_later=y
-CT_ISL_REQUIRE_0_14_or_later=y
-CT_ISL_later_than_0_13=y
-CT_ISL_0_13_or_later=y
-CT_ISL_later_than_0_12=y
-CT_ISL_0_12_or_later=y
-CT_ISL_REQUIRE_0_12_or_later=y
-# CT_COMP_LIBS_LIBELF is not set
-CT_COMP_LIBS_LIBICONV=y
-CT_COMP_LIBS_LIBICONV_PKG_KSYM="LIBICONV"
-CT_LIBICONV_DIR_NAME="libiconv"
-CT_LIBICONV_PKG_NAME="libiconv"
-CT_LIBICONV_SRC_RELEASE=y
-# CT_LIBICONV_SRC_DEVEL is not set
-# CT_LIBICONV_SRC_CUSTOM is not set
-CT_LIBICONV_PATCH_GLOBAL=y
-# CT_LIBICONV_PATCH_BUNDLED is not set
-# CT_LIBICONV_PATCH_LOCAL is not set
-# CT_LIBICONV_PATCH_BUNDLED_LOCAL is not set
-# CT_LIBICONV_PATCH_LOCAL_BUNDLED is not set
-# CT_LIBICONV_PATCH_NONE is not set
-CT_LIBICONV_PATCH_ORDER="global"
-CT_LIBICONV_V_1_15=y
-# CT_LIBICONV_NO_VERSIONS is not set
-CT_LIBICONV_VERSION="1.15"
-CT_LIBICONV_MIRRORS="$(CT_Mirrors GNU libiconv)"
-CT_LIBICONV_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_LIBICONV_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_LIBICONV_ARCHIVE_FORMATS=".tar.gz"
-CT_LIBICONV_SIGNATURE_FORMAT="packed/.sig"
-CT_COMP_LIBS_MPC=y
-CT_COMP_LIBS_MPC_PKG_KSYM="MPC"
-CT_MPC_DIR_NAME="mpc"
-CT_MPC_PKG_NAME="mpc"
-CT_MPC_SRC_RELEASE=y
-# CT_MPC_SRC_DEVEL is not set
-# CT_MPC_SRC_CUSTOM is not set
-CT_MPC_PATCH_GLOBAL=y
-# CT_MPC_PATCH_BUNDLED is not set
-# CT_MPC_PATCH_LOCAL is not set
-# CT_MPC_PATCH_BUNDLED_LOCAL is not set
-# CT_MPC_PATCH_LOCAL_BUNDLED is not set
-# CT_MPC_PATCH_NONE is not set
-CT_MPC_PATCH_ORDER="global"
-CT_MPC_V_1_1=y
-# CT_MPC_V_1_0 is not set
-# CT_MPC_NO_VERSIONS is not set
-CT_MPC_VERSION="1.1.0"
-CT_MPC_MIRRORS="http://www.multiprecision.org/downloads $(CT_Mirrors GNU mpc)"
-CT_MPC_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_MPC_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_MPC_ARCHIVE_FORMATS=".tar.gz"
-CT_MPC_SIGNATURE_FORMAT="packed/.sig"
-CT_MPC_1_1_0_or_later=y
-CT_MPC_1_1_0_or_older=y
-CT_COMP_LIBS_MPFR=y
-CT_COMP_LIBS_MPFR_PKG_KSYM="MPFR"
-CT_MPFR_DIR_NAME="mpfr"
-CT_MPFR_PKG_NAME="mpfr"
-CT_MPFR_SRC_RELEASE=y
-# CT_MPFR_SRC_DEVEL is not set
-# CT_MPFR_SRC_CUSTOM is not set
-CT_MPFR_PATCH_GLOBAL=y
-# CT_MPFR_PATCH_BUNDLED is not set
-# CT_MPFR_PATCH_LOCAL is not set
-# CT_MPFR_PATCH_BUNDLED_LOCAL is not set
-# CT_MPFR_PATCH_LOCAL_BUNDLED is not set
-# CT_MPFR_PATCH_NONE is not set
-CT_MPFR_PATCH_ORDER="global"
-CT_MPFR_V_4_0=y
-# CT_MPFR_V_3_1 is not set
-# CT_MPFR_NO_VERSIONS is not set
-CT_MPFR_VERSION="4.0.2"
-CT_MPFR_MIRRORS="http://www.mpfr.org/mpfr-${CT_MPFR_VERSION} $(CT_Mirrors GNU mpfr)"
-CT_MPFR_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_MPFR_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_MPFR_ARCHIVE_FORMATS=".tar.xz .tar.bz2 .tar.gz .zip"
-CT_MPFR_SIGNATURE_FORMAT="packed/.asc"
-CT_MPFR_later_than_4_0_0=y
-CT_MPFR_4_0_0_or_later=y
-CT_MPFR_later_than_3_0_0=y
-CT_MPFR_3_0_0_or_later=y
-CT_MPFR_REQUIRE_3_0_0_or_later=y
-CT_COMP_LIBS_NCURSES=y
-CT_COMP_LIBS_NCURSES_PKG_KSYM="NCURSES"
-CT_NCURSES_DIR_NAME="ncurses"
-CT_NCURSES_PKG_NAME="ncurses"
-CT_NCURSES_SRC_RELEASE=y
-# CT_NCURSES_SRC_DEVEL is not set
-# CT_NCURSES_SRC_CUSTOM is not set
-CT_NCURSES_PATCH_GLOBAL=y
-# CT_NCURSES_PATCH_BUNDLED is not set
-# CT_NCURSES_PATCH_LOCAL is not set
-# CT_NCURSES_PATCH_BUNDLED_LOCAL is not set
-# CT_NCURSES_PATCH_LOCAL_BUNDLED is not set
-# CT_NCURSES_PATCH_NONE is not set
-CT_NCURSES_PATCH_ORDER="global"
-CT_NCURSES_V_6_1=y
-# CT_NCURSES_V_6_0 is not set
-# CT_NCURSES_NO_VERSIONS is not set
-CT_NCURSES_VERSION="6.1"
-CT_NCURSES_MIRRORS="ftp://invisible-island.net/ncurses $(CT_Mirrors GNU ncurses)"
-CT_NCURSES_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_NCURSES_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_NCURSES_ARCHIVE_FORMATS=".tar.gz"
-CT_NCURSES_SIGNATURE_FORMAT="packed/.sig"
-# CT_NCURSES_NEW_ABI is not set
-CT_NCURSES_HOST_CONFIG_ARGS=""
-CT_NCURSES_HOST_DISABLE_DB=y
-CT_NCURSES_HOST_FALLBACKS="linux,xterm,xterm-color,xterm-256color,vt100"
-CT_NCURSES_TARGET_CONFIG_ARGS=""
-# CT_NCURSES_TARGET_DISABLE_DB is not set
-CT_NCURSES_TARGET_FALLBACKS=""
-CT_COMP_LIBS_ZLIB=y
-CT_COMP_LIBS_ZLIB_PKG_KSYM="ZLIB"
-CT_ZLIB_DIR_NAME="zlib"
-CT_ZLIB_PKG_NAME="zlib"
-CT_ZLIB_SRC_RELEASE=y
-# CT_ZLIB_SRC_DEVEL is not set
-# CT_ZLIB_SRC_CUSTOM is not set
-CT_ZLIB_PATCH_GLOBAL=y
-# CT_ZLIB_PATCH_BUNDLED is not set
-# CT_ZLIB_PATCH_LOCAL is not set
-# CT_ZLIB_PATCH_BUNDLED_LOCAL is not set
-# CT_ZLIB_PATCH_LOCAL_BUNDLED is not set
-# CT_ZLIB_PATCH_NONE is not set
-CT_ZLIB_PATCH_ORDER="global"
-CT_ZLIB_V_1_2_11=y
-# CT_ZLIB_NO_VERSIONS is not set
-CT_ZLIB_VERSION="1.2.11"
-CT_ZLIB_MIRRORS="http://downloads.sourceforge.net/project/libpng/zlib/${CT_ZLIB_VERSION}"
-CT_ZLIB_ARCHIVE_FILENAME="@{pkg_name}-@{version}"
-CT_ZLIB_ARCHIVE_DIRNAME="@{pkg_name}-@{version}"
-CT_ZLIB_ARCHIVE_FORMATS=".tar.xz .tar.gz"
-CT_ZLIB_SIGNATURE_FORMAT="packed/.asc"
-CT_ALL_COMP_LIBS_CHOICES="CLOOG EXPAT GETTEXT GMP ISL LIBELF LIBICONV MPC MPFR NCURSES ZLIB"
-CT_LIBICONV_NEEDED=y
-CT_GETTEXT_NEEDED=y
-CT_GMP_NEEDED=y
-CT_MPFR_NEEDED=y
-CT_ISL_NEEDED=y
-CT_MPC_NEEDED=y
-CT_EXPAT_NEEDED=y
-CT_NCURSES_NEEDED=y
-CT_ZLIB_NEEDED=y
-CT_LIBICONV=y
-CT_GETTEXT=y
-CT_GMP=y
-CT_MPFR=y
-CT_ISL=y
-CT_MPC=y
-CT_EXPAT=y
-CT_NCURSES=y
-CT_ZLIB=y
-
-#
-# Companion tools
-#
-# CT_COMP_TOOLS_FOR_HOST is not set
-# CT_COMP_TOOLS_AUTOCONF is not set
-# CT_COMP_TOOLS_AUTOMAKE is not set
-# CT_COMP_TOOLS_BISON is not set
-# CT_COMP_TOOLS_DTC is not set
-# CT_COMP_TOOLS_LIBTOOL is not set
-# CT_COMP_TOOLS_M4 is not set
-# CT_COMP_TOOLS_MAKE is not set
-CT_ALL_COMP_TOOLS_CHOICES="AUTOCONF AUTOMAKE BISON DTC LIBTOOL M4 MAKE"
-
-#
-# Test suite
-#
-# CT_TEST_SUITE_GCC is not set
-FROM centos:5
+# We use Debian 6 (glibc 2.11, kernel 2.6.32) as a common base for other
+# distros that still need Rust support: RHEL 6 (glibc 2.12, kernel 2.6.32) and
+# SLES 11 SP4 (glibc 2.11, kernel 3.0).
+FROM debian:6
WORKDIR /build
-# Centos 5 is EOL and is no longer available from the usual mirrors, so switch
-# to http://vault.centos.org/
-RUN sed -i 's/enabled=1/enabled=0/' /etc/yum/pluginconf.d/fastestmirror.conf
-RUN sed -i 's/mirrorlist/#mirrorlist/' /etc/yum.repos.d/*.repo
-RUN sed -i 's|#\(baseurl.*\)mirror.centos.org/centos/$releasever|\1vault.centos.org/5.11|' /etc/yum.repos.d/*.repo
+# Debian 6 is EOL and no longer available from the usual mirrors,
+# so we'll need to switch to http://archive.debian.org/
+RUN sed -i '/updates/d' /etc/apt/sources.list && \
+ sed -i 's/httpredir/archive/' /etc/apt/sources.list
-RUN yum upgrade -y && yum install -y \
- curl \
+RUN apt-get update && \
+ apt-get install --allow-unauthenticated -y --no-install-recommends \
+ automake \
bzip2 \
+ ca-certificates \
+ curl \
+ file \
+ g++ \
+ g++-multilib \
gcc \
- gcc-c++ \
+ gcc-multilib \
+ git \
+ lib32z1-dev \
+ libedit-dev \
+ libncurses-dev \
make \
- glibc-devel \
+ patch \
perl \
- zlib-devel \
- file \
- xz \
- which \
- pkgconfig \
+ pkg-config \
+ unzip \
wget \
- autoconf \
- gettext
+ xz-utils \
+ zlib1g-dev
ENV PATH=/rustroot/bin:$PATH
-ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib
+ENV LD_LIBRARY_PATH=/rustroot/lib64:/rustroot/lib32:/rustroot/lib
ENV PKG_CONFIG_PATH=/rustroot/lib/pkgconfig
WORKDIR /tmp
+RUN mkdir /home/user
COPY host-x86_64/dist-x86_64-linux/shared.sh /tmp/
# We need a build of openssl which supports SNI to download artifacts from
COPY host-x86_64/dist-x86_64-linux/build-openssl.sh /tmp/
RUN ./build-openssl.sh
-# The `curl` binary on CentOS doesn't support SNI which is needed for fetching
+# The `curl` binary on Debian 6 doesn't support SNI which is needed for fetching
# some https urls we have, so install a new version of libcurl + curl which is
# using the openssl we just built previously.
#
# Note that we also disable a bunch of optional features of curl that we don't
# really need.
COPY host-x86_64/dist-x86_64-linux/build-curl.sh /tmp/
-RUN ./build-curl.sh
+RUN ./build-curl.sh && apt-get remove -y curl
# binutils < 2.22 has a bug where the 32-bit executables it generates
# immediately segfault in Rust, so we need to install our own binutils.
COPY host-x86_64/dist-x86_64-linux/build-binutils.sh /tmp/
RUN ./build-binutils.sh
-# libssh2 (a dependency of Cargo) requires cmake 2.8.11 or higher but CentOS
-# only has 2.6.4, so build our own
-COPY host-x86_64/dist-x86_64-linux/build-cmake.sh /tmp/
-RUN ./build-cmake.sh
-
-# Build a version of gcc capable of building LLVM 6
+# Need at least GCC 5.1 to compile LLVM nowadays
COPY host-x86_64/dist-x86_64-linux/build-gcc.sh /tmp/
-RUN ./build-gcc.sh
+RUN ./build-gcc.sh && apt-get remove -y gcc g++
-# CentOS 5.5 has Python 2.4 by default, but LLVM needs 2.7+
+# Debian 6 has Python 2.6 by default, but LLVM needs 2.7+
COPY host-x86_64/dist-x86_64-linux/build-python.sh /tmp/
RUN ./build-python.sh
-# Now build LLVM+Clang 7, afterwards configuring further compilations to use the
+# LLVM needs cmake 3.4.3 or higher, and the minimum is planned to be raised to 3.13.4.
+COPY host-x86_64/dist-x86_64-linux/build-cmake.sh /tmp/
+RUN ./build-cmake.sh
+
+# Now build LLVM+Clang, afterwards configuring further compilations to use the
# clang/clang++ compilers.
-COPY host-x86_64/dist-x86_64-linux/build-clang.sh host-x86_64/dist-x86_64-linux/llvm-project-centos.patch /tmp/
+COPY host-x86_64/dist-x86_64-linux/build-clang.sh /tmp/
RUN ./build-clang.sh
ENV CC=clang CXX=clang++
-# Apparently CentOS 5.5 desn't have `git` in yum, but we're gonna need it for
-# cloning, so download and build it here.
-COPY host-x86_64/dist-x86_64-linux/build-git.sh /tmp/
-RUN ./build-git.sh
-
-# for sanitizers, we need kernel headers files newer than the ones CentOS ships
-# with so we install newer ones here
-COPY host-x86_64/dist-x86_64-linux/build-headers.sh /tmp/
-RUN ./build-headers.sh
-
-# OpenSSL requires a more recent version of perl
-# with so we install newer ones here
-COPY host-x86_64/dist-x86_64-linux/build-perl.sh /tmp/
-RUN ./build-perl.sh
-
COPY scripts/sccache.sh /scripts/
RUN sh /scripts/sccache.sh
curl -L https://github.com/llvm/llvm-project/archive/$LLVM.tar.gz | \
tar xzf - --strip-components=1
-yum install -y patch
-patch -Np1 < ../llvm-project-centos.patch
-
mkdir clang-build
cd clang-build
set -ex
source shared.sh
-curl https://cmake.org/files/v3.6/cmake-3.6.3.tar.gz | tar xzf -
+CMAKE=3.13.4
+curl -L https://github.com/Kitware/CMake/releases/download/v$CMAKE/cmake-$CMAKE.tar.gz | tar xzf -
mkdir cmake-build
cd cmake-build
-hide_output ../cmake-3.6.3/configure --prefix=/rustroot
+hide_output ../cmake-$CMAKE/configure --prefix=/rustroot
hide_output make -j10
hide_output make install
cd ..
rm -rf cmake-build
-rm -rf cmake-3.6.3
+rm -rf cmake-$CMAKE
cd ..
rm -rf curl-build
rm -rf curl-$VERSION
-yum erase -y curl
cd ..
rm -rf gcc-build
rm -rf gcc-$GCC
-yum erase -y gcc gcc-c++ binutils
cmake \
sudo \
gdb \
+ zlib1g-dev \
+ lib32z1-dev \
xz-utils
cmake \
sudo \
gdb \
+ zlib1g-dev \
+ lib32z1-dev \
xz-utils
+++ /dev/null
-#!/usr/bin/env python
-# A simple wrapper that forwards the arguments to bash, unless the
-# CI_OVERRIDE_SHELL environment variable is present: in that case the content
-# of that environment variable is used as the shell path.
-
-import os
-import sys
-import subprocess
-
-try:
- shell = os.environ["CI_OVERRIDE_SHELL"]
-except KeyError:
- shell = "bash"
-
-res = subprocess.call([shell] + sys.argv[1:])
-sys.exit(res)
steps:
- name: disable git crlf conversion
run: git config --global core.autocrlf false
- shell: bash
- name: checkout the source code
uses: actions/checkout@v1
defaults:
run:
- # While on Linux and macOS builders it just forwards the arguments to the
- # system bash, this wrapper allows switching from the host's bash.exe to
- # the one we install along with MSYS2 mid-build on Windows.
- #
- # Once the step to install MSYS2 is executed, the CI_OVERRIDE_SHELL
- # environment variable is set pointing to our MSYS2's bash.exe. From that
- # moment the host's bash.exe will not be called anymore.
- #
- # This is needed because we can't launch our own bash.exe from the host
- # bash.exe, as that would load two different cygwin1.dll in memory, causing
- # "cygwin heap mismatch" errors.
- shell: python src/ci/exec-with-shell.py {0}
+ # On Linux, macOS, and Windows, use the system-provided bash as the default
+ # shell. (This should only make a difference on Windows, where the default
+ # shell is PowerShell.)
+ shell: bash
jobs:
pr:
- name: dist-powerpc64le-linux
<<: *job-linux-xl
+ - name: dist-riscv64-linux
+ <<: *job-linux-xl
+
- name: dist-s390x-linux
<<: *job-linux-xl
- name: publish toolstate
run: src/ci/publish_toolstate.sh
+ shell: bash
env:
TOOLSTATE_REPO_ACCESS_TOKEN: ${{ secrets.TOOLSTATE_REPO_ACCESS_TOKEN }}
<<: *step
ulimit -c unlimited
fi
+# There was a bad interaction between "old" 32-bit binaries on current 64-bit
+# kernels with selinux enabled, where ASLR mmap would sometimes choose a low
+# address and then block it for being below `vm.mmap_min_addr` -> `EACCES`.
+# This is probably a kernel bug, but setting `ulimit -Hs` works around it.
+# See also `dist-i686-linux` where this setting is enabled.
+if [ "$SET_HARD_RLIMIT_STACK" = "1" ]; then
+ rlimit_stack=$(ulimit -Ss)
+ if [ "$rlimit_stack" != "" ]; then
+ ulimit -Hs "$rlimit_stack"
+ fi
+fi
+
ci_dir=`cd $(dirname $0) && pwd`
source "$ci_dir/shared.sh"
mkdir -p "${msys2Path}/home/${USERNAME}"
ciCommandAddPath "${msys2Path}/usr/bin"
- echo "switching shell to use our own bash"
- ciCommandSetEnv CI_OVERRIDE_SHELL "${msys2Path}/usr/bin/bash.exe"
-
# Detect the native Python version installed on the agent. On GitHub
# Actions, the C:\hostedtoolcache\windows\Python directory contains a
# subdirectory for each installed Python version.
[unstable-doc-cfg]: ../unstable-book/language-features/doc-cfg.html
[issue-doc-cfg]: https://github.com/rust-lang/rust/issues/43781
+### Adding your trait to the "Important Traits" dialog
+
+Rustdoc keeps a list of a few traits that are believed to be "fundamental" to a given type when
+implemented on it. These traits are intended to be the primary interface for their types, and are
+often the only thing available to be documented on their types. For this reason, Rustdoc will track
+when a given type implements one of these traits and call special attention to it when a function
+returns one of these types. This is the "Important Traits" dialog, visible as a circle-i button next
+to the function; clicking the button opens the dialog.
+
+In the standard library, the traits that qualify for inclusion are `Iterator`, `io::Read`, and
+`io::Write`. However, rather than being implemented as a hard-coded list, these traits have a
+special marker attribute on them: `#[doc(spotlight)]`. This means that you could apply this
+attribute to your own trait to include it in the "Important Traits" dialog in documentation.
+
+The `#[doc(spotlight)]` attribute currently requires the `#![feature(doc_spotlight)]` feature gate.
+For more information, see [its chapter in the Unstable Book][unstable-spotlight] and [its tracking
+issue][issue-spotlight].
+
+[unstable-spotlight]: ../unstable-book/language-features/doc-spotlight.html
+[issue-spotlight]: https://github.com/rust-lang/rust/issues/45040
+
### Exclude certain dependencies from documentation
The standard library uses several dependencies which, in turn, use several types and traits from the
--- /dev/null
+# `doc_spotlight`
+
+The tracking issue for this feature is: [#45040]
+
+The `doc_spotlight` feature allows the use of the `spotlight` parameter to the `#[doc]` attribute,
+to "spotlight" a specific trait on the return values of functions. Adding a `#[doc(spotlight)]`
+attribute to a trait definition will make rustdoc print extra information for functions which return
+a type that implements that trait. This attribute is applied to the `Iterator`, `io::Read`, and
+`io::Write` traits in the standard library.
+
+You can do this on your own traits, like this:
+
+```
+#![feature(doc_spotlight)]
+
+#[doc(spotlight)]
+pub trait MyTrait {}
+
+pub struct MyStruct;
+impl MyTrait for MyStruct {}
+
+/// The docs for this function will have an extra line about `MyStruct` implementing `MyTrait`,
+/// without having to write that yourself!
+pub fn my_fn() -> MyStruct { MyStruct }
+```
+
+This feature was originally implemented in PR [#45039].
+
+[#45040]: https://github.com/rust-lang/rust/issues/45040
+[#45039]: https://github.com/rust-lang/rust/pull/45039
//! A priority queue implemented with a binary heap.
//!
-//! Insertion and popping the largest element have `O(log(n))` time complexity.
-//! Checking the largest element is `O(1)`. Converting a vector to a binary heap
-//! can be done in-place, and has `O(n)` complexity. A binary heap can also be
-//! converted to a sorted vector in-place, allowing it to be used for an `O(n * log(n))`
+//! Insertion and popping the largest element have *O*(log(*n*)) time complexity.
+//! Checking the largest element is *O*(1). Converting a vector to a binary heap
+//! can be done in-place, and has *O*(*n*) complexity. A binary heap can also be
+//! converted to a sorted vector in-place, allowing it to be used for an *O*(*n* \* log(*n*))
//! in-place heapsort.
//!
//! # Examples
///
/// | [push] | [pop] | [peek]/[peek\_mut] |
/// |--------|-----------|--------------------|
-/// | O(1)~ | O(log(n)) | O(1) |
/// | *O*(1)~ | *O*(log(*n*)) | *O*(1) |
///
/// The value for `push` is an expected cost; the method documentation gives a
/// more detailed analysis.
///
/// # Time complexity
///
- /// Cost is `O(1)` in the worst case.
+ /// Cost is *O*(1) in the worst case.
#[stable(feature = "binary_heap_peek_mut", since = "1.12.0")]
pub fn peek_mut(&mut self) -> Option<PeekMut<'_, T>> {
if self.is_empty() { None } else { Some(PeekMut { heap: self, sift: true }) }
///
/// # Time complexity
///
- /// The worst case cost of `pop` on a heap containing *n* elements is `O(log(n))`.
+ /// The worst case cost of `pop` on a heap containing *n* elements is *O*(log(*n*)).
#[stable(feature = "rust1", since = "1.0.0")]
pub fn pop(&mut self) -> Option<T> {
self.data.pop().map(|mut item| {
///
/// The expected cost of `push`, averaged over every possible ordering of
/// the elements being pushed, and over a sufficiently large number of
- /// pushes, is `O(1)`. This is the most meaningful cost metric when pushing
+ /// pushes, is *O*(1). This is the most meaningful cost metric when pushing
/// elements that are *not* already in any sorted pattern.
///
/// The time complexity degrades if elements are pushed in predominantly
/// ascending order. In the worst case, elements are pushed in ascending
- /// sorted order and the amortized cost per push is `O(log(n))` against a heap
+ /// sorted order and the amortized cost per push is *O*(log(*n*)) against a heap
/// containing *n* elements.
///
- /// The worst case cost of a *single* call to `push` is `O(n)`. The worst case
+ /// The worst case cost of a *single* call to `push` is *O*(*n*). The worst case
/// occurs when capacity is exhausted and needs a resize. The resize cost
/// has been amortized in the previous figures.
#[stable(feature = "rust1", since = "1.0.0")]
/// The remaining elements will be removed on drop in heap order.
///
/// Note:
- /// * `.drain_sorted()` is `O(n * log(n))`; much slower than `.drain()`.
+ /// * `.drain_sorted()` is *O*(*n* \* log(*n*)); much slower than `.drain()`.
/// You should use the latter for most cases.
///
/// # Examples
///
/// # Time complexity
///
- /// Cost is `O(1)` in the worst case.
+ /// Cost is *O*(1) in the worst case.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn peek(&self) -> Option<&T> {
self.data.get(0)
impl<T: Ord> From<Vec<T>> for BinaryHeap<T> {
/// Converts a `Vec<T>` into a `BinaryHeap<T>`.
///
- /// This conversion happens in-place, and has `O(n)` time complexity.
+ /// This conversion happens in-place, and has *O*(*n*) time complexity.
fn from(vec: Vec<T>) -> BinaryHeap<T> {
let mut heap = BinaryHeap { data: vec };
heap.rebuild();
/// any other key, as determined by the [`Ord`] trait, changes while it is in the map. This is
/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
///
-/// [`Ord`]: ../../std/cmp/trait.Ord.html
-/// [`Cell`]: ../../std/cell/struct.Cell.html
-/// [`RefCell`]: ../../std/cell/struct.RefCell.html
+/// [`Ord`]: core::cmp::Ord
+/// [`Cell`]: core::cell::Cell
+/// [`RefCell`]: core::cell::RefCell
///
/// # Examples
///
let mut out_tree = BTreeMap { root: Some(node::Root::new_leaf()), length: 0 };
{
- let root = out_tree.root.as_mut().unwrap();
+ let root = out_tree.root.as_mut().unwrap(); // unwrap succeeds because we just wrapped
let mut out_node = match root.as_mut().force() {
Leaf(leaf) => leaf,
Internal(_) => unreachable!(),
}
Internal(internal) => {
let mut out_tree = clone_subtree(internal.first_edge().descend());
- out_tree.ensure_root_is_owned();
{
- // Ideally we'd use the return of ensure_root_is_owned
- // instead of re-unwrapping here but unfortunately that
- // borrows all of out_tree and we need access to the
- // length below.
- let mut out_node = out_tree.root.as_mut().unwrap().push_level();
+ let out_root = BTreeMap::ensure_is_owned(&mut out_tree.root);
+ let mut out_node = out_root.push_level();
let mut in_edge = internal.first_edge();
while let Ok(kv) = in_edge.right_kv() {
let (k, v) = kv.into_kv();
// Ord` constraint, which this method lacks.
BTreeMap { root: None, length: 0 }
} else {
- clone_subtree(self.root.as_ref().unwrap().as_ref())
+ clone_subtree(self.root.as_ref().unwrap().as_ref()) // unwrap succeeds because not empty
}
}
}
}
fn replace(&mut self, key: K) -> Option<K> {
- self.ensure_root_is_owned();
- match search::search_tree::<marker::Mut<'_>, K, (), K>(self.root.as_mut()?.as_mut(), &key) {
+ let root = Self::ensure_is_owned(&mut self.root);
+ match search::search_tree::<marker::Mut<'_>, K, (), K>(root.as_mut(), &key) {
Found(handle) => Some(mem::replace(handle.into_kv_mut().0, key)),
GoDown(handle) => {
VacantEntry { key, handle, length: &mut self.length, _marker: PhantomData }
/// This `struct` is created by the [`iter`] method on [`BTreeMap`]. See its
/// documentation for more.
///
-/// [`iter`]: struct.BTreeMap.html#method.iter
-/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`iter`]: BTreeMap::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a, V: 'a> {
range: Range<'a, K, V>,
/// This `struct` is created by the [`iter_mut`] method on [`BTreeMap`]. See its
/// documentation for more.
///
-/// [`iter_mut`]: struct.BTreeMap.html#method.iter_mut
-/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`iter_mut`]: BTreeMap::iter_mut
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct IterMut<'a, K: 'a, V: 'a> {
/// This `struct` is created by the [`into_iter`] method on [`BTreeMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
-/// [`into_iter`]: struct.BTreeMap.html#method.into_iter
-/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`into_iter`]: IntoIterator::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K, V> {
front: Option<Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge>>,
/// This `struct` is created by the [`keys`] method on [`BTreeMap`]. See its
/// documentation for more.
///
-/// [`keys`]: struct.BTreeMap.html#method.keys
-/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`keys`]: BTreeMap::keys
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Keys<'a, K: 'a, V: 'a> {
inner: Iter<'a, K, V>,
/// This `struct` is created by the [`values`] method on [`BTreeMap`]. See its
/// documentation for more.
///
-/// [`values`]: struct.BTreeMap.html#method.values
-/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`values`]: BTreeMap::values
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Values<'a, K: 'a, V: 'a> {
inner: Iter<'a, K, V>,
/// This `struct` is created by the [`values_mut`] method on [`BTreeMap`]. See its
/// documentation for more.
///
-/// [`values_mut`]: struct.BTreeMap.html#method.values_mut
-/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`values_mut`]: BTreeMap::values_mut
#[stable(feature = "map_values_mut", since = "1.10.0")]
#[derive(Debug)]
pub struct ValuesMut<'a, K: 'a, V: 'a> {
/// This `struct` is created by the [`range`] method on [`BTreeMap`]. See its
/// documentation for more.
///
-/// [`range`]: struct.BTreeMap.html#method.range
-/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`range`]: BTreeMap::range
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct Range<'a, K: 'a, V: 'a> {
front: Option<Handle<NodeRef<marker::Immut<'a>, K, V, marker::Leaf>, marker::Edge>>,
/// This `struct` is created by the [`range_mut`] method on [`BTreeMap`]. See its
/// documentation for more.
///
-/// [`range_mut`]: struct.BTreeMap.html#method.range_mut
-/// [`BTreeMap`]: struct.BTreeMap.html
+/// [`range_mut`]: BTreeMap::range_mut
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct RangeMut<'a, K: 'a, V: 'a> {
front: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
///
/// This `enum` is constructed from the [`entry`] method on [`BTreeMap`].
///
-/// [`BTreeMap`]: struct.BTreeMap.html
-/// [`entry`]: struct.BTreeMap.html#method.entry
+/// [`entry`]: BTreeMap::entry
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Entry<'a, K: 'a, V: 'a> {
/// A vacant entry.
// Second, we build a tree from the sorted sequence in linear time.
self.from_sorted_iter(iter);
- self.fix_right_edge();
}
/// Constructs a double-ended iterator over a sub-range of elements in the map.
#[stable(feature = "rust1", since = "1.0.0")]
pub fn entry(&mut self, key: K) -> Entry<'_, K, V> {
// FIXME(@porglezomp) Avoid allocating if we don't insert
- self.ensure_root_is_owned();
- match search::search_tree(self.root.as_mut().unwrap().as_mut(), &key) {
+ let root = Self::ensure_is_owned(&mut self.root);
+ match search::search_tree(root.as_mut(), &key) {
Found(handle) => {
Occupied(OccupiedEntry { handle, length: &mut self.length, _marker: PhantomData })
}
}
fn from_sorted_iter<I: Iterator<Item = (K, V)>>(&mut self, iter: I) {
- self.ensure_root_is_owned();
- let mut cur_node = self.root.as_mut().unwrap().as_mut().last_leaf_edge().into_node();
+ let root = Self::ensure_is_owned(&mut self.root);
+ let mut cur_node = root.as_mut().last_leaf_edge().into_node();
// Iterate through all key-value pairs, pushing them into nodes at the right level.
for (key, value) in iter {
// Try to push key-value pair into the current leaf node.
self.length += 1;
}
+ Self::fix_right_edge(root)
}
- fn fix_right_edge(&mut self) {
+ fn fix_right_edge(root: &mut node::Root<K, V>) {
// Handle underfull nodes, start from the top.
- let mut cur_node = self.root.as_mut().unwrap().as_mut();
+ let mut cur_node = root.as_mut();
while let Internal(internal) = cur_node.force() {
// Check if right-most child is underfull.
let mut last_edge = internal.last_edge();
}
let total_num = self.len();
+ let left_root = self.root.as_mut().unwrap(); // unwrap succeeds because not empty
let mut right = Self::new();
- let right_root = right.ensure_root_is_owned();
- for _ in 0..(self.root.as_ref().unwrap().as_ref().height()) {
+ let right_root = Self::ensure_is_owned(&mut right.root);
+ for _ in 0..left_root.height() {
right_root.push_level();
}
{
- let mut left_node = self.root.as_mut().unwrap().as_mut();
- let mut right_node = right.root.as_mut().unwrap().as_mut();
+ let mut left_node = left_root.as_mut();
+ let mut right_node = right_root.as_mut();
loop {
let mut split_edge = match search::search_node(left_node, key) {
}
}
- self.fix_right_border();
- right.fix_left_border();
+ left_root.fix_right_border();
+ right_root.fix_left_border();
- if self.root.as_ref().unwrap().as_ref().height()
- < right.root.as_ref().unwrap().as_ref().height()
- {
+ if left_root.height() < right_root.height() {
self.recalc_length();
right.length = total_num - self.len();
} else {
self.length = dfs(self.root.as_ref().unwrap().as_ref());
}
-
- /// Removes empty levels on the top.
- fn fix_top(&mut self) {
- loop {
- {
- let node = self.root.as_ref().unwrap().as_ref();
- if node.height() == 0 || node.len() > 0 {
- break;
- }
- }
- self.root.as_mut().unwrap().pop_level();
- }
- }
-
- fn fix_right_border(&mut self) {
- self.fix_top();
-
- {
- let mut cur_node = self.root.as_mut().unwrap().as_mut();
-
- while let Internal(node) = cur_node.force() {
- let mut last_kv = node.last_kv();
-
- if last_kv.can_merge() {
- cur_node = last_kv.merge().descend();
- } else {
- let right_len = last_kv.reborrow().right_edge().descend().len();
- // `MINLEN + 1` to avoid readjust if merge happens on the next level.
- if right_len < node::MIN_LEN + 1 {
- last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
- }
- cur_node = last_kv.right_edge().descend();
- }
- }
- }
-
- self.fix_top();
- }
-
- /// The symmetric clone of `fix_right_border`.
- fn fix_left_border(&mut self) {
- self.fix_top();
-
- {
- let mut cur_node = self.root.as_mut().unwrap().as_mut();
-
- while let Internal(node) = cur_node.force() {
- let mut first_kv = node.first_kv();
-
- if first_kv.can_merge() {
- cur_node = first_kv.merge().descend();
- } else {
- let left_len = first_kv.reborrow().left_edge().descend().len();
- if left_len < node::MIN_LEN + 1 {
- first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
- }
- cur_node = first_kv.left_edge().descend();
- }
- }
- }
-
- self.fix_top();
- }
}
#[stable(feature = "rust1", since = "1.0.0")]
pred: F,
inner: DrainFilterInner<'a, K, V>,
}
+/// Most of the implementation of DrainFilter, independent of the type
+/// of the predicate, thus also serving for BTreeSet::DrainFilter.
pub(super) struct DrainFilterInner<'a, K: 'a, V: 'a> {
length: &'a mut usize,
cur_leaf_edge: Option<Handle<NodeRef<marker::Mut<'a>, K, V, marker::Leaf>, marker::Edge>>,
}
/// If the root node is the empty (non-allocated) root node, allocate our
- /// own node.
- fn ensure_root_is_owned(&mut self) -> &mut node::Root<K, V> {
- self.root.get_or_insert_with(node::Root::new_leaf)
+ /// own node. Is an associated function to avoid borrowing the entire BTreeMap.
+ fn ensure_is_owned(root: &mut Option<node::Root<K, V>>) -> &mut node::Root<K, V> {
+ root.get_or_insert_with(node::Root::new_leaf)
}
}
}
}
+impl<K, V> node::Root<K, V> {
+ /// Removes empty levels on the top, but keeps an empty leaf if the entire tree is empty.
+ fn fix_top(&mut self) {
+ while self.height() > 0 && self.as_ref().len() == 0 {
+ self.pop_level();
+ }
+ }
+
+ fn fix_right_border(&mut self) {
+ self.fix_top();
+
+ {
+ let mut cur_node = self.as_mut();
+
+ while let Internal(node) = cur_node.force() {
+ let mut last_kv = node.last_kv();
+
+ if last_kv.can_merge() {
+ cur_node = last_kv.merge().descend();
+ } else {
+ let right_len = last_kv.reborrow().right_edge().descend().len();
+ // `MIN_LEN + 1` to avoid readjust if merge happens on the next level.
+ if right_len < node::MIN_LEN + 1 {
+ last_kv.bulk_steal_left(node::MIN_LEN + 1 - right_len);
+ }
+ cur_node = last_kv.right_edge().descend();
+ }
+ }
+ }
+
+ self.fix_top();
+ }
+
+ /// The symmetric clone of `fix_right_border`.
+ fn fix_left_border(&mut self) {
+ self.fix_top();
+
+ {
+ let mut cur_node = self.as_mut();
+
+ while let Internal(node) = cur_node.force() {
+ let mut first_kv = node.first_kv();
+
+ if first_kv.can_merge() {
+ cur_node = first_kv.merge().descend();
+ } else {
+ let left_len = first_kv.reborrow().left_edge().descend().len();
+ if left_len < node::MIN_LEN + 1 {
+ first_kv.bulk_steal_right(node::MIN_LEN + 1 - left_len);
+ }
+ cur_node = first_kv.left_edge().descend();
+ }
+ }
+ }
+
+ self.fix_top();
+ }
+}
+
enum UnderflowResult<'a, K, V> {
AtRoot,
Merged(Handle<NodeRef<marker::Mut<'a>, K, V, marker::Internal>, marker::Edge>, bool, usize),
impl<K, V> Handle<NodeRef<marker::Owned, K, V, marker::Leaf>, marker::Edge> {
/// Moves the leaf edge handle to the next leaf edge and returns the key and value
/// in between, while deallocating any node left behind.
- /// Unsafe for three reasons:
+ /// Unsafe for two reasons:
/// - The caller must ensure that the leaf edge is not the last one in the tree
/// and is not a handle previously resulting from counterpart `next_back_unchecked`.
- /// - If the leaf edge is the last edge of a node, that node and possibly ancestors
+ /// - Further use of the updated leaf edge handle is very dangerous. In particular,
+ /// if the leaf edge is the last edge of a node, that node and possibly ancestors
/// will be deallocated, while the reference to those nodes in the surviving ancestor
- /// is left dangling; thus further use of the leaf edge handle is dangerous.
- /// It is, however, safe to call this method again on the updated handle.
- /// if the two preconditions above hold.
- /// - Using the updated handle may well invalidate the returned references.
+ /// is left dangling.
+ /// The only safe way to proceed with the updated handle is to compare it, drop it,
+ /// call this method again subject to both preconditions listed in the first point,
+ /// or call counterpart `next_back_unchecked` subject to its preconditions.
pub unsafe fn next_unchecked(&mut self) -> (K, V) {
unsafe {
replace(self, |leaf_edge| {
/// Moves the leaf edge handle to the previous leaf edge and returns the key
/// and value in between, while deallocating any node left behind.
- /// Unsafe for three reasons:
+ /// Unsafe for two reasons:
/// - The caller must ensure that the leaf edge is not the first one in the tree
/// and is not a handle previously resulting from counterpart `next_unchecked`.
- /// - If the lead edge is the first edge of a node, that node and possibly ancestors
+ /// - Further use of the updated leaf edge handle is very dangerous. In particular,
+ /// if the leaf edge is the first edge of a node, that node and possibly ancestors
/// will be deallocated, while the reference to those nodes in the surviving ancestor
- /// is left dangling; thus further use of the leaf edge handle is dangerous.
- /// It is, however, safe to call this method again on the updated handle.
- /// if the two preconditions above hold.
- /// - Using the updated handle may well invalidate the returned references.
+ /// is left dangling.
+ /// The only safe way to proceed with the updated handle is to compare it, drop it,
+ /// call this method again subject to both preconditions listed in the first point,
+ /// or call counterpart `next_unchecked` subject to its preconditions.
pub unsafe fn next_back_unchecked(&mut self) -> (K, V) {
unsafe {
replace(self, |leaf_edge| {
data: LeafNode<K, V>,
/// The pointers to the children of this node. `len + 1` of these are considered
- /// initialized and valid.
+ /// initialized and valid, except that during the process of `into_iter` or `drop`,
+ /// some pointers are dangling while others still need to be traversed.
edges: [MaybeUninit<BoxedNode<K, V>>; 2 * B],
}
unsafe impl<K: Send, V: Send> Send for Root<K, V> {}
impl<K, V> Root<K, V> {
+ /// Returns the number of levels below the root.
+ pub fn height(&self) -> usize {
+ self.height
+ }
+
/// Returns a new owned tree, with its own root node that is initially empty.
pub fn new_leaf() -> Self {
Root { node: BoxedNode::from_leaf(Box::new(unsafe { LeafNode::new() })), height: 0 }
impl<'a, K, V, Type> NodeRef<marker::Mut<'a>, K, V, Type> {
/// Unsafely asserts to the compiler some static information about whether this
- /// node is a `Leaf`.
+ /// node is a `Leaf` or an `Internal`.
unsafe fn cast_unchecked<NewType>(&mut self) -> NodeRef<marker::Mut<'_>, K, V, NewType> {
NodeRef { height: self.height, node: self.node, root: self.root, _marker: PhantomData }
}
}
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::Leaf> {
- /// Adds a key/value pair the end of the node.
+ /// Adds a key/value pair to the end of the node.
pub fn push(&mut self, key: K, val: V) {
assert!(self.len() < CAPACITY);
}
impl<'a, K, V> NodeRef<marker::Mut<'a>, K, V, marker::LeafOrInternal> {
- /// Removes a key/value pair from the end of this node. If this is an internal node,
- /// also removes the edge that was to the right of that pair.
+ /// Removes a key/value pair from the end of this node and returns the pair.
+ /// If this is an internal node, also removes the edge that was to the right
+ /// of that pair and returns the orphaned node that this edge owned with its
+ /// parent erased.
pub fn pop(&mut self) -> (K, V, Option<Root<K, V>>) {
assert!(self.len() > 0);
}
/// Unsafely asserts to the compiler some static information about whether the underlying
- /// node of this handle is a `Leaf`.
+ /// node of this handle is a `Leaf` or an `Internal`.
unsafe fn cast_unchecked<NewType>(
&mut self,
) -> Handle<NodeRef<marker::Mut<'_>, K, V, NewType>, marker::Edge> {
/// to any other item, as determined by the [`Ord`] trait, changes while it is in the set. This is
/// normally only possible through [`Cell`], [`RefCell`], global state, I/O, or unsafe code.
///
-/// [`BTreeMap`]: struct.BTreeMap.html
-/// [`Ord`]: ../../std/cmp/trait.Ord.html
-/// [`Cell`]: ../../std/cell/struct.Cell.html
-/// [`RefCell`]: ../../std/cell/struct.RefCell.html
+/// [`Ord`]: core::cmp::Ord
+/// [`Cell`]: core::cell::Cell
+/// [`RefCell`]: core::cell::RefCell
///
/// # Examples
///
/// This `struct` is created by the [`iter`] method on [`BTreeSet`].
/// See its documentation for more.
///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`iter`]: struct.BTreeSet.html#method.iter
+/// [`iter`]: BTreeSet::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, T: 'a> {
iter: Keys<'a, T, ()>,
/// This `struct` is created by the [`into_iter`] method on [`BTreeSet`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`into_iter`]: struct.BTreeSet.html#method.into_iter
+/// [`into_iter`]: BTreeSet#method.into_iter
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Debug)]
pub struct IntoIter<T> {
/// This `struct` is created by the [`range`] method on [`BTreeSet`].
/// See its documentation for more.
///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`range`]: struct.BTreeSet.html#method.range
+/// [`range`]: BTreeSet::range
#[derive(Debug)]
#[stable(feature = "btree_range", since = "1.17.0")]
pub struct Range<'a, T: 'a> {
/// This `struct` is created by the [`difference`] method on [`BTreeSet`].
/// See its documentation for more.
///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`difference`]: struct.BTreeSet.html#method.difference
+/// [`difference`]: BTreeSet::difference
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Difference<'a, T: 'a> {
inner: DifferenceInner<'a, T>,
/// This `struct` is created by the [`symmetric_difference`] method on
/// [`BTreeSet`]. See its documentation for more.
///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`symmetric_difference`]: struct.BTreeSet.html#method.symmetric_difference
+/// [`symmetric_difference`]: BTreeSet::symmetric_difference
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SymmetricDifference<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
/// This `struct` is created by the [`intersection`] method on [`BTreeSet`].
/// See its documentation for more.
///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`intersection`]: struct.BTreeSet.html#method.intersection
+/// [`intersection`]: BTreeSet::intersection
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Intersection<'a, T: 'a> {
inner: IntersectionInner<'a, T>,
/// This `struct` is created by the [`union`] method on [`BTreeSet`].
/// See its documentation for more.
///
-/// [`BTreeSet`]: struct.BTreeSet.html
-/// [`union`]: struct.BTreeSet.html#method.union
+/// [`union`]: BTreeSet::union
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Union<'a, T: 'a>(MergeIterInner<Iter<'a, T>>);
/// This reuses all the nodes from `other` and moves them into `self`. After
/// this operation, `other` becomes empty.
///
- /// This operation should compute in `O(1)` time and `O(1)` memory.
+ /// This operation should compute in *O*(1) time and *O*(1) memory.
///
/// # Examples
///
/// Returns `true` if the `LinkedList` is empty.
///
- /// This operation should compute in `O(1)` time.
+ /// This operation should compute in *O*(1) time.
///
/// # Examples
///
/// Returns the length of the `LinkedList`.
///
- /// This operation should compute in `O(1)` time.
+ /// This operation should compute in *O*(1) time.
///
/// # Examples
///
/// Removes all elements from the `LinkedList`.
///
- /// This operation should compute in `O(n)` time.
+ /// This operation should compute in *O*(*n*) time.
///
/// # Examples
///
/// Adds an element first in the list.
///
- /// This operation should compute in `O(1)` time.
+ /// This operation should compute in *O*(1) time.
///
/// # Examples
///
/// Removes the first element and returns it, or `None` if the list is
/// empty.
///
- /// This operation should compute in `O(1)` time.
+ /// This operation should compute in *O*(1) time.
///
/// # Examples
///
/// Appends an element to the back of a list.
///
- /// This operation should compute in `O(1)` time.
+ /// This operation should compute in *O*(1) time.
///
/// # Examples
///
/// Removes the last element from a list and returns it, or `None` if
/// it is empty.
///
- /// This operation should compute in `O(1)` time.
+ /// This operation should compute in *O*(1) time.
///
/// # Examples
///
/// Splits the list into two at the given index. Returns everything after the given index,
/// including the index.
///
- /// This operation should compute in `O(n)` time.
+ /// This operation should compute in *O*(*n*) time.
///
/// # Panics
///
/// Removes the element at the given index and returns it.
///
- /// This operation should compute in `O(n)` time.
+ /// This operation should compute in *O*(*n*) time.
///
/// # Panics
/// Panics if at >= len
//! A double-ended queue implemented with a growable ring buffer.
//!
-//! This queue has `O(1)` amortized inserts and removals from both ends of the
-//! container. It also has `O(1)` indexing like a vector. The contained elements
+//! This queue has *O*(1) amortized inserts and removals from both ends of the
+//! container. It also has *O*(1) indexing like a vector. The contained elements
//! are not required to be copyable, and the queue will be sendable if the
//! contained type is sendable.
/// Removes an element from anywhere in the `VecDeque` and returns it,
/// replacing it with the first element.
///
- /// This does not preserve ordering, but is `O(1)`.
+ /// This does not preserve ordering, but is *O*(1).
///
/// Returns `None` if `index` is out of bounds.
///
/// Removes an element from anywhere in the `VecDeque` and returns it, replacing it with the
/// last element.
///
- /// This does not preserve ordering, but is `O(1)`.
+ /// This does not preserve ordering, but is *O*(1).
///
/// Returns `None` if `index` is out of bounds.
///
///
/// # Complexity
///
- /// Takes `O(min(mid, len() - mid))` time and no extra space.
+ /// Takes *O*(min(mid, len() - mid)) time and no extra space.
///
/// # Examples
///
///
/// # Complexity
///
- /// Takes `O(min(k, len() - k))` time and no extra space.
+ /// Takes *O*(min(k, len() - k)) time and no extra space.
///
/// # Examples
///
/// [`Vec<T>`]: crate::vec::Vec
/// [`VecDeque<T>`]: crate::collections::VecDeque
///
- /// This never needs to re-allocate, but does need to do `O(n)` data movement if
+ /// This never needs to re-allocate, but does need to do *O*(*n*) data movement if
/// the circular buffer doesn't happen to be at the beginning of the allocation.
///
/// # Examples
/// ```
/// use std::collections::VecDeque;
///
- /// // This one is O(1).
+ /// // This one is O(1).
/// let deque: VecDeque<_> = (1..5).collect();
/// let ptr = deque.as_slices().0.as_ptr();
/// let vec = Vec::from(deque);
#![feature(const_generic_impls_guard)]
#![feature(const_generics)]
#![feature(const_in_array_repeat_expressions)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
#![feature(cow_is_borrowed)]
#![feature(deque_range)]
#![feature(dispatch_from_dyn)]
/// While doing so, it attempts to find matches of a pattern. If it finds any, it
/// replaces them with the replacement string slice.
///
- /// [`String`]: string/struct.String.html
- ///
/// # Examples
///
/// Basic usage:
/// While doing so, it attempts to find matches of a pattern. If it finds any, it
/// replaces them with the replacement string slice at most `count` times.
///
- /// [`String`]: string/struct.String.html
- ///
/// # Examples
///
/// Basic usage:
/// the case, this function returns a [`String`] instead of modifying the
/// parameter in-place.
///
- /// [`String`]: string/struct.String.html
- ///
/// # Examples
///
/// Basic usage:
/// the case, this function returns a [`String`] instead of modifying the
/// parameter in-place.
///
- /// [`String`]: string/struct.String.html
- ///
/// # Examples
///
/// Basic usage:
/// Converts a [`Box<str>`] into a [`String`] without copying or allocating.
///
- /// [`String`]: string/struct.String.html
- /// [`Box<str>`]: boxed/struct.Box.html
+ /// [`Box<str>`]: Box
///
/// # Examples
///
///
/// This function will panic if the capacity would overflow.
///
- /// [`String`]: string/struct.String.html
- ///
/// # Examples
///
/// Basic usage:
/// assert_eq!("GRüßE, JüRGEN ❤", s.to_ascii_uppercase());
/// ```
///
- /// [`make_ascii_uppercase`]: #method.make_ascii_uppercase
+ /// [`make_ascii_uppercase`]: str::make_ascii_uppercase
/// [`to_uppercase`]: #method.to_uppercase
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
/// assert_eq!("grüße, jürgen ❤", s.to_ascii_lowercase());
/// ```
///
- /// [`make_ascii_lowercase`]: #method.make_ascii_lowercase
+ /// [`make_ascii_lowercase`]: str::make_ascii_lowercase
/// [`to_lowercase`]: #method.to_lowercase
#[stable(feature = "ascii_methods_on_intrinsics", since = "1.23.0")]
#[inline]
//! [`ToString`]s, and several error types that may result from working with
//! [`String`]s.
//!
-//! [`ToString`]: trait.ToString.html
-//!
//! # Examples
//!
//! There are multiple ways to create a new [`String`] from a string literal:
//! You can create a new [`String`] from an existing one by concatenating with
//! `+`:
//!
-//! [`String`]: struct.String.html
-//!
//! ```
//! let s = "Hello".to_string();
//!
/// contents of the string. It has a close relationship with its borrowed
/// counterpart, the primitive [`str`].
///
-/// [`str`]: ../../std/primitive.str.html
-///
/// # Examples
///
-/// You can create a `String` from a literal string with [`String::from`]:
+/// You can create a `String` from [a literal string][str] with [`String::from`]:
+///
+/// [`String::from`]: From::from
///
/// ```
/// let hello = String::from("Hello, world!");
/// hello.push_str("orld!");
/// ```
///
-/// [`String::from`]: #method.from
-/// [`char`]: ../../std/primitive.char.html
-/// [`push`]: #method.push
-/// [`push_str`]: #method.push_str
+/// [`push`]: String::push
+/// [`push_str`]: String::push_str
///
/// If you have a vector of UTF-8 bytes, you can create a `String` from it with
/// the [`from_utf8`] method:
/// assert_eq!("💖", sparkle_heart);
/// ```
///
-/// [`from_utf8`]: #method.from_utf8
+/// [`from_utf8`]: String::from_utf8
///
/// # UTF-8
///
/// The [`bytes`] and [`chars`] methods return iterators over the first
/// two, respectively.
///
-/// [`bytes`]: #method.bytes
-/// [`chars`]: #method.chars
+/// [`bytes`]: str::bytes
+/// [`chars`]: str::chars
///
/// # Deref
///
/// assert_eq!(String::from("Once upon a time..."), s);
/// ```
///
-/// [`as_ptr`]: #method.as_ptr
-/// [`len`]: #method.len
-/// [`capacity`]: #method.capacity
+/// [`as_ptr`]: str::as_ptr
+/// [`len`]: String::len
+/// [`capacity`]: String::capacity
///
/// If a `String` has enough capacity, adding elements to it will not
/// re-allocate. For example, consider this program:
/// }
/// ```
///
-/// [`with_capacity`]: #method.with_capacity
+/// [`with_capacity`]: String::with_capacity
///
/// We end up with a different output:
///
///
/// Here, there's no need to allocate more memory inside the loop.
///
-/// [`&str`]: ../../std/primitive.str.html
-/// [`Deref`]: ../../std/ops/trait.Deref.html
-/// [`as_str()`]: struct.String.html#method.as_str
+/// [`&str`]: str
+/// [`Deref`]: core::ops::Deref
+/// [`as_str()`]: String::as_str
#[derive(PartialOrd, Eq, Ord)]
#[cfg_attr(not(test), rustc_diagnostic_item = "string_type")]
#[stable(feature = "rust1", since = "1.0.0")]
/// [`into_bytes`] method will give back the byte vector that was used in the
/// conversion attempt.
///
-/// [`from_utf8`]: struct.String.html#method.from_utf8
-/// [`String`]: struct.String.html
-/// [`into_bytes`]: struct.FromUtf8Error.html#method.into_bytes
+/// [`from_utf8`]: String::from_utf8
+/// [`into_bytes`]: FromUtf8Error::into_bytes
///
/// The [`Utf8Error`] type provided by [`std::str`] represents an error that may
/// occur when converting a slice of [`u8`]s to a [`&str`]. In this sense, it's
/// an analogue to `FromUtf8Error`, and you can get one from a `FromUtf8Error`
/// through the [`utf8_error`] method.
///
-/// [`Utf8Error`]: ../../std/str/struct.Utf8Error.html
-/// [`std::str`]: ../../std/str/index.html
-/// [`u8`]: ../../std/primitive.u8.html
-/// [`&str`]: ../../std/primitive.str.html
-/// [`utf8_error`]: #method.utf8_error
+/// [`Utf8Error`]: core::str::Utf8Error
+/// [`std::str`]: core::str
+/// [`&str`]: str
+/// [`utf8_error`]: Self::utf8_error
///
/// # Examples
///
///
/// This type is the error type for the [`from_utf16`] method on [`String`].
///
-/// [`from_utf16`]: struct.String.html#method.from_utf16
-/// [`String`]: struct.String.html
-///
+/// [`from_utf16`]: String::from_utf16
+///
/// # Examples
///
/// Basic usage:
/// consider the [`with_capacity`] method to prevent excessive
/// re-allocation.
///
- /// [`with_capacity`]: #method.with_capacity
+ /// [`with_capacity`]: String::with_capacity
///
/// # Examples
///
/// appending a bunch of data to the `String`, reducing the number of
/// reallocations it needs to do.
///
- /// [`capacity`]: #method.capacity
+ /// [`capacity`]: String::capacity
///
/// If the given capacity is `0`, no allocation will occur, and this method
/// is identical to the [`new`] method.
///
- /// [`new`]: #method.new
+ /// [`new`]: String::new
///
/// # Examples
///
/// See the docs for [`FromUtf8Error`] for more details on what you can do
/// with this error.
///
- /// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked
- /// [`String`]: struct.String.html
- /// [`u8`]: ../../std/primitive.u8.html
- /// [`Vec<u8>`]: ../../std/vec/struct.Vec.html
- /// [`&str`]: ../../std/primitive.str.html
- /// [`str::from_utf8`]: ../../std/str/fn.from_utf8.html
- /// [`into_bytes`]: struct.String.html#method.into_bytes
- /// [`FromUtf8Error`]: struct.FromUtf8Error.html
- /// [`Err`]: ../../std/result/enum.Result.html#variant.Err
+ /// [`from_utf8_unchecked`]: String::from_utf8_unchecked
+ /// [`Vec<u8>`]: crate::vec::Vec
+ /// [`&str`]: str
+ /// [`into_bytes`]: String::into_bytes
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn from_utf8(vec: Vec<u8>) -> Result<String, FromUtf8Error> {
/// `from_utf8_lossy()` will replace any invalid UTF-8 sequences with
/// [`U+FFFD REPLACEMENT CHARACTER`][U+FFFD], which looks like this: �
///
- /// [`u8`]: ../../std/primitive.u8.html
/// [byteslice]: ../../std/primitive.slice.html
- /// [U+FFFD]: ../char/constant.REPLACEMENT_CHARACTER.html
+ /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER
///
/// If you are sure that the byte slice is valid UTF-8, and you don't want
/// to incur the overhead of the conversion, there is an unsafe version
/// of this function, [`from_utf8_unchecked`], which has the same behavior
/// but skips the checks.
///
- /// [`from_utf8_unchecked`]: struct.String.html#method.from_utf8_unchecked
+ /// [`from_utf8_unchecked`]: String::from_utf8_unchecked
///
/// This function returns a [`Cow<'a, str>`]. If our byte slice is invalid
/// UTF-8, then we need to insert the replacement characters, which will
/// it's already valid UTF-8, we don't need a new allocation. This return
/// type allows us to handle both cases.
///
- /// [`Cow<'a, str>`]: ../../std/borrow/enum.Cow.html
+ /// [`Cow<'a, str>`]: crate::borrow::Cow
///
/// # Examples
///
/// Decode a UTF-16 encoded vector `v` into a `String`, returning [`Err`]
/// if `v` contains any invalid data.
///
- /// [`Err`]: ../../std/result/enum.Result.html#variant.Err
- ///
/// # Examples
///
/// Basic usage:
/// `from_utf16_lossy` returns a `String` since the UTF-16 to UTF-8
/// conversion requires a memory allocation.
///
- /// [`from_utf8_lossy`]: #method.from_utf8_lossy
- /// [`Cow<'a, str>`]: ../borrow/enum.Cow.html
- /// [U+FFFD]: ../char/constant.REPLACEMENT_CHARACTER.html
+ /// [`from_utf8_lossy`]: String::from_utf8_lossy
+ /// [`Cow<'a, str>`]: crate::borrow::Cow
+ /// [U+FFFD]: core::char::REPLACEMENT_CHARACTER
///
/// # Examples
///
/// into a `String` with the [`from_raw_parts`] function, allowing
/// the destructor to perform the cleanup.
///
- /// [`from_raw_parts`]: #method.from_raw_parts
+ /// [`from_raw_parts`]: String::from_raw_parts
///
/// # Examples
///
///
/// See the safe version, [`from_utf8`], for more details.
///
- /// [`from_utf8`]: struct.String.html#method.from_utf8
+ /// [`from_utf8`]: String::from_utf8
///
/// # Safety
///
///
/// Panics if the new capacity overflows [`usize`].
///
- /// [`reserve_exact`]: struct.String.html#method.reserve_exact
- /// [`usize`]: ../../std/primitive.usize.html
+ /// [`reserve_exact`]: String::reserve_exact
///
/// # Examples
///
/// Consider using the [`reserve`] method unless you absolutely know
/// better than the allocator.
///
- /// [`reserve`]: #method.reserve
+ /// [`reserve`]: String::reserve
///
/// # Panics
///
/// Appends the given [`char`] to the end of this `String`.
///
- /// [`char`]: ../../std/primitive.char.html
- ///
/// # Examples
///
/// Basic usage:
///
/// The inverse of this method is [`from_utf8`].
///
- /// [`from_utf8`]: #method.from_utf8
+ /// [`from_utf8`]: String::from_utf8
///
/// # Examples
///
///
/// Panics if `new_len` does not lie on a [`char`] boundary.
///
- /// [`char`]: ../../std/primitive.char.html
- ///
/// # Examples
///
/// Basic usage:
///
/// Returns [`None`] if this `String` is empty.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// Basic usage:
/// Removes a [`char`] from this `String` at a byte position and returns it.
///
- /// This is an `O(n)` operation, as it requires copying every element in the
+ /// This is an *O*(*n*) operation, as it requires copying every element in the
/// buffer.
///
/// # Panics
/// Panics if `idx` is larger than or equal to the `String`'s length,
/// or if it does not lie on a [`char`] boundary.
///
- /// [`char`]: ../../std/primitive.char.html
- ///
/// # Examples
///
/// Basic usage:
/// Inserts a character into this `String` at a byte position.
///
- /// This is an `O(n)` operation as it requires copying every element in the
+ /// This is an *O*(*n*) operation as it requires copying every element in the
/// buffer.
///
/// # Panics
/// Panics if `idx` is larger than the `String`'s length, or if it does not
/// lie on a [`char`] boundary.
///
- /// [`char`]: ../../std/primitive.char.html
- ///
/// # Examples
///
/// Basic usage:
/// Inserts a string slice into this `String` at a byte position.
///
- /// This is an `O(n)` operation as it requires copying every element in the
+ /// This is an *O*(*n*) operation as it requires copying every element in the
/// buffer.
///
/// # Panics
/// Panics if `idx` is larger than the `String`'s length, or if it does not
/// lie on a [`char`] boundary.
///
- /// [`char`]: ../../std/primitive.char.html
- ///
/// # Examples
///
/// Basic usage:
/// Panics if the starting point or end point do not lie on a [`char`]
/// boundary, or if they're out of bounds.
///
- /// [`char`]: ../../std/primitive.char.html
- ///
/// # Examples
///
/// Basic usage:
/// Panics if the starting point or end point do not lie on a [`char`]
/// boundary, or if they're out of bounds.
///
- /// [`char`]: ../../std/primitive.char.html
- /// [`Vec::splice`]: ../../std/vec/struct.Vec.html#method.splice
- ///
/// # Examples
///
/// Basic usage:
///
/// This will drop any excess capacity.
///
- /// [`Box`]: ../../std/boxed/struct.Box.html
- /// [`str`]: ../../std/primitive.str.html
- ///
/// # Examples
///
/// Basic usage:
/// an analogue to `FromUtf8Error`. See its documentation for more details
/// on using it.
///
- /// [`Utf8Error`]: ../../std/str/struct.Utf8Error.html
- /// [`std::str`]: ../../std/str/index.html
- /// [`u8`]: ../../std/primitive.u8.html
- /// [`&str`]: ../../std/primitive.str.html
+ /// [`std::str`]: core::str
+ /// [`&str`]: str
///
/// # Examples
///
///
/// This consumes the `String` on the left-hand side and re-uses its buffer (growing it if
/// necessary). This is done to avoid allocating a new `String` and copying the entire contents on
-/// every operation, which would lead to `O(n^2)` running time when building an `n`-byte string by
+/// every operation, which would lead to *O*(*n*^2) running time when building an *n*-byte string by
/// repeated concatenation.
///
/// The string on the right-hand side is only borrowed; its contents are copied into the returned
///
/// This alias exists for backwards compatibility, and may be eventually deprecated.
///
-/// [`Infallible`]: ../../core/convert/enum.Infallible.html
+/// [`Infallible`]: core::convert::Infallible
#[stable(feature = "str_parse_error", since = "1.5.0")]
pub type ParseError = core::convert::Infallible;
/// [`Display`] should be implemented instead, and you get the `ToString`
/// implementation for free.
///
-/// [`Display`]: ../../std/fmt/trait.Display.html
+/// [`Display`]: fmt::Display
#[stable(feature = "rust1", since = "1.0.0")]
pub trait ToString {
/// Converts the given value to a `String`.
/// This struct is created by the [`drain`] method on [`String`]. See its
/// documentation for more.
///
-/// [`drain`]: struct.String.html#method.drain
-/// [`String`]: struct.String.html
+/// [`drain`]: String::drain
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a> {
/// Will be used as &'a mut String in the destructor
/// Provides a raw pointer to the data.
///
- /// The counts are not affected in way and the `Arc` is not consumed. The pointer is valid for
+ /// The counts are not affected in any way and the `Arc` is not consumed. The pointer is valid for
/// as long as there are strong counts in the `Arc`.
///
/// # Examples
//! [`TryFrom<T>`][`TryFrom`] rather than [`Into<U>`][`Into`] or [`TryInto<U>`][`TryInto`],
//! as [`From`] and [`TryFrom`] provide greater flexibility and offer
//! equivalent [`Into`] or [`TryInto`] implementations for free, thanks to a
-//! blanket implementation in the standard library. Only implement [`Into`] or [`TryInto`]
-//! when a conversion to a type outside the current crate is required.
+//! blanket implementation in the standard library. When targeting a version prior to Rust 1.41, it
+//! may be necessary to implement [`Into`] or [`TryInto`] directly when converting to a type
+//! outside the current crate.
//!
//! # Generic Implementations
//!
/// because implementing `From` automatically provides one with an implementation of [`Into`]
/// thanks to the blanket implementation in the standard library.
///
-/// Only implement [`Into`] if a conversion to a type outside the current crate is required.
-/// `From` cannot do these type of conversions because of Rust's orphaning rules.
+/// Only implement [`Into`] when targeting a version prior to Rust 1.41 and converting to a type
+/// outside the current crate.
+/// `From` was not able to do these types of conversions in earlier versions because of Rust's
+/// orphaning rules.
/// See [`Into`] for more details.
///
/// Prefer using [`Into`] over using `From` when specifying trait bounds on a generic function.
#[doc(hidden)]
#[inline]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
- pub fn new_v1(pieces: &'a [&'a str], args: &'a [ArgumentV1<'a>]) -> Arguments<'a> {
+ pub fn new_v1(pieces: &'a [&'static str], args: &'a [ArgumentV1<'a>]) -> Arguments<'a> {
Arguments { pieces, fmt: None, args }
}
#[inline]
#[unstable(feature = "fmt_internals", reason = "internal to format_args!", issue = "none")]
pub fn new_v1_formatted(
- pieces: &'a [&'a str],
+ pieces: &'a [&'static str],
args: &'a [ArgumentV1<'a>],
fmt: &'a [rt::v1::Argument],
) -> Arguments<'a> {
#[derive(Copy, Clone)]
pub struct Arguments<'a> {
// Format string pieces to print.
- pieces: &'a [&'a str],
+ pieces: &'a [&'static str],
// Placeholder specs, or `None` if all specs are default (as in "{}{}").
fmt: Option<&'a [rt::v1::Argument]>,
args: &'a [ArgumentV1<'a>],
}
+impl<'a> Arguments<'a> {
+ /// Get the formatted string, if it has no arguments to be formatted.
+ ///
+ /// This can be used to avoid allocations in the most trivial case.
+ ///
+ /// # Examples
+ ///
+ /// ```rust
+ /// #![feature(fmt_as_str)]
+ ///
+ /// use core::fmt::Arguments;
+ ///
+ /// fn write_str(_: &str) { /* ... */ }
+ ///
+ /// fn write_fmt(args: &Arguments) {
+ /// if let Some(s) = args.as_str() {
+ /// write_str(s)
+ /// } else {
+ /// write_str(&args.to_string());
+ /// }
+ /// }
+ /// ```
+ ///
+ /// ```rust
+ /// #![feature(fmt_as_str)]
+ ///
+ /// assert_eq!(format_args!("hello").as_str(), Some("hello"));
+ /// assert_eq!(format_args!("").as_str(), Some(""));
+ /// assert_eq!(format_args!("{}", 1).as_str(), None);
+ /// ```
+ #[unstable(feature = "fmt_as_str", issue = "74442")]
+ #[inline]
+ pub fn as_str(&self) -> Option<&'static str> {
+ match (self.pieces, self.args) {
+ ([], []) => Some(""),
+ ([s], []) => Some(s),
+ _ => None,
+ }
+ }
+}
+
#[stable(feature = "rust1", since = "1.0.0")]
impl Debug for Arguments<'_> {
fn fmt(&self, fmt: &mut Formatter<'_>) -> Result {
Unknown,
}
+/// Used by [width](https://doc.rust-lang.org/std/fmt/#width) and [precision](https://doc.rust-lang.org/std/fmt/#precision) specifiers.
#[derive(Copy, Clone)]
pub enum Count {
+ /// Specified with a literal number, stores the value
Is(usize),
+ /// Specified using `$` and `*` syntaxes, stores the index into `args`
Param(usize),
+ /// Not specified
Implied,
}
/// `.await` the value.
///
/// [`Waker`]: ../task/struct.Waker.html
+#[doc(spotlight)]
#[must_use = "futures do nothing unless you `.await` or poll them"]
#[stable(feature = "futures_api", since = "1.36.0")]
#[lang = "future_trait"]
/// ```
#[inline]
#[stable(feature = "unreachable", since = "1.27.0")]
-pub unsafe fn unreachable_unchecked() -> ! {
+#[rustc_const_unstable(feature = "const_unreachable_unchecked", issue = "53188")]
+pub const unsafe fn unreachable_unchecked() -> ! {
// SAFETY: the safety contract for `intrinsics::unreachable` must
// be upheld by the caller.
unsafe { intrinsics::unreachable() }
///
/// The stabilized version of this intrinsic is
/// [`std::hint::unreachable_unchecked`](../../std/hint/fn.unreachable_unchecked.html).
+ #[rustc_const_unstable(feature = "const_unreachable_unchecked", issue = "53188")]
pub fn unreachable() -> !;
/// Informs the optimizer that a condition is always true.
///
/// // use `u32::from_ne_bytes` instead
/// let num = u32::from_ne_bytes(raw_bytes);
- /// // or use `u32::from_le_bytes` or `u32::from_ge_bytes` to specify the endianness
+ /// // or use `u32::from_le_bytes` or `u32::from_be_bytes` to specify the endianness
/// let num = u32::from_le_bytes(raw_bytes);
/// assert_eq!(num, 0x12345678);
/// let num = u32::from_be_bytes(raw_bytes);
/// The to-be-stabilized version of this intrinsic is
/// [`std::mem::variant_count`](../../std/mem/fn.variant_count.html)
#[rustc_const_unstable(feature = "variant_count", issue = "73662")]
- #[cfg(not(bootstrap))]
pub fn variant_count<T>() -> usize;
/// Rust's "try catch" construct which invokes the function pointer `try_fn`
/// generation.
#[cfg(not(bootstrap))]
#[lang = "count_code_region"]
- pub fn count_code_region(index: u32, start_byte_pos: u32, end_byte_pos: u32);
+ pub fn count_code_region(
+ function_source_hash: u64,
+ index: u32,
+ start_byte_pos: u32,
+ end_byte_pos: u32,
+ );
/// Internal marker for code coverage expressions, injected into the MIR when the
/// "instrument-coverage" option is enabled. This intrinsic is not converted into a
/// This marker identifies a code region and two other counters or counter expressions
/// whose sum is the number of times the code region was executed.
#[cfg(not(bootstrap))]
+ #[lang = "coverage_counter_add"]
pub fn coverage_counter_add(
index: u32,
left_index: u32,
/// whose difference is the number of times the code region was executed.
/// (See `coverage_counter_add` for more information.)
#[cfg(not(bootstrap))]
+ #[lang = "coverage_counter_subtract"]
pub fn coverage_counter_subtract(
index: u32,
left_index: u32,
/// This marker identifies a code region to be added to the "coverage map" to indicate source
/// code that can never be reached.
/// (See `coverage_counter_add` for more information.)
- #[cfg(not(bootstrap))]
pub fn coverage_unreachable(start_byte_pos: u32, end_byte_pos: u32);
/// See documentation of `<*const T>::guaranteed_eq` for details.
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
- #[cfg(not(bootstrap))]
pub fn ptr_guaranteed_eq<T>(ptr: *const T, other: *const T) -> bool;
/// See documentation of `<*const T>::guaranteed_ne` for details.
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
- #[cfg(not(bootstrap))]
pub fn ptr_guaranteed_ne<T>(ptr: *const T, other: *const T) -> bool;
}
//! ```
//!
//! An iterator has a method, [`next`], which when called, returns an
-//! [`Option`]`<Item>`. [`next`] will return `Some(Item)` as long as there
+//! [`Option`]`<Item>`. [`next`] will return [`Some(Item)`] as long as there
//! are elements, and once they've all been exhausted, will return `None` to
//! indicate that iteration is finished. Individual iterators may choose to
//! resume iteration, and so calling [`next`] again may or may not eventually
-//! start returning `Some(Item)` again at some point (for example, see [`TryIter`]).
+//! start returning [`Some(Item)`] again at some point (for example, see [`TryIter`]).
//!
//! [`Iterator`]'s full definition includes a number of other methods as well,
//! but they are default methods, built on top of [`next`], and so you get
//! more complex forms of processing. See the [Adapters](#adapters) section
//! below for more details.
//!
+//! [`Some(Item)`]: Some
//! [`Iterator`]: trait.Iterator.html
//! [`next`]: trait.Iterator.html#tymethod.next
-//! [`Option`]: ../../std/option/enum.Option.html
//! [`TryIter`]: ../../std/sync/mpsc/struct.TryIter.html
//!
//! # The three forms of iteration
//! # Implementing Iterator
//!
//! Creating an iterator of your own involves two steps: creating a `struct` to
-//! hold the iterator's state, and then `impl`ementing [`Iterator`] for that
-//! `struct`. This is why there are so many `struct`s in this module: there is
-//! one for each iterator and iterator adapter.
+//! hold the iterator's state, and then implementing [`Iterator`] for that `struct`.
+//! This is why there are so many `struct`s in this module: there is one for
+//! each iterator and iterator adapter.
//!
//! Let's make an iterator named `Counter` which counts from `1` to `5`:
//!
/// [`FromIterator`] this trait should rarely be called directly and instead
/// interacted with through [`Iterator::sum`].
///
-/// [`sum`]: ../../std/iter/trait.Sum.html#tymethod.sum
-/// [`FromIterator`]: ../../std/iter/trait.FromIterator.html
-/// [`Iterator::sum`]: ../../std/iter/trait.Iterator.html#method.sum
+/// [`sum`]: #tymethod.sum
+/// [`FromIterator`]: crate::iter::FromIterator
+/// [`Iterator::sum`]: crate::iter::Iterator::sum
#[stable(feature = "iter_arith_traits", since = "1.12.0")]
pub trait Sum<A = Self>: Sized {
/// Method which takes an iterator and generates `Self` from the elements by
/// [`FromIterator`] this trait should rarely be called directly and instead
/// interacted with through [`Iterator::product`].
///
-/// [`product`]: ../../std/iter/trait.Product.html#tymethod.product
-/// [`FromIterator`]: ../../std/iter/trait.FromIterator.html
-/// [`Iterator::product`]: ../../std/iter/trait.Iterator.html#method.product
+/// [`product`]: #tymethod.product
+/// [`FromIterator`]: crate::iter::FromIterator
+/// [`Iterator::product`]: crate::iter::Iterator::product
#[stable(feature = "iter_arith_traits", since = "1.12.0")]
pub trait Product<A = Self>: Sized {
/// Method which takes an iterator and generates `Self` from the elements by
/// `nth_back()` will return [`None`] if `n` is greater than or equal to the length of the
/// iterator.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`nth`]: ../../std/iter/trait.Iterator.html#method.nth
+ /// [`nth`]: crate::iter::Iterator::nth
///
/// # Examples
///
/// argument is a double reference. You can see this effect in the
/// examples below, with `&&x`.
///
- /// [`Some(element)`]: ../../std/option/enum.Option.html#variant.Some
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ /// [`Some(element)`]: Some
///
/// # Examples
///
label = "`{Self}` is not an iterator",
message = "`{Self}` is not an iterator"
)]
+#[doc(spotlight)]
#[must_use = "iterators are lazy and do nothing unless consumed"]
pub trait Iterator {
/// The type of the elements being iterated over.
/// again may or may not eventually start returning [`Some(Item)`] again at some
/// point.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`Some(Item)`]: ../../std/option/enum.Option.html#variant.Some
+ /// [`Some(Item)`]: Some
///
/// # Examples
///
/// The default implementation returns `(0, `[`None`]`)` which is correct for any
/// iterator.
///
- /// [`usize`]: ../../std/primitive.usize.html
- /// [`Option`]: ../../std/option/enum.Option.html
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ /// [`usize`]: type@usize
///
/// # Examples
///
/// called at least once even if the iterator does not have any elements.
///
/// [`next`]: #tymethod.next
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`Some`]: ../../std/option/enum.Option.html#variant.Some
///
/// # Overflow Behavior
///
/// This function might panic if the iterator has more than [`usize::MAX`]
/// elements.
///
- /// [`usize::MAX`]: ../../std/usize/constant.MAX.html
+ /// [`usize::MAX`]: crate::usize::MAX
///
/// # Examples
///
/// doing so, it keeps track of the current element. After [`None`] is
/// returned, `last()` will then return the last element it saw.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// Basic usage:
/// `nth()` will return [`None`] if `n` is greater than or equal to the length of the
/// iterator.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// Basic usage:
/// assert_eq!((2, 'o'), zipper[2]);
/// ```
///
- /// [`enumerate`]: trait.Iterator.html#method.enumerate
- /// [`next`]: ../../std/iter/trait.Iterator.html#tymethod.next
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ /// [`enumerate`]: #method.enumerate
+ /// [`next`]: #tymethod.next
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn zip<U>(self, other: U) -> Zip<Self, U::IntoIter>
/// more idiomatic to use [`for`] than `map()`.
///
/// [`for`]: ../../book/ch03-05-control-flow.html#looping-through-a-collection-with-for
- /// [`FnMut`]: ../../std/ops/trait.FnMut.html
+ /// [`FnMut`]: crate::ops::FnMut
///
/// # Examples
///
/// Basic usage:
///
/// ```
- /// let a = ["1", "lol", "3", "NaN", "5"];
+ /// let a = ["1", "two", "NaN", "four", "5"];
///
/// let mut iter = a.iter().filter_map(|s| s.parse().ok());
///
/// assert_eq!(iter.next(), Some(1));
- /// assert_eq!(iter.next(), Some(3));
/// assert_eq!(iter.next(), Some(5));
/// assert_eq!(iter.next(), None);
/// ```
/// Here's the same example, but with [`filter`] and [`map`]:
///
/// ```
- /// let a = ["1", "lol", "3", "NaN", "5"];
+ /// let a = ["1", "two", "NaN", "four", "5"];
/// let mut iter = a.iter().map(|s| s.parse()).filter(|s| s.is_ok()).map(|s| s.unwrap());
/// assert_eq!(iter.next(), Some(1));
- /// assert_eq!(iter.next(), Some(3));
/// assert_eq!(iter.next(), Some(5));
/// assert_eq!(iter.next(), None);
/// ```
///
- /// [`Option<T>`]: ../../std/option/enum.Option.html
- /// [`Some`]: ../../std/option/enum.Option.html#variant.Some
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ /// [`Option<T>`]: Option
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
fn filter_map<B, F>(self, f: F) -> FilterMap<Self, F>
/// The returned iterator might panic if the to-be-returned index would
/// overflow a [`usize`].
///
- /// [`usize::MAX`]: ../../std/usize/constant.MAX.html
- /// [`usize`]: ../../std/primitive.usize.html
+ /// [`usize`]: type@usize
+ /// [`usize::MAX`]: crate::usize::MAX
/// [`zip`]: #method.zip
///
/// # Examples
/// anything other than fetching the next value) of the [`next`] method
/// will occur.
///
- /// [`peek`]: struct.Peekable.html#method.peek
- /// [`next`]: ../../std/iter/trait.Iterator.html#tymethod.next
+ /// [`peek`]: crate::iter::Peekable::peek
+ /// [`next`]: #tymethod.next
///
/// # Examples
///
    /// It is also not specified what this iterator returns after the first `None` is returned.
    /// If you need a fused iterator, use [`fuse`].
///
- /// [`Some`]: ../../std/option/enum.Option.html#variant.Some
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
/// [`fuse`]: #method.fuse
#[inline]
#[unstable(feature = "iter_map_while", reason = "recently added", issue = "68537")]
/// iterator and the return value from the closure, an [`Option`], is
/// yielded by the iterator.
///
- /// [`Option`]: ../../std/option/enum.Option.html
- ///
/// # Examples
///
/// Basic usage:
/// [`Some(T)`] again. `fuse()` adapts an iterator, ensuring that after a
/// [`None`] is given, it will always return [`None`] forever.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`Some(T)`]: ../../std/option/enum.Option.html#variant.Some
+ /// [`Some(T)`]: Some
///
/// # Examples
///
/// assert_eq!(Ok(vec![1, 3]), result);
/// ```
///
- /// [`iter`]: ../../std/iter/trait.Iterator.html#tymethod.next
+ /// [`iter`]: #tymethod.next
/// [`String`]: ../../std/string/struct.String.html
- /// [`char`]: ../../std/primitive.char.html
- /// [`Result`]: ../../std/result/enum.Result.html
+ /// [`char`]: type@char
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
#[must_use = "if you really need to exhaust the iterator, consider `.for_each(drop)` instead"]
/// argument is a double reference. You can see this effect in the
/// examples below, with `&&x`.
///
- /// [`Some(element)`]: ../../std/option/enum.Option.html#variant.Some
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ /// [`Some(element)`]: Some
///
/// # Examples
///
/// This function might panic if the iterator has more than `usize::MAX`
/// non-matching elements.
///
- /// [`Some(index)`]: ../../std/option/enum.Option.html#variant.Some
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [`usize::MAX`]: ../../std/usize/constant.MAX.html
+ /// [`Some(index)`]: Some
+ /// [`usize::MAX`]: crate::usize::MAX
///
/// # Examples
///
/// `rposition()` is short-circuiting; in other words, it will stop
/// processing as soon as it finds a `true`.
///
- /// [`Some(index)`]: ../../std/option/enum.Option.html#variant.Some
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ /// [`Some(index)`]: Some
///
/// # Examples
///
/// If several elements are equally maximum, the last element is
/// returned. If the iterator is empty, [`None`] is returned.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// Basic usage:
/// If several elements are equally minimum, the first element is
/// returned. If the iterator is empty, [`None`] is returned.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// Basic usage:
/// If several elements are equally maximum, the last element is
/// returned. If the iterator is empty, [`None`] is returned.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// ```
/// If several elements are equally maximum, the last element is
/// returned. If the iterator is empty, [`None`] is returned.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// ```
/// If several elements are equally minimum, the first element is
/// returned. If the iterator is empty, [`None`] is returned.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// ```
/// If several elements are equally minimum, the first element is
/// returned. If the iterator is empty, [`None`] is returned.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// ```
/// This is useful when you have an iterator over `&T`, but you need an
/// iterator over `T`.
///
- /// [`clone`]: ../../std/clone/trait.Clone.html#tymethod.clone
+ /// [`clone`]: crate::clone::Clone::clone
///
/// # Examples
///
/// from the beginning. After iterating again, it will start at the
/// beginning again. And again. And again. Forever.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// Basic usage:
/// assert!(![0.0, 1.0, f32::NAN].iter().is_sorted_by(|a, b| a.partial_cmp(b)));
/// ```
///
- /// [`is_sorted`]: trait.Iterator.html#method.is_sorted
+ /// [`is_sorted`]: #method.is_sorted
#[unstable(feature = "is_sorted", reason = "new API", issue = "53485")]
fn is_sorted_by<F>(mut self, mut compare: F) -> bool
where
/// the elements, as determined by `f`. Apart from that, it's equivalent to [`is_sorted`]; see
/// its documentation for more information.
///
- /// [`is_sorted`]: trait.Iterator.html#method.is_sorted
+ /// [`is_sorted`]: #method.is_sorted
///
/// # Examples
///
/// on the iterator. If the iterator is already fused, the additional [`Fuse`]
/// wrapper will be a no-op with no performance penalty.
///
-/// [`None`]: ../../std/option/enum.Option.html#variant.None
-/// [`Iterator::fuse`]: ../../std/iter/trait.Iterator.html#method.fuse
-/// [`Fuse`]: ../../std/iter/struct.Fuse.html
+/// [`Iterator::fuse`]: crate::iter::Iterator::fuse
+/// [`Fuse`]: crate::iter::Fuse
#[stable(feature = "fused", since = "1.26.0")]
#[rustc_unsafe_specialization_marker]
pub trait FusedIterator: Iterator {}
/// This trait must only be implemented when the contract is upheld.
/// Consumers of this trait must inspect [`.size_hint`]’s upper bound.
///
-/// [`None`]: ../../std/option/enum.Option.html#variant.None
-/// [`usize::MAX`]: ../../std/usize/constant.MAX.html
-/// [`.size_hint`]: ../../std/iter/trait.Iterator.html#method.size_hint
+/// [`usize::MAX`]: crate::usize::MAX
+/// [`.size_hint`]: crate::iter::Iterator::size_hint
#[unstable(feature = "trusted_len", issue = "37572")]
#[rustc_unsafe_specialization_marker]
pub unsafe trait TrustedLen: Iterator {}
--- /dev/null
+//! Lazy values and one-time initialization of static data.
+
+use crate::cell::{Cell, UnsafeCell};
+use crate::fmt;
+use crate::mem;
+use crate::ops::Deref;
+
+/// A cell which can be written to only once.
+///
+/// Unlike `RefCell`, a `OnceCell` only provides shared `&T` references to its value.
+/// Unlike `Cell`, a `OnceCell` doesn't require copying or replacing the value to access it.
+///
+/// # Examples
+///
+/// ```
+/// #![feature(once_cell)]
+///
+/// use std::lazy::OnceCell;
+///
+/// let cell = OnceCell::new();
+/// assert!(cell.get().is_none());
+///
+/// let value: &String = cell.get_or_init(|| {
+/// "Hello, World!".to_string()
+/// });
+/// assert_eq!(value, "Hello, World!");
+/// assert!(cell.get().is_some());
+/// ```
+#[unstable(feature = "once_cell", issue = "74465")]
+pub struct OnceCell<T> {
+ // Invariant: written to at most once.
+ inner: UnsafeCell<Option<T>>,
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T> Default for OnceCell<T> {
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: fmt::Debug> fmt::Debug for OnceCell<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.get() {
+ Some(v) => f.debug_tuple("OnceCell").field(v).finish(),
+ None => f.write_str("OnceCell(Uninit)"),
+ }
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: Clone> Clone for OnceCell<T> {
+ fn clone(&self) -> OnceCell<T> {
+ let res = OnceCell::new();
+ if let Some(value) = self.get() {
+ match res.set(value.clone()) {
+ Ok(()) => (),
+ Err(_) => unreachable!(),
+ }
+ }
+ res
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: PartialEq> PartialEq for OnceCell<T> {
+ fn eq(&self, other: &Self) -> bool {
+ self.get() == other.get()
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: Eq> Eq for OnceCell<T> {}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T> From<T> for OnceCell<T> {
+ fn from(value: T) -> Self {
+ OnceCell { inner: UnsafeCell::new(Some(value)) }
+ }
+}
+
+impl<T> OnceCell<T> {
+ /// Creates a new empty cell.
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub const fn new() -> OnceCell<T> {
+ OnceCell { inner: UnsafeCell::new(None) }
+ }
+
+ /// Gets the reference to the underlying value.
+ ///
+ /// Returns `None` if the cell is empty.
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn get(&self) -> Option<&T> {
+ // Safety: Safe due to `inner`'s invariant
+ unsafe { &*self.inner.get() }.as_ref()
+ }
+
+ /// Gets the mutable reference to the underlying value.
+ ///
+ /// Returns `None` if the cell is empty.
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn get_mut(&mut self) -> Option<&mut T> {
+ // Safety: Safe because we have unique access
+ unsafe { &mut *self.inner.get() }.as_mut()
+ }
+
+ /// Sets the contents of the cell to `value`.
+ ///
+ /// # Errors
+ ///
+ /// This method returns `Ok(())` if the cell was empty and `Err(value)` if
+ /// it was full.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::OnceCell;
+ ///
+ /// let cell = OnceCell::new();
+ /// assert!(cell.get().is_none());
+ ///
+ /// assert_eq!(cell.set(92), Ok(()));
+ /// assert_eq!(cell.set(62), Err(62));
+ ///
+ /// assert!(cell.get().is_some());
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn set(&self, value: T) -> Result<(), T> {
+ // Safety: Safe because we cannot have overlapping mutable borrows
+ let slot = unsafe { &*self.inner.get() };
+ if slot.is_some() {
+ return Err(value);
+ }
+
+ // Safety: This is the only place where we set the slot, no races
+ // due to reentrancy/concurrency are possible, and we've
+ // checked that slot is currently `None`, so this write
+ // maintains the `inner`'s invariant.
+ let slot = unsafe { &mut *self.inner.get() };
+ *slot = Some(value);
+ Ok(())
+ }
+
+ /// Gets the contents of the cell, initializing it with `f`
+ /// if the cell was empty.
+ ///
+ /// # Panics
+ ///
+ /// If `f` panics, the panic is propagated to the caller, and the cell
+ /// remains uninitialized.
+ ///
+ /// It is an error to reentrantly initialize the cell from `f`. Doing
+ /// so results in a panic.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::OnceCell;
+ ///
+ /// let cell = OnceCell::new();
+ /// let value = cell.get_or_init(|| 92);
+ /// assert_eq!(value, &92);
+ /// let value = cell.get_or_init(|| unreachable!());
+ /// assert_eq!(value, &92);
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn get_or_init<F>(&self, f: F) -> &T
+ where
+ F: FnOnce() -> T,
+ {
+ match self.get_or_try_init(|| Ok::<T, !>(f())) {
+ Ok(val) => val,
+ }
+ }
+
+ /// Gets the contents of the cell, initializing it with `f` if
+ /// the cell was empty. If the cell was empty and `f` failed, an
+ /// error is returned.
+ ///
+ /// # Panics
+ ///
+ /// If `f` panics, the panic is propagated to the caller, and the cell
+ /// remains uninitialized.
+ ///
+ /// It is an error to reentrantly initialize the cell from `f`. Doing
+ /// so results in a panic.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::OnceCell;
+ ///
+ /// let cell = OnceCell::new();
+ /// assert_eq!(cell.get_or_try_init(|| Err(())), Err(()));
+ /// assert!(cell.get().is_none());
+ /// let value = cell.get_or_try_init(|| -> Result<i32, ()> {
+ /// Ok(92)
+ /// });
+ /// assert_eq!(value, Ok(&92));
+ /// assert_eq!(cell.get(), Some(&92))
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn get_or_try_init<F, E>(&self, f: F) -> Result<&T, E>
+ where
+ F: FnOnce() -> Result<T, E>,
+ {
+ if let Some(val) = self.get() {
+ return Ok(val);
+ }
+ let val = f()?;
+ // Note that *some* forms of reentrant initialization might lead to
+ // UB (see `reentrant_init` test). I believe that just removing this
+ // `assert`, while keeping `set/get` would be sound, but it seems
+ // better to panic, rather than to silently use an old value.
+ assert!(self.set(val).is_ok(), "reentrant init");
+ Ok(self.get().unwrap())
+ }
+
+ /// Consumes the cell, returning the wrapped value.
+ ///
+ /// Returns `None` if the cell was empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::OnceCell;
+ ///
+ /// let cell: OnceCell<String> = OnceCell::new();
+ /// assert_eq!(cell.into_inner(), None);
+ ///
+ /// let cell = OnceCell::new();
+ /// cell.set("hello".to_string()).unwrap();
+ /// assert_eq!(cell.into_inner(), Some("hello".to_string()));
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn into_inner(self) -> Option<T> {
+ // Because `into_inner` takes `self` by value, the compiler statically verifies
+ // that it is not currently borrowed. So it is safe to move out `Option<T>`.
+ self.inner.into_inner()
+ }
+
+ /// Takes the value out of this `OnceCell`, moving it back to an uninitialized state.
+ ///
+ /// Has no effect and returns `None` if the `OnceCell` hasn't been initialized.
+ ///
+ /// Safety is guaranteed by requiring a mutable reference.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::OnceCell;
+ ///
+ /// let mut cell: OnceCell<String> = OnceCell::new();
+ /// assert_eq!(cell.take(), None);
+ ///
+ /// let mut cell = OnceCell::new();
+ /// cell.set("hello".to_string()).unwrap();
+ /// assert_eq!(cell.take(), Some("hello".to_string()));
+ /// assert_eq!(cell.get(), None);
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn take(&mut self) -> Option<T> {
+ mem::take(self).into_inner()
+ }
+}
+
+/// A value which is initialized on the first access.
+///
+/// # Examples
+///
+/// ```
+/// #![feature(once_cell)]
+///
+/// use std::lazy::Lazy;
+///
+/// let lazy: Lazy<i32> = Lazy::new(|| {
+/// println!("initializing");
+/// 92
+/// });
+/// println!("ready");
+/// println!("{}", *lazy);
+/// println!("{}", *lazy);
+///
+/// // Prints:
+/// // ready
+/// // initializing
+/// // 92
+/// // 92
+/// ```
+#[unstable(feature = "once_cell", issue = "74465")]
+pub struct Lazy<T, F = fn() -> T> {
+ cell: OnceCell<T>,
+ init: Cell<Option<F>>,
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: fmt::Debug, F> fmt::Debug for Lazy<T, F> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Lazy").field("cell", &self.cell).field("init", &"..").finish()
+ }
+}
+
+impl<T, F> Lazy<T, F> {
+ /// Creates a new lazy value with the given initializing function.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// # fn main() {
+ /// use std::lazy::Lazy;
+ ///
+ /// let hello = "Hello, World!".to_string();
+ ///
+ /// let lazy = Lazy::new(|| hello.to_uppercase());
+ ///
+ /// assert_eq!(&*lazy, "HELLO, WORLD!");
+ /// # }
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub const fn new(init: F) -> Lazy<T, F> {
+ Lazy { cell: OnceCell::new(), init: Cell::new(Some(init)) }
+ }
+}
+
+impl<T, F: FnOnce() -> T> Lazy<T, F> {
+ /// Forces the evaluation of this lazy value and returns a reference to
+ /// the result.
+ ///
+ /// This is equivalent to the `Deref` impl, but is explicit.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::Lazy;
+ ///
+ /// let lazy = Lazy::new(|| 92);
+ ///
+ /// assert_eq!(Lazy::force(&lazy), &92);
+ /// assert_eq!(&*lazy, &92);
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn force(this: &Lazy<T, F>) -> &T {
+ this.cell.get_or_init(|| match this.init.take() {
+ Some(f) => f(),
+ None => panic!("`Lazy` instance has previously been poisoned"),
+ })
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T, F: FnOnce() -> T> Deref for Lazy<T, F> {
+ type Target = T;
+ fn deref(&self) -> &T {
+ Lazy::force(self)
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: Default> Default for Lazy<T> {
+ /// Creates a new lazy value using `Default` as the initializing function.
+ fn default() -> Lazy<T> {
+ Lazy::new(T::default)
+ }
+}
#![feature(const_ascii_ctype_on_intrinsics)]
#![feature(const_alloc_layout)]
#![feature(const_discriminant)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
-#![cfg_attr(bootstrap, feature(const_loop))]
#![feature(const_checked_int_methods)]
#![feature(const_euclidean_int_methods)]
#![feature(const_overflowing_int_methods)]
#![feature(const_panic)]
#![feature(const_fn_union)]
#![feature(const_generics)]
+#![feature(const_option)]
#![feature(const_ptr_offset)]
#![feature(const_ptr_offset_from)]
-#![cfg_attr(not(bootstrap), feature(const_raw_ptr_comparison))]
+#![feature(const_raw_ptr_comparison)]
#![feature(const_result)]
#![feature(const_slice_from_raw_parts)]
#![feature(const_slice_ptr_len)]
#![feature(const_type_name)]
#![feature(const_likely)]
+#![feature(const_unreachable_unchecked)]
#![feature(custom_inner_attributes)]
#![feature(decl_macro)]
#![feature(doc_cfg)]
+#![cfg_attr(not(bootstrap), feature(doc_spotlight))]
+#![feature(duration_consts_2)]
#![feature(extern_types)]
#![feature(fundamental)]
#![feature(intrinsics)]
#![feature(staged_api)]
#![feature(std_internals)]
#![feature(stmt_expr_attributes)]
-#![cfg_attr(bootstrap, feature(track_caller))]
#![feature(transparent_unions)]
#![feature(unboxed_closures)]
#![feature(unsized_locals)]
#![feature(untagged_unions)]
#![feature(unwind_attributes)]
-#![cfg_attr(not(bootstrap), feature(variant_count))]
+#![feature(variant_count)]
#![feature(doc_alias)]
#![feature(mmx_target_feature)]
#![feature(tbm_target_feature)]
#![feature(rtm_target_feature)]
#![feature(f16c_target_feature)]
#![feature(hexagon_target_feature)]
-#![cfg_attr(not(bootstrap), feature(const_fn_transmute))]
+#![feature(const_fn_transmute)]
#![feature(abi_unadjusted)]
#![feature(adx_target_feature)]
#![feature(maybe_uninit_slice)]
#![feature(slice_ptr_get)]
#![feature(no_niche)] // rust-lang/rust#68303
#![feature(unsafe_block_in_unsafe_fn)]
+#![deny(intra_doc_link_resolution_failure)]
#![deny(unsafe_op_in_unsafe_fn)]
#[prelude_import]
pub mod ffi;
#[cfg(not(test))] // See #65860
pub mod iter;
+#[unstable(feature = "once_cell", issue = "74465")]
+pub mod lazy;
pub mod option;
pub mod panic;
pub mod panicking;
)]
// FIXME: This annotation should be moved into rust-lang/stdarch after clashing_extern_declarations is
// merged. It currently cannot because bootstrap fails as the lint hasn't been defined yet.
-#[cfg_attr(not(bootstrap), allow(clashing_extern_declarations))]
+#[allow(clashing_extern_declarations)]
#[unstable(feature = "stdsimd", issue = "48556")]
mod core_arch;
() => (
$crate::panic!("explicit panic")
);
- ($msg:expr) => (
+ ($msg:literal) => (
$crate::panicking::panic($msg)
);
+ ($msg:expr) => (
+ $crate::panic!("{}", $crate::convert::identity::<&str>($msg))
+ );
($msg:expr,) => (
$crate::panic!($msg)
);
pub trait DiscriminantKind {
/// The type of the discriminant, which must satisfy the trait
/// bounds required by `mem::Discriminant`.
+ #[cfg_attr(not(bootstrap), lang = "discriminant_type")]
type Discriminant: Clone + Copy + Debug + Eq + PartialEq + Hash + Send + Sync + Unpin;
}
/// assert_eq!(mem::variant_count::<Result<!, !>>(), 2);
/// ```
#[inline(always)]
-#[cfg(not(bootstrap))]
#[unstable(feature = "variant_count", issue = "73662")]
#[rustc_const_unstable(feature = "variant_count", issue = "73662")]
pub const fn variant_count<T>() -> usize {
};
}
-#[cfg(bootstrap)]
-macro_rules! unlikely {
- ($e: expr) => {
- $e
- };
-}
-
-#[cfg(not(bootstrap))]
#[allow_internal_unstable(const_likely)]
macro_rules! unlikely {
($e: expr) => {
#[stable(feature = "no_panic_abs", since = "1.13.0")]
#[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")]
#[allow(unused_attributes)]
- #[cfg_attr(bootstrap, allow_internal_unstable(const_if_match))]
#[inline]
pub const fn wrapping_abs(self) -> Self {
if self.is_negative() {
#[stable(feature = "wrapping", since = "1.7.0")]
#[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")]
#[allow(unused_attributes)]
- #[cfg_attr(bootstrap, allow_internal_unstable(const_if_match))]
pub const fn overflowing_neg(self) -> (Self, bool) {
if unlikely!(self == Self::MIN) {
(Self::MIN, true)
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_const_stable(feature = "const_int_methods", since = "1.32.0")]
#[allow(unused_attributes)]
- #[cfg_attr(bootstrap, allow_internal_unstable(const_if_match))]
#[inline]
#[rustc_inherit_overflow_checks]
pub const fn abs(self) -> Self {
#[must_use = "closures are lazy and do nothing unless called"]
pub trait FnOnce<Args> {
/// The returned type after the call operator is used.
- #[cfg_attr(not(bootstrap), lang = "fn_once_output")]
+ #[lang = "fn_once_output"]
#[stable(feature = "fn_once_output", since = "1.12.0")]
type Output;
/// [`Iterator`]: ../iter/trait.IntoIterator.html
/// [slicing index]: ../slice/trait.SliceIndex.html
#[doc(alias = "..")]
-#[derive(Copy, Clone, PartialEq, Eq, Hash)]
+#[derive(Copy, Clone, Default, PartialEq, Eq, Hash)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct RangeFull;
/// assert_eq!(arr[1..=3], [ 1,2,3 ]);
/// ```
#[doc(alias = "..")]
-#[derive(Clone, PartialEq, Eq, Hash)] // not Copy -- see #27186
+#[derive(Clone, Default, PartialEq, Eq, Hash)] // not Copy -- see #27186
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Range<Idx> {
/// The lower bound of the range (inclusive).
/// [`Some`]: #variant.Some
#[must_use = "if you intended to assert that this has a value, consider `.unwrap()` instead"]
#[inline]
+ #[rustc_const_unstable(feature = "const_option", issue = "67441")]
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_some(&self) -> bool {
+ pub const fn is_some(&self) -> bool {
matches!(*self, Some(_))
}
#[must_use = "if you intended to assert that this doesn't have a value, consider \
`.and_then(|| panic!(\"`Option` had a value when expected `None`\"))` instead"]
#[inline]
+ #[rustc_const_unstable(feature = "const_option", issue = "67441")]
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn is_none(&self) -> bool {
+ pub const fn is_none(&self) -> bool {
!self.is_some()
}
/// println!("still can print text: {:?}", text);
/// ```
#[inline]
+ #[rustc_const_unstable(feature = "const_option", issue = "67441")]
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn as_ref(&self) -> Option<&T> {
+ pub const fn as_ref(&self) -> Option<&T> {
match *self {
Some(ref x) => Some(x),
None => None,
/// assert_eq!(x.iter().next(), None);
/// ```
#[inline]
+ #[rustc_const_unstable(feature = "const_option", issue = "67441")]
#[stable(feature = "rust1", since = "1.0.0")]
- pub fn iter(&self) -> Iter<'_, T> {
+ pub const fn iter(&self) -> Iter<'_, T> {
Iter { inner: Item { opt: self.as_ref() } }
}
/// to allow `x?` (where `x` is an `Option<T>`) to be converted into your error type, you can
/// implement `impl From<NoneError>` for `YourErrorType`. In that case, `x?` within a function that
/// returns `Result<_, YourErrorType>` will translate a `None` value into an `Err` result.
+#[rustc_diagnostic_item = "none_error"]
#[unstable(feature = "try_trait", issue = "42327")]
#[derive(Clone, Copy, PartialEq, PartialOrd, Eq, Ord, Debug, Hash)]
pub struct NoneError;
#[cfg_attr(not(feature = "panic_immediate_abort"), inline(never))]
#[track_caller]
#[lang = "panic"] // needed by codegen for panic on overflow and other `Assert` MIR terminators
-pub fn panic(expr: &str) -> ! {
+pub fn panic(expr: &'static str) -> ! {
if cfg!(feature = "panic_immediate_abort") {
super::intrinsics::abort()
}
#[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[inline]
- #[cfg(not(bootstrap))]
pub const fn guaranteed_eq(self, other: *const T) -> bool
where
T: Sized,
#[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[inline]
- #[cfg(not(bootstrap))]
pub const fn guaranteed_ne(self, other: *const T) -> bool
where
T: Sized,
#[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[inline]
- #[cfg(not(bootstrap))]
pub const fn guaranteed_eq(self, other: *mut T) -> bool
where
T: Sized,
#[unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[rustc_const_unstable(feature = "const_raw_ptr_comparison", issue = "53020")]
#[inline]
- #[cfg(not(bootstrap))]
pub const fn guaranteed_ne(self, other: *mut T) -> bool
where
T: Sized,
/// Sorts the slice, but may not preserve the order of equal elements.
///
/// This sort is unstable (i.e., may reorder equal elements), in-place
- /// (i.e., does not allocate), and `O(n * log(n))` worst-case.
+ /// (i.e., does not allocate), and *O*(*n* \* log(*n*)) worst-case.
///
/// # Current implementation
///
/// elements.
///
/// This sort is unstable (i.e., may reorder equal elements), in-place
- /// (i.e., does not allocate), and `O(n * log(n))` worst-case.
+ /// (i.e., does not allocate), and *O*(*n* \* log(*n*)) worst-case.
///
/// The comparator function must define a total ordering for the elements in the slice. If
/// the ordering is not total, the order of the elements is unspecified. An order is a
/// elements.
///
/// This sort is unstable (i.e., may reorder equal elements), in-place
- /// (i.e., does not allocate), and `O(m * n * log(n))` worst-case, where the key function is
- /// `O(m)`.
+ /// (i.e., does not allocate), and *O*(*m* \* *n* \* log(*n*)) worst-case, where the key function is
+ /// *O*(*m*).
///
/// # Current implementation
///
/// This reordering has the additional property that any value at position `i < index` will be
/// less than or equal to any value at a position `j > index`. Additionally, this reordering is
/// unstable (i.e. any number of equal elements may end up at position `index`), in-place
- /// (i.e. does not allocate), and `O(n)` worst-case. This function is also/ known as "kth
+ /// (i.e. does not allocate), and *O*(*n*) worst-case. This function is also known as "kth
/// element" in other libraries. It returns a triplet of the following values: all elements less
/// than the one at the given index, the value at the given index, and all elements greater than
/// the one at the given index.
/// This reordering has the additional property that any value at position `i < index` will be
/// less than or equal to any value at a position `j > index` using the comparator function.
/// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
- /// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
+ /// position `index`), in-place (i.e. does not allocate), and *O*(*n*) worst-case. This function
/// is also known as "kth element" in other libraries. It returns a triplet of the following
/// values: all elements less than the one at the given index, the value at the given index,
/// and all elements greater than the one at the given index, using the provided comparator
/// This reordering has the additional property that any value at position `i < index` will be
/// less than or equal to any value at a position `j > index` using the key extraction function.
/// Additionally, this reordering is unstable (i.e. any number of equal elements may end up at
- /// position `index`), in-place (i.e. does not allocate), and `O(n)` worst-case. This function
+ /// position `index`), in-place (i.e. does not allocate), and *O*(*n*) worst-case. This function
/// is also known as "kth element" in other libraries. It returns a triplet of the following
/// values: all elements less than the one at the given index, the value at the given index, and
/// all elements greater than the one at the given index, using the provided key extraction
///
/// Behavior is undefined if any of the following conditions are violated:
///
-/// * `data` must be [valid] for writes for `len * mem::size_of::<T>()` many bytes,
+/// * `data` must be [valid] for both reads and writes for `len * mem::size_of::<T>()` many bytes,
/// and it must be properly aligned. This means in particular:
///
/// * The entire memory range of this slice must be contained within a single allocated object!
return false;
}
- #[cfg(bootstrap)]
- if self.as_ptr() == other.as_ptr() {
- return true;
- }
-
// While performance would suffer if `guaranteed_eq` just returned `false`
// for all arguments, correctness and return value of this function are not affected.
- #[cfg(not(bootstrap))]
if self.as_ptr().guaranteed_eq(other.as_ptr()) {
return true;
}
return false;
}
- #[cfg(bootstrap)]
- if self.as_ptr() == other.as_ptr() {
- return true;
- }
-
// While performance would suffer if `guaranteed_eq` just returned `false`
// for all arguments, correctness and return value of this function are not affected.
- #[cfg(not(bootstrap))]
if self.as_ptr().guaranteed_eq(other.as_ptr()) {
return true;
}
/// Partially sorts a slice by shifting several out-of-order elements around.
///
-/// Returns `true` if the slice is sorted at the end. This function is `O(n)` worst-case.
+/// Returns `true` if the slice is sorted at the end. This function is *O*(*n*) worst-case.
#[cold]
fn partial_insertion_sort<T, F>(v: &mut [T], is_less: &mut F) -> bool
where
false
}
-/// Sorts a slice using insertion sort, which is `O(n^2)` worst-case.
+/// Sorts a slice using insertion sort, which is *O*(*n*^2) worst-case.
fn insertion_sort<T, F>(v: &mut [T], is_less: &mut F)
where
F: FnMut(&T, &T) -> bool,
}
}
-/// Sorts `v` using heapsort, which guarantees `O(n * log(n))` worst-case.
+/// Sorts `v` using heapsort, which guarantees *O*(*n* \* log(*n*)) worst-case.
#[cold]
pub fn heapsort<T, F>(v: &mut [T], is_less: &mut F)
where
}
}
-/// Sorts `v` using pattern-defeating quicksort, which is `O(n * log(n))` worst-case.
+/// Sorts `v` using pattern-defeating quicksort, which is *O*(*n* \* log(*n*)) worst-case.
pub fn quicksort<T, F>(v: &mut [T], mut is_less: F)
where
F: FnMut(&T, &T) -> bool,
//!
//! For more details, see the [`std::str`] module.
//!
-//! [`std::str`]: ../../std/str/index.html
+//! [`std::str`]: self
#![stable(feature = "rust1", since = "1.0.0")]
/// `FromStr`'s [`from_str`] method is often used implicitly, through
/// [`str`]'s [`parse`] method. See [`parse`]'s documentation for examples.
///
-/// [`from_str`]: #tymethod.from_str
-/// [`str`]: ../../std/primitive.str.html
-/// [`parse`]: ../../std/primitive.str.html#method.parse
+/// [`from_str`]: FromStr::from_str
+/// [`parse`]: str::parse
///
/// `FromStr` does not have a lifetime parameter, and so you can only parse types
/// that do not contain a lifetime parameter themselves. In other words, you can
/// An error returned when parsing a `bool` using [`from_str`] fails
///
-/// [`from_str`]: ../../std/primitive.bool.html#method.from_str
+/// [`from_str`]: FromStr::from_str
#[derive(Debug, Clone, PartialEq, Eq)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct ParseBoolError {
/// Errors which can occur when attempting to interpret a sequence of [`u8`]
/// as a string.
///
-/// [`u8`]: ../../std/primitive.u8.html
-///
/// As such, the `from_utf8` family of functions and methods for both [`String`]s
/// and [`&str`]s make use of this error, for example.
///
/// [`String`]: ../../std/string/struct.String.html#method.from_utf8
-/// [`&str`]: ../../std/str/fn.from_utf8.html
+/// [`&str`]: from_utf8
///
/// # Examples
///
/// that it is valid UTF-8. `from_utf8()` checks to ensure that the bytes are valid
/// UTF-8, and then does the conversion.
///
-/// [`&str`]: ../../std/primitive.str.html
-/// [`u8`]: ../../std/primitive.u8.html
+/// [`&str`]: str
/// [byteslice]: ../../std/primitive.slice.html
///
/// If you are sure that the byte slice is valid UTF-8, and you don't want to
/// it are valid UTF-8. If this constraint is violated, undefined behavior
/// results, as the rest of Rust assumes that [`&str`]s are valid UTF-8.
///
-/// [`&str`]: ../../std/primitive.str.html
+/// [`&str`]: str
///
/// # Examples
///
/// Converts a slice of bytes to a string slice without checking
/// that the string contains valid UTF-8; mutable version.
///
-/// See the immutable version, [`from_utf8_unchecked()`][fromutf8], for more information.
-///
-/// [fromutf8]: fn.from_utf8_unchecked.html
+/// See the immutable version, [`from_utf8_unchecked()`], for more information.
///
/// # Examples
///
/// An iterator over the [`char`]s of a string slice.
///
-/// [`char`]: ../../std/primitive.char.html
///
/// This struct is created by the [`chars`] method on [`str`].
/// See its documentation for more.
///
-/// [`chars`]: ../../std/primitive.str.html#method.chars
-/// [`str`]: ../../std/primitive.str.html
+/// [`chars`]: str::chars
#[derive(Clone)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Chars<'a> {
/// An iterator over the [`char`]s of a string slice, and their positions.
///
-/// [`char`]: ../../std/primitive.char.html
-///
/// This struct is created by the [`char_indices`] method on [`str`].
/// See its documentation for more.
///
-/// [`char_indices`]: ../../std/primitive.str.html#method.char_indices
-/// [`str`]: ../../std/primitive.str.html
+/// [`char_indices`]: str::char_indices
#[derive(Clone, Debug)]
#[stable(feature = "rust1", since = "1.0.0")]
pub struct CharIndices<'a> {
/// This struct is created by the [`bytes`] method on [`str`].
/// See its documentation for more.
///
-/// [`bytes`]: ../../std/primitive.str.html#method.bytes
-/// [`str`]: ../../std/primitive.str.html
+/// [`bytes`]: str::bytes
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone, Debug)]
pub struct Bytes<'a>(Copied<slice::Iter<'a, u8>>);
forward:
/// Created with the method [`split`].
///
- /// [`split`]: ../../std/primitive.str.html#method.split
+ /// [`split`]: str::split
struct Split;
reverse:
/// Created with the method [`rsplit`].
///
- /// [`rsplit`]: ../../std/primitive.str.html#method.rsplit
+ /// [`rsplit`]: str::rsplit
struct RSplit;
stability:
#[stable(feature = "rust1", since = "1.0.0")]
forward:
/// Created with the method [`split_terminator`].
///
- /// [`split_terminator`]: ../../std/primitive.str.html#method.split_terminator
+ /// [`split_terminator`]: str::split_terminator
struct SplitTerminator;
reverse:
/// Created with the method [`rsplit_terminator`].
///
- /// [`rsplit_terminator`]: ../../std/primitive.str.html#method.rsplit_terminator
+ /// [`rsplit_terminator`]: str::rsplit_terminator
struct RSplitTerminator;
stability:
#[stable(feature = "rust1", since = "1.0.0")]
forward:
/// Created with the method [`splitn`].
///
- /// [`splitn`]: ../../std/primitive.str.html#method.splitn
+ /// [`splitn`]: str::splitn
struct SplitN;
reverse:
/// Created with the method [`rsplitn`].
///
- /// [`rsplitn`]: ../../std/primitive.str.html#method.rsplitn
+ /// [`rsplitn`]: str::rsplitn
struct RSplitN;
stability:
#[stable(feature = "rust1", since = "1.0.0")]
forward:
/// Created with the method [`match_indices`].
///
- /// [`match_indices`]: ../../std/primitive.str.html#method.match_indices
+ /// [`match_indices`]: str::match_indices
struct MatchIndices;
reverse:
/// Created with the method [`rmatch_indices`].
///
- /// [`rmatch_indices`]: ../../std/primitive.str.html#method.rmatch_indices
+ /// [`rmatch_indices`]: str::rmatch_indices
struct RMatchIndices;
stability:
#[stable(feature = "str_match_indices", since = "1.5.0")]
forward:
/// Created with the method [`matches`].
///
- /// [`matches`]: ../../std/primitive.str.html#method.matches
+ /// [`matches`]: str::matches
struct Matches;
reverse:
/// Created with the method [`rmatches`].
///
- /// [`rmatches`]: ../../std/primitive.str.html#method.rmatches
+ /// [`rmatches`]: str::rmatches
struct RMatches;
stability:
#[stable(feature = "str_matches", since = "1.2.0")]
/// This struct is created with the [`lines`] method on [`str`].
/// See its documentation for more.
///
-/// [`lines`]: ../../std/primitive.str.html#method.lines
-/// [`str`]: ../../std/primitive.str.html
+/// [`lines`]: str::lines
#[stable(feature = "rust1", since = "1.0.0")]
#[derive(Clone, Debug)]
pub struct Lines<'a>(Map<SplitTerminator<'a, char>, LinesAnyMap>);
/// Created with the method [`lines_any`].
///
-/// [`lines_any`]: ../../std/primitive.str.html#method.lines_any
+/// [`lines_any`]: str::lines_any
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(since = "1.4.0", reason = "use lines()/Lines instead now")]
#[derive(Clone, Debug)]
}
/// Converts a string slice to a byte slice. To convert the byte slice back
- /// into a string slice, use the [`str::from_utf8`] function.
- ///
- /// [`str::from_utf8`]: ./str/fn.from_utf8.html
+ /// into a string slice, use the [`from_utf8`] function.
///
/// # Examples
///
unsafe { Slices { str: self }.slice }
}
- /// Converts a mutable string slice to a mutable byte slice. To convert the
- /// mutable byte slice back into a mutable string slice, use the
- /// [`str::from_utf8_mut`] function.
+ /// Converts a mutable string slice to a mutable byte slice.
+ ///
+ /// # Safety
+ ///
+ /// The caller must ensure that the content of the slice is valid UTF-8
+ /// before the borrow ends and the underlying `str` is used.
///
- /// [`str::from_utf8_mut`]: ./str/fn.from_utf8_mut.html
+ /// Use of a `str` whose contents are not valid UTF-8 is undefined behavior.
///
/// # Examples
///
/// The caller must ensure that the returned pointer is never written to.
/// If you need to mutate the contents of the string slice, use [`as_mut_ptr`].
///
- /// [`u8`]: primitive.u8.html
- /// [`as_mut_ptr`]: #method.as_mut_ptr
+ /// [`as_mut_ptr`]: str::as_mut_ptr
///
/// # Examples
///
///
/// It is your responsibility to make sure that the string slice only gets
/// modified in a way that it remains valid UTF-8.
- ///
- /// [`u8`]: primitive.u8.html
#[stable(feature = "str_as_mut_ptr", since = "1.36.0")]
#[inline]
pub fn as_mut_ptr(&mut self) -> *mut u8 {
/// This is the non-panicking alternative to indexing the `str`. Returns
/// [`None`] whenever equivalent indexing operation would panic.
///
- /// [`None`]: option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// ```
/// This is the non-panicking alternative to indexing the `str`. Returns
/// [`None`] whenever equivalent indexing operation would panic.
///
- /// [`None`]: option/enum.Option.html#variant.None
- ///
/// # Examples
///
/// ```
/// This is generally not recommended, use with caution! For a safe
/// alternative see [`str`] and [`Index`].
///
- /// [`str`]: primitive.str.html
- /// [`Index`]: ops/trait.Index.html
+ /// [`Index`]: crate::ops::Index
///
/// This new slice goes from `begin` to `end`, including `begin` but
/// excluding `end`.
/// To get a mutable string slice instead, see the
/// [`slice_mut_unchecked`] method.
///
- /// [`slice_mut_unchecked`]: #method.slice_mut_unchecked
+ /// [`slice_mut_unchecked`]: str::slice_mut_unchecked
///
/// # Safety
///
/// This is generally not recommended, use with caution! For a safe
/// alternative see [`str`] and [`IndexMut`].
///
- /// [`str`]: primitive.str.html
- /// [`IndexMut`]: ops/trait.IndexMut.html
+ /// [`IndexMut`]: crate::ops::IndexMut
///
/// This new slice goes from `begin` to `end`, including `begin` but
/// excluding `end`.
/// To get an immutable string slice instead, see the
/// [`slice_unchecked`] method.
///
- /// [`slice_unchecked`]: #method.slice_unchecked
+ /// [`slice_unchecked`]: str::slice_unchecked
///
/// # Safety
///
/// To get mutable string slices instead, see the [`split_at_mut`]
/// method.
///
- /// [`split_at_mut`]: #method.split_at_mut
+ /// [`split_at_mut`]: str::split_at_mut
///
/// # Panics
///
///
/// To get immutable string slices instead, see the [`split_at`] method.
///
- /// [`split_at`]: #method.split_at
+ /// [`split_at`]: str::split_at
///
/// # Panics
///
/// Core Property `White_Space`. If you only want to split on ASCII whitespace
/// instead, use [`split_ascii_whitespace`].
///
- /// [`split_ascii_whitespace`]: #method.split_ascii_whitespace
+ /// [`split_ascii_whitespace`]: str::split_ascii_whitespace
///
/// # Examples
///
///
/// To split by Unicode `Whitespace` instead, use [`split_whitespace`].
///
- /// [`split_whitespace`]: #method.split_whitespace
+ /// [`split_whitespace`]: str::split_whitespace
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`None`]: option/enum.Option.html#variant.None
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`None`]: option/enum.Option.html#variant.None
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Iterator behavior
///
/// allows a reverse search and forward/reverse search yields the same
/// elements. This is true for, e.g., [`char`], but not for `&str`.
///
- /// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
- ///
/// If the pattern allows a reverse search but its results might differ
/// from a forward search, the [`rsplit`] method can be used.
///
- /// [`rsplit`]: #method.rsplit
+ /// [`rsplit`]: str::rsplit
///
/// # Examples
///
///
/// Use [`split_whitespace`] for this behavior.
///
- /// [`split_whitespace`]: #method.split_whitespace
+ /// [`split_whitespace`]: str::split_whitespace
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn split<'a, P: Pattern<'a>>(&'a self, pat: P) -> Split<'a, P> {
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Iterator behavior
///
/// search, and it will be a [`DoubleEndedIterator`] if a forward/reverse
/// search yields the same elements.
///
- /// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
- ///
/// For iterating from the front, the [`split`] method can be used.
///
- /// [`split`]: #method.split
+ /// [`split`]: str::split
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// Equivalent to [`split`], except that the trailing substring
/// is skipped if empty.
///
- /// [`split`]: #method.split
+ /// [`split`]: str::split
///
/// This method can be used for string data that is _terminated_,
/// rather than _separated_ by a pattern.
/// allows a reverse search and forward/reverse search yields the same
/// elements. This is true for, e.g., [`char`], but not for `&str`.
///
- /// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
- ///
/// If the pattern allows a reverse search but its results might differ
/// from a forward search, the [`rsplit_terminator`] method can be used.
///
- /// [`rsplit_terminator`]: #method.rsplit_terminator
+ /// [`rsplit_terminator`]: str::rsplit_terminator
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// Equivalent to [`split`], except that the trailing substring is
/// skipped if empty.
///
- /// [`split`]: #method.split
+ /// [`split`]: str::split
///
/// This method can be used for string data that is _terminated_,
/// rather than _separated_ by a pattern.
/// For iterating from the front, the [`split_terminator`] method can be
/// used.
///
- /// [`split_terminator`]: #method.split_terminator
+ /// [`split_terminator`]: str::split_terminator
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Iterator behavior
///
/// If the pattern allows a reverse search, the [`rsplitn`] method can be
/// used.
///
- /// [`rsplitn`]: #method.rsplitn
+ /// [`rsplitn`]: str::rsplitn
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Iterator behavior
///
///
/// For splitting from the front, the [`splitn`] method can be used.
///
- /// [`splitn`]: #method.splitn
+ /// [`splitn`]: str::splitn
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Iterator behavior
///
/// allows a reverse search and forward/reverse search yields the same
/// elements. This is true for, e.g., [`char`], but not for `&str`.
///
- /// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
- ///
/// If the pattern allows a reverse search but its results might differ
/// from a forward search, the [`rmatches`] method can be used.
///
- /// [`rmatches`]: #method.rmatches
+ /// [`rmatches`]: str::rmatches
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Iterator behavior
///
/// search, and it will be a [`DoubleEndedIterator`] if a forward/reverse
/// search yields the same elements.
///
- /// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
- ///
/// For iterating from the front, the [`matches`] method can be used.
///
- /// [`matches`]: #method.matches
+ /// [`matches`]: str::matches
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Iterator behavior
///
/// allows a reverse search and forward/reverse search yields the same
/// elements. This is true for, e.g., [`char`], but not for `&str`.
///
- /// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
- ///
/// If the pattern allows a reverse search but its results might differ
/// from a forward search, the [`rmatch_indices`] method can be used.
///
- /// [`rmatch_indices`]: #method.rmatch_indices
+ /// [`rmatch_indices`]: str::rmatch_indices
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Iterator behavior
///
/// search, and it will be a [`DoubleEndedIterator`] if a forward/reverse
/// search yields the same elements.
///
- /// [`DoubleEndedIterator`]: iter/trait.DoubleEndedIterator.html
- ///
/// For iterating from the front, the [`match_indices`] method can be used.
///
- /// [`match_indices`]: #method.match_indices
+ /// [`match_indices`]: str::match_indices
///
/// # Examples
///
/// The [pattern] can be a [`char`], a slice of [`char`]s, or a function
/// or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Text directionality
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Examples
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Text directionality
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Text directionality
///
/// The [pattern] can be a `&str`, [`char`], a slice of [`char`]s, or a
/// function or closure that determines if a character matches.
///
- /// [`char`]: primitive.char.html
- /// [pattern]: str/pattern/index.html
+ /// [pattern]: self::pattern
///
/// # Text directionality
///
/// you're trying to parse into.
///
/// `parse` can parse any type that implements the [`FromStr`] trait.
- ///
- /// [`FromStr`]: str/trait.FromStr.html
- ///
/// # Errors
///
/// Will return [`Err`] if it's not possible to parse this string slice into
/// the desired type.
///
- /// [`Err`]: str/trait.FromStr.html#associatedtype.Err
+ /// [`Err`]: FromStr::Err
///
/// # Examples
///
/// Note: only extended grapheme codepoints that begin the string will be
/// escaped.
///
- /// [`char::escape_debug`]: ../std/primitive.char.html#method.escape_debug
- ///
/// # Examples
///
/// As an iterator:
/// Return an iterator that escapes each char in `self` with [`char::escape_default`].
///
- /// [`char::escape_default`]: ../std/primitive.char.html#method.escape_default
- ///
/// # Examples
///
/// As an iterator:
/// Return an iterator that escapes each char in `self` with [`char::escape_unicode`].
///
- /// [`char::escape_unicode`]: ../std/primitive.char.html#method.escape_unicode
- ///
/// # Examples
///
/// As an iterator:
/// This struct is created by the [`split_whitespace`] method on [`str`].
/// See its documentation for more.
///
-/// [`split_whitespace`]: ../../std/primitive.str.html#method.split_whitespace
-/// [`str`]: ../../std/primitive.str.html
+/// [`split_whitespace`]: str::split_whitespace
#[stable(feature = "split_whitespace", since = "1.1.0")]
#[derive(Clone, Debug)]
pub struct SplitWhitespace<'a> {
/// This struct is created by the [`split_ascii_whitespace`] method on [`str`].
/// See its documentation for more.
///
-/// [`split_ascii_whitespace`]: ../../std/primitive.str.html#method.split_ascii_whitespace
-/// [`str`]: ../../std/primitive.str.html
+/// [`split_ascii_whitespace`]: str::split_ascii_whitespace
#[stable(feature = "split_ascii_whitespace", since = "1.34.0")]
#[derive(Clone, Debug)]
pub struct SplitAsciiWhitespace<'a> {
/// This struct is created by the [`split_inclusive`] method on [`str`].
/// See its documentation for more.
///
-/// [`split_inclusive`]: ../../std/primitive.str.html#method.split_inclusive
-/// [`str`]: ../../std/primitive.str.html
+/// [`split_inclusive`]: str::split_inclusive
#[unstable(feature = "split_inclusive", issue = "72360")]
pub struct SplitInclusive<'a, P: Pattern<'a>>(SplitInternal<'a, P>);
/// An iterator of [`u16`] over the string encoded as UTF-16.
///
-/// [`u16`]: ../../std/primitive.u16.html
-///
/// This struct is created by the [`encode_utf16`] method on [`str`].
/// See its documentation for more.
///
-/// [`encode_utf16`]: ../../std/primitive.str.html#method.encode_utf16
-/// [`str`]: ../../std/primitive.str.html
+/// [`encode_utf16`]: str::encode_utf16
#[derive(Clone)]
#[stable(feature = "encode_utf16", since = "1.8.0")]
pub struct EncodeUtf16<'a> {
impl FusedIterator for EncodeUtf16<'_> {}
/// The return type of [`str::escape_debug`].
-///
-/// [`str::escape_debug`]: ../../std/primitive.str.html#method.escape_debug
#[stable(feature = "str_escape", since = "1.34.0")]
#[derive(Clone, Debug)]
pub struct EscapeDebug<'a> {
}
/// The return type of [`str::escape_default`].
-///
-/// [`str::escape_default`]: ../../std/primitive.str.html#method.escape_default
#[stable(feature = "str_escape", since = "1.34.0")]
#[derive(Clone, Debug)]
pub struct EscapeDefault<'a> {
}
/// The return type of [`str::escape_unicode`].
-///
-/// [`str::escape_unicode`]: ../../std/primitive.str.html#method.escape_unicode
#[stable(feature = "str_escape", since = "1.34.0")]
#[derive(Clone, Debug)]
pub struct EscapeUnicode<'a> {
mod wake;
#[stable(feature = "futures_api", since = "1.36.0")]
pub use self::wake::{Context, RawWaker, RawWakerVTable, Waker};
+
+mod ready;
+#[unstable(feature = "ready_macro", issue = "70922")]
+pub use ready::ready;
--- /dev/null
+/// Extracts the successful type of a `Poll<T>`.
+///
+/// This macro bakes in propagation of `Pending` signals by returning early.
+///
+/// # Examples
+///
+/// ```
+/// #![feature(future_readiness_fns)]
+/// #![feature(ready_macro)]
+///
+/// use core::task::{ready, Context, Poll};
+/// use core::future::{self, Future};
+/// use core::pin::Pin;
+///
+/// pub fn do_poll(cx: &mut Context<'_>) -> Poll<()> {
+/// let mut fut = future::ready(42);
+/// let fut = Pin::new(&mut fut);
+///
+/// let num = ready!(fut.poll(cx));
+/// # drop(num);
+/// // ... use num
+///
+/// Poll::Ready(())
+/// }
+/// ```
+///
+/// The `ready!` call expands to:
+///
+/// ```
+/// # #![feature(future_readiness_fns)]
+/// # #![feature(ready_macro)]
+/// #
+/// # use core::task::{Context, Poll};
+/// # use core::future::{self, Future};
+/// # use core::pin::Pin;
+/// #
+/// # pub fn do_poll(cx: &mut Context<'_>) -> Poll<()> {
+/// # let mut fut = future::ready(42);
+/// # let fut = Pin::new(&mut fut);
+/// #
+/// let num = match fut.poll(cx) {
+/// Poll::Ready(t) => t,
+/// Poll::Pending => return Poll::Pending,
+/// };
+/// # drop(num);
+/// # // ... use num
+/// #
+/// # Poll::Ready(())
+/// # }
+/// ```
+#[unstable(feature = "ready_macro", issue = "70922")]
+#[rustc_macro_transparency = "semitransparent"]
+pub macro ready($e:expr) {
+ match $e {
+ $crate::task::Poll::Ready(t) => t,
+ $crate::task::Poll::Pending => {
+ return $crate::task::Poll::Pending;
+ }
+ }
+}
--- /dev/null
+use core::{
+ cell::Cell,
+ lazy::{Lazy, OnceCell},
+ sync::atomic::{AtomicUsize, Ordering::SeqCst},
+};
+
+#[test]
+fn once_cell() {
+ let c = OnceCell::new();
+ assert!(c.get().is_none());
+ c.get_or_init(|| 92);
+ assert_eq!(c.get(), Some(&92));
+
+ c.get_or_init(|| panic!("Kabom!"));
+ assert_eq!(c.get(), Some(&92));
+}
+
+#[test]
+fn once_cell_get_mut() {
+ let mut c = OnceCell::new();
+ assert!(c.get_mut().is_none());
+ c.set(90).unwrap();
+ *c.get_mut().unwrap() += 2;
+ assert_eq!(c.get_mut(), Some(&mut 92));
+}
+
+#[test]
+fn once_cell_drop() {
+ static DROP_CNT: AtomicUsize = AtomicUsize::new(0);
+ struct Dropper;
+ impl Drop for Dropper {
+ fn drop(&mut self) {
+ DROP_CNT.fetch_add(1, SeqCst);
+ }
+ }
+
+ let x = OnceCell::new();
+ x.get_or_init(|| Dropper);
+ assert_eq!(DROP_CNT.load(SeqCst), 0);
+ drop(x);
+ assert_eq!(DROP_CNT.load(SeqCst), 1);
+}
+
+#[test]
+fn unsync_once_cell_drop_empty() {
+ let x = OnceCell::<&'static str>::new();
+ drop(x);
+}
+
+#[test]
+fn clone() {
+ let s = OnceCell::new();
+ let c = s.clone();
+ assert!(c.get().is_none());
+
+ s.set("hello").unwrap();
+ let c = s.clone();
+ assert_eq!(c.get().map(|c| *c), Some("hello"));
+}
+
+#[test]
+fn from_impl() {
+ assert_eq!(OnceCell::from("value").get(), Some(&"value"));
+ assert_ne!(OnceCell::from("foo").get(), Some(&"bar"));
+}
+
+#[test]
+fn partialeq_impl() {
+ assert!(OnceCell::from("value") == OnceCell::from("value"));
+ assert!(OnceCell::from("foo") != OnceCell::from("bar"));
+
+ assert!(OnceCell::<&'static str>::new() == OnceCell::new());
+ assert!(OnceCell::<&'static str>::new() != OnceCell::from("value"));
+}
+
+#[test]
+fn into_inner() {
+ let cell: OnceCell<&'static str> = OnceCell::new();
+ assert_eq!(cell.into_inner(), None);
+ let cell = OnceCell::new();
+ cell.set("hello").unwrap();
+ assert_eq!(cell.into_inner(), Some("hello"));
+}
+
+#[test]
+fn lazy_new() {
+ let called = Cell::new(0);
+ let x = Lazy::new(|| {
+ called.set(called.get() + 1);
+ 92
+ });
+
+ assert_eq!(called.get(), 0);
+
+ let y = *x - 30;
+ assert_eq!(y, 62);
+ assert_eq!(called.get(), 1);
+
+ let y = *x - 30;
+ assert_eq!(y, 62);
+ assert_eq!(called.get(), 1);
+}
+
+#[test]
+fn aliasing_in_get() {
+ let x = OnceCell::new();
+ x.set(42).unwrap();
+ let at_x = x.get().unwrap(); // --- (shared) borrow of inner `Option<T>` --+
+ let _ = x.set(27); // <-- temporary (unique) borrow of inner `Option<T>` |
+ println!("{}", at_x); // <------- up until here ---------------------------+
+}
+
+#[test]
+#[should_panic(expected = "reentrant init")]
+fn reentrant_init() {
+ let x: OnceCell<Box<i32>> = OnceCell::new();
+ let dangling_ref: Cell<Option<&i32>> = Cell::new(None);
+ x.get_or_init(|| {
+ let r = x.get_or_init(|| Box::new(92));
+ dangling_ref.set(Some(r));
+ Box::new(62)
+ });
+ eprintln!("use after free: {:?}", dangling_ref.get().unwrap());
+}
#![feature(option_unwrap_none)]
#![feature(peekable_next_if)]
#![feature(partition_point)]
+#![feature(once_cell)]
#![feature(unsafe_block_in_unsafe_fn)]
#![deny(unsafe_op_in_unsafe_fn)]
mod hash;
mod intrinsics;
mod iter;
+mod lazy;
mod manually_drop;
mod mem;
mod nonzero;
/// ```
#[stable(feature = "duration", since = "1.3.0")]
#[inline]
- #[rustc_const_stable(feature = "duration_consts", since = "1.32.0")]
- pub fn new(secs: u64, nanos: u32) -> Duration {
- let secs =
- secs.checked_add((nanos / NANOS_PER_SEC) as u64).expect("overflow in Duration::new");
+ #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
+ pub const fn new(secs: u64, nanos: u32) -> Duration {
+ let secs = match secs.checked_add((nanos / NANOS_PER_SEC) as u64) {
+ Some(secs) => secs,
+ None => panic!("overflow in Duration::new"),
+ };
let nanos = nanos % NANOS_PER_SEC;
Duration { secs, nanos }
}
/// ```
#[stable(feature = "duration_checked_ops", since = "1.16.0")]
#[inline]
- pub fn checked_add(self, rhs: Duration) -> Option<Duration> {
+ #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
+ pub const fn checked_add(self, rhs: Duration) -> Option<Duration> {
if let Some(mut secs) = self.secs.checked_add(rhs.secs) {
let mut nanos = self.nanos + rhs.nanos;
if nanos >= NANOS_PER_SEC {
/// ```
#[stable(feature = "duration_checked_ops", since = "1.16.0")]
#[inline]
- pub fn checked_sub(self, rhs: Duration) -> Option<Duration> {
+ #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
+ pub const fn checked_sub(self, rhs: Duration) -> Option<Duration> {
if let Some(mut secs) = self.secs.checked_sub(rhs.secs) {
let nanos = if self.nanos >= rhs.nanos {
self.nanos - rhs.nanos
/// ```
#[stable(feature = "duration_checked_ops", since = "1.16.0")]
#[inline]
- pub fn checked_mul(self, rhs: u32) -> Option<Duration> {
+ #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
+ pub const fn checked_mul(self, rhs: u32) -> Option<Duration> {
// Multiply nanoseconds as u64, because it cannot overflow that way.
let total_nanos = self.nanos as u64 * rhs as u64;
let extra_secs = total_nanos / (NANOS_PER_SEC as u64);
let nanos = (total_nanos % (NANOS_PER_SEC as u64)) as u32;
- if let Some(secs) =
- self.secs.checked_mul(rhs as u64).and_then(|s| s.checked_add(extra_secs))
- {
- debug_assert!(nanos < NANOS_PER_SEC);
- Some(Duration { secs, nanos })
- } else {
- None
+ if let Some(s) = self.secs.checked_mul(rhs as u64) {
+ if let Some(secs) = s.checked_add(extra_secs) {
+ debug_assert!(nanos < NANOS_PER_SEC);
+ return Some(Duration { secs, nanos });
+ }
}
+ None
}
/// Checked `Duration` division. Computes `self / other`, returning [`None`]
/// ```
#[stable(feature = "duration_checked_ops", since = "1.16.0")]
#[inline]
- pub fn checked_div(self, rhs: u32) -> Option<Duration> {
+ #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
+ pub const fn checked_div(self, rhs: u32) -> Option<Duration> {
if rhs != 0 {
let secs = self.secs / (rhs as u64);
let carry = self.secs - secs * (rhs as u64);
/// ```
#[stable(feature = "duration_float", since = "1.38.0")]
#[inline]
- pub fn as_secs_f64(&self) -> f64 {
+ #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
+ pub const fn as_secs_f64(&self) -> f64 {
(self.secs as f64) + (self.nanos as f64) / (NANOS_PER_SEC as f64)
}
/// ```
#[stable(feature = "duration_float", since = "1.38.0")]
#[inline]
- pub fn as_secs_f32(&self) -> f32 {
+ #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
+ pub const fn as_secs_f32(&self) -> f32 {
(self.secs as f32) + (self.nanos as f32) / (NANOS_PER_SEC as f32)
}
/// ```
#[unstable(feature = "div_duration", issue = "63139")]
#[inline]
- pub fn div_duration_f64(self, rhs: Duration) -> f64 {
+ #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
+ pub const fn div_duration_f64(self, rhs: Duration) -> f64 {
self.as_secs_f64() / rhs.as_secs_f64()
}
/// ```
#[unstable(feature = "div_duration", issue = "63139")]
#[inline]
- pub fn div_duration_f32(self, rhs: Duration) -> f32 {
+ #[rustc_const_unstable(feature = "duration_consts_2", issue = "72440")]
+ pub const fn div_duration_f32(self, rhs: Duration) -> f32 {
self.as_secs_f32() / rhs.as_secs_f32()
}
}
doc = false
[dependencies]
+cfg-if = { version = "0.1.8", features = ['rustc-dep-of-std'] }
core = { path = "../libcore" }
libc = { version = "0.2", default-features = false }
compiler_builtins = "0.1.0"
use core::any::Any;
#[rustc_std_internal_symbol]
-#[cfg_attr(not(bootstrap), allow(improper_ctypes_definitions))]
+#[allow(improper_ctypes_definitions)]
pub unsafe extern "C" fn __rust_panic_cleanup(_: *mut u8) -> *mut (dyn Any + Send + 'static) {
unreachable!()
}
pub unsafe extern "C" fn __rust_start_panic(_payload: usize) -> u32 {
abort();
- #[cfg(any(unix, target_os = "cloudabi"))]
- unsafe fn abort() -> ! {
- libc::abort();
- }
-
- #[cfg(any(windows, all(target_arch = "wasm32", not(target_os = "emscripten"))))]
- unsafe fn abort() -> ! {
- core::intrinsics::abort();
- }
-
- #[cfg(any(target_os = "hermit", all(target_vendor = "fortanix", target_env = "sgx")))]
- unsafe fn abort() -> ! {
- // call std::sys::abort_internal
- extern "C" {
- pub fn __rust_abort() -> !;
+ cfg_if::cfg_if! {
+ if #[cfg(any(unix, target_os = "cloudabi"))] {
+ unsafe fn abort() -> ! {
+ libc::abort();
+ }
+ } else if #[cfg(any(target_os = "hermit",
+ all(target_vendor = "fortanix", target_env = "sgx")
+ ))] {
+ unsafe fn abort() -> ! {
+ // call std::sys::abort_internal
+ extern "C" {
+ pub fn __rust_abort() -> !;
+ }
+ __rust_abort();
+ }
+ } else {
+ unsafe fn abort() -> ! {
+ core::intrinsics::abort();
+ }
}
- __rust_abort();
}
}
if #[cfg(target_os = "emscripten")] {
#[path = "emcc.rs"]
mod real_imp;
- } else if #[cfg(target_arch = "wasm32")] {
- #[path = "dummy.rs"]
- mod real_imp;
} else if #[cfg(target_os = "hermit")] {
#[path = "hermit.rs"]
mod real_imp;
} else if #[cfg(target_env = "msvc")] {
#[path = "seh.rs"]
mod real_imp;
- } else {
+ } else if #[cfg(any(
+ all(target_family = "windows", target_env = "gnu"),
+ target_os = "cloudabi",
+ target_family = "unix",
+ all(target_vendor = "fortanix", target_env = "sgx"),
+ ))] {
// Rust runtime's startup objects depend on these symbols, so make them public.
#[cfg(all(target_os="windows", target_arch = "x86", target_env="gnu"))]
pub use real_imp::eh_frame_registry::*;
#[path = "gcc.rs"]
mod real_imp;
+ } else {
+ // Targets that don't support unwinding.
+ // - arch=wasm32
+ // - os=none ("bare metal" targets)
+ // - os=uefi
+ // - nvptx64-nvidia-cuda
+ // - avr-unknown-unknown
+ // - mipsel-sony-psp
+ #[path = "dummy.rs"]
+ mod real_imp;
}
}
mod dwarf;
#[rustc_std_internal_symbol]
-#[cfg_attr(not(bootstrap), allow(improper_ctypes_definitions))]
+#[allow(improper_ctypes_definitions)]
pub unsafe extern "C" fn __rust_panic_cleanup(payload: *mut u8) -> *mut (dyn Any + Send + 'static) {
Box::into_raw(imp::cleanup(payload))
}
#![feature(in_band_lifetimes)]
#![feature(negative_impls)]
#![feature(optin_builtin_traits)]
+#![feature(restricted_std)]
#![feature(rustc_attrs)]
#![feature(min_specialization)]
#![recursion_limit = "256"]
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/", test(attr(deny(warnings))))]
#![feature(bool_to_option)]
#![feature(box_syntax)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
#![feature(const_fn)] // For the `transmute` in `P::new`
#![feature(const_panic)]
-#![cfg_attr(not(bootstrap), feature(const_fn_transmute))]
+#![feature(const_fn_transmute)]
#![feature(crate_visibility_modifier)]
#![feature(label_break_value)]
#![feature(nll)]
include => external_doc
cfg => doc_cfg
masked => doc_masked
+ spotlight => doc_spotlight
alias => doc_alias
keyword => doc_keyword
);
let trait_def = TraitDef {
span,
attributes: Vec::new(),
- path: path_std!(cx, marker::Copy),
+ path: path_std!(marker::Copy),
additional_bounds: Vec::new(),
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: true,
methods: Vec::new(),
}
}
ItemKind::Union(..) => {
- bounds = vec![Literal(path_std!(cx, marker::Copy))];
+ bounds = vec![Literal(path_std!(marker::Copy))];
is_shallow = true;
substructure = combine_substructure(Box::new(|c, s, sub| {
cs_clone_shallow("Clone", c, s, sub, true)
let trait_def = TraitDef {
span,
attributes: Vec::new(),
- path: path_std!(cx, clone::Clone),
+ path: path_std!(clone::Clone),
additional_bounds: bounds,
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: true,
methods: vec![MethodDef {
name: sym::clone,
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
args: Vec::new(),
ret_ty: Self_,
let trait_def = TraitDef {
span,
attributes: Vec::new(),
- path: path_std!(cx, cmp::Eq),
+ path: path_std!(cmp::Eq),
additional_bounds: Vec::new(),
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: true,
methods: vec![MethodDef {
name: sym::assert_receiver_is_total_eq,
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
args: vec![],
ret_ty: nil_ty(),
associated_types: Vec::new(),
};
- super::inject_impl_of_structural_trait(
- cx,
- span,
- item,
- path_std!(cx, marker::StructuralEq),
- push,
- );
+ super::inject_impl_of_structural_trait(cx, span, item, path_std!(marker::StructuralEq), push);
trait_def.expand_ext(cx, mitem, item, push, true)
}
let trait_def = TraitDef {
span,
attributes: Vec::new(),
- path: path_std!(cx, cmp::Ord),
+ path: path_std!(cmp::Ord),
additional_bounds: Vec::new(),
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: sym::cmp,
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
- args: vec![(borrowed_self(), "other")],
- ret_ty: Literal(path_std!(cx, cmp::Ordering)),
+ args: vec![(borrowed_self(), sym::other)],
+ ret_ty: Literal(path_std!(cmp::Ordering)),
attributes: attrs,
is_unsafe: false,
unify_fieldless_variants: true,
let attrs = vec![cx.attribute(inline)];
MethodDef {
name: $name,
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
- args: vec![(borrowed_self(), "other")],
+ args: vec![(borrowed_self(), sym::other)],
ret_ty: Literal(path_local!(bool)),
attributes: attrs,
is_unsafe: false,
cx,
span,
item,
- path_std!(cx, marker::StructuralPartialEq),
+ path_std!(marker::StructuralPartialEq),
push,
);
let trait_def = TraitDef {
span,
attributes: Vec::new(),
- path: path_std!(cx, cmp::PartialEq),
+ path: path_std!(cmp::PartialEq),
additional_bounds: Vec::new(),
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods,
let attrs = vec![cx.attribute(inline)];
MethodDef {
name: $name,
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
- args: vec![(borrowed_self(), "other")],
+ args: vec![(borrowed_self(), sym::other)],
ret_ty: Literal(path_local!(bool)),
attributes: attrs,
is_unsafe: false,
}};
}
- let ordering_ty = Literal(path_std!(cx, cmp::Ordering));
+ let ordering_ty = Literal(path_std!(cmp::Ordering));
let ret_ty = Literal(Path::new_(
- pathvec_std!(cx, option::Option),
+ pathvec_std!(option::Option),
None,
vec![Box::new(ordering_ty)],
PathKind::Std,
let partial_cmp_def = MethodDef {
name: sym::partial_cmp,
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
- args: vec![(borrowed_self(), "other")],
+ args: vec![(borrowed_self(), sym::other)],
ret_ty,
attributes: attrs,
is_unsafe: false,
let trait_def = TraitDef {
span,
attributes: vec![],
- path: path_std!(cx, cmp::PartialOrd),
+ path: path_std!(cmp::PartialOrd),
additional_bounds: vec![],
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods,
) {
// &mut ::std::fmt::Formatter
let fmtr =
- Ptr(Box::new(Literal(path_std!(cx, fmt::Formatter))), Borrowed(None, ast::Mutability::Mut));
+ Ptr(Box::new(Literal(path_std!(fmt::Formatter))), Borrowed(None, ast::Mutability::Mut));
let trait_def = TraitDef {
span,
attributes: Vec::new(),
- path: path_std!(cx, fmt::Debug),
+ path: path_std!(fmt::Debug),
additional_bounds: Vec::new(),
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: sym::fmt,
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
explicit_self: borrowed_explicit_self(),
- args: vec![(fmtr, "f")],
- ret_ty: Literal(path_std!(cx, fmt::Result)),
+ args: vec![(fmtr, sym::f)],
+ ret_ty: Literal(path_std!(fmt::Result)),
attributes: Vec::new(),
is_unsafe: false,
unify_fieldless_variants: false,
// We want to make sure we have the ctxt set so that we can use unstable methods
let span = cx.with_def_site_ctxt(span);
let name = cx.expr_lit(span, ast::LitKind::Str(ident.name, ast::StrStyle::Cooked));
- let builder = cx.ident_of("debug_trait_builder", span);
+ let builder = Ident::new(sym::debug_trait_builder, span);
let builder_expr = cx.expr_ident(span, builder);
let fmt = substr.nonself_args[0].clone();
match vdata {
ast::VariantData::Tuple(..) | ast::VariantData::Unit(..) => {
// tuple struct/"normal" variant
- let expr = cx.expr_method_call(span, fmt, cx.ident_of("debug_tuple", span), vec![name]);
+ let expr =
+ cx.expr_method_call(span, fmt, Ident::new(sym::debug_tuple, span), vec![name]);
stmts.push(cx.stmt_let(span, true, builder, expr));
for field in fields {
ast::VariantData::Struct(..) => {
// normal struct/struct variant
let expr =
- cx.expr_method_call(span, fmt, cx.ident_of("debug_struct", span), vec![name]);
+ cx.expr_method_call(span, fmt, Ident::new(sym::debug_struct, span), vec![name]);
stmts.push(cx.stmt_let(DUMMY_SP, true, builder, expr));
for field in fields {
}
}
- let expr = cx.expr_method_call(span, builder_expr, cx.ident_of("finish", span), vec![]);
+ let expr = cx.expr_method_call(span, builder_expr, Ident::new(sym::finish, span), vec![]);
stmts.push(cx.stmt_expr(expr));
let block = cx.block(span, stmts);
use rustc_ast::ast::{Expr, MetaItem, Mutability};
use rustc_ast::ptr::P;
use rustc_expand::base::{Annotatable, ExtCtxt};
-use rustc_span::symbol::{sym, Symbol};
+use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
pub fn expand_deriving_rustc_decodable(
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
- let krate = "rustc_serialize";
- let typaram = "__D";
+ let krate = sym::rustc_serialize;
+ let typaram = sym::__D;
let trait_def = TraitDef {
span,
attributes: Vec::new(),
- path: Path::new_(vec![krate, "Decodable"], None, vec![], PathKind::Global),
+ path: Path::new_(vec![krate, sym::Decodable], None, vec![], PathKind::Global),
additional_bounds: Vec::new(),
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: sym::decode,
- generics: LifetimeBounds {
- lifetimes: Vec::new(),
+ generics: Bounds {
bounds: vec![(
typaram,
- vec![Path::new_(vec![krate, "Decoder"], None, vec![], PathKind::Global)],
+ vec![Path::new_(vec![krate, sym::Decoder], None, vec![], PathKind::Global)],
)],
},
explicit_self: None,
args: vec![(
Ptr(Box::new(Literal(Path::new_local(typaram))), Borrowed(None, Mutability::Mut)),
- "d",
+ sym::d,
)],
ret_ty: Literal(Path::new_(
- pathvec_std!(cx, result::Result),
+ pathvec_std!(result::Result),
None,
vec![
Box::new(Self_),
Box::new(Literal(Path::new_(
- vec![typaram, "Error"],
+ vec![typaram, sym::Error],
None,
vec![],
PathKind::Local,
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
- krate: &str,
+ krate: Symbol,
) -> P<Expr> {
let decoder = substr.nonself_args[0].clone();
let recurse = vec![
- cx.ident_of(krate, trait_span),
- cx.ident_of("Decodable", trait_span),
- cx.ident_of("decode", trait_span),
+ Ident::new(krate, trait_span),
+ Ident::new(sym::Decodable, trait_span),
+ Ident::new(sym::decode, trait_span),
];
let exprdecode = cx.expr_path(cx.path_global(trait_span, recurse));
// throw an underscore in front to suppress unused variable warnings
- let blkarg = cx.ident_of("_d", trait_span);
+ let blkarg = Ident::new(sym::_d, trait_span);
let blkdecoder = cx.expr_ident(trait_span, blkarg);
match *substr.fields {
Unnamed(ref fields, _) => fields.len(),
Named(ref fields) => fields.len(),
};
- let read_struct_field = cx.ident_of("read_struct_field", trait_span);
+ let read_struct_field = Ident::new(sym::read_struct_field, trait_span);
let path = cx.path_ident(trait_span, substr.type_ident);
let result =
cx.expr_method_call(
trait_span,
decoder,
- cx.ident_of("read_struct", trait_span),
+ Ident::new(sym::read_struct, trait_span),
vec![
cx.expr_str(trait_span, substr.type_ident.name),
cx.expr_usize(trait_span, nfields),
)
}
StaticEnum(_, ref fields) => {
- let variant = cx.ident_of("i", trait_span);
+ let variant = Ident::new(sym::i, trait_span);
let mut arms = Vec::with_capacity(fields.len() + 1);
let mut variants = Vec::with_capacity(fields.len());
- let rvariant_arg = cx.ident_of("read_enum_variant_arg", trait_span);
+ let rvariant_arg = Ident::new(sym::read_enum_variant_arg, trait_span);
for (i, &(ident, v_span, ref parts)) in fields.iter().enumerate() {
variants.push(cx.expr_str(v_span, ident.name));
let result = cx.expr_method_call(
trait_span,
blkdecoder,
- cx.ident_of("read_enum_variant", trait_span),
+ Ident::new(sym::read_enum_variant, trait_span),
vec![variant_vec, lambda],
);
cx.expr_method_call(
trait_span,
decoder,
- cx.ident_of("read_enum", trait_span),
+ Ident::new(sym::read_enum, trait_span),
vec![
cx.expr_str(trait_span, substr.type_ident.name),
cx.lambda1(trait_span, result, blkarg),
use crate::deriving::generic::ty::*;
use crate::deriving::generic::*;
-use crate::deriving::path_std;
use rustc_ast::ast::{Expr, MetaItem};
use rustc_ast::ptr::P;
let trait_def = TraitDef {
span,
attributes: Vec::new(),
- path: path_std!(cx, default::Default),
+ path: Path::new(vec![kw::Default, sym::Default]),
additional_bounds: Vec::new(),
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: kw::Default,
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
explicit_self: None,
args: Vec::new(),
ret_ty: Self_,
use rustc_ast::ast::{Expr, ExprKind, MetaItem, Mutability};
use rustc_ast::ptr::P;
use rustc_expand::base::{Annotatable, ExtCtxt};
-use rustc_span::symbol::{sym, Symbol};
+use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
pub fn expand_deriving_rustc_encodable(
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
- let krate = "rustc_serialize";
- let typaram = "__S";
+ let krate = sym::rustc_serialize;
+ let typaram = sym::__S;
let trait_def = TraitDef {
span,
attributes: Vec::new(),
- path: Path::new_(vec![krate, "Encodable"], None, vec![], PathKind::Global),
+ path: Path::new_(vec![krate, sym::Encodable], None, vec![], PathKind::Global),
additional_bounds: Vec::new(),
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: sym::encode,
- generics: LifetimeBounds {
- lifetimes: Vec::new(),
+ generics: Bounds {
bounds: vec![(
typaram,
- vec![Path::new_(vec![krate, "Encoder"], None, vec![], PathKind::Global)],
+ vec![Path::new_(vec![krate, sym::Encoder], None, vec![], PathKind::Global)],
)],
},
explicit_self: borrowed_explicit_self(),
args: vec![(
Ptr(Box::new(Literal(Path::new_local(typaram))), Borrowed(None, Mutability::Mut)),
- "s",
+ // FIXME: we could use `sym::s` here, but making `s` a static
+ // symbol changes the symbol index ordering in a way that makes
+ // ui/lint/rfc-2457-non-ascii-idents/lint-confusable-idents.rs
+ // fail. The linting code should be fixed so that its output
+ // does not depend on the symbol index ordering.
+ Symbol::intern("s"),
)],
ret_ty: Literal(Path::new_(
- pathvec_std!(cx, result::Result),
+ pathvec_std!(result::Result),
None,
vec![
Box::new(Tuple(Vec::new())),
Box::new(Literal(Path::new_(
- vec![typaram, "Error"],
+ vec![typaram, sym::Error],
None,
vec![],
PathKind::Local,
cx: &mut ExtCtxt<'_>,
trait_span: Span,
substr: &Substructure<'_>,
- krate: &'static str,
+ krate: Symbol,
) -> P<Expr> {
let encoder = substr.nonself_args[0].clone();
// throw an underscore in front to suppress unused variable warnings
- let blkarg = cx.ident_of("_e", trait_span);
+ let blkarg = Ident::new(sym::_e, trait_span);
let blkencoder = cx.expr_ident(trait_span, blkarg);
let fn_path = cx.expr_path(cx.path_global(
trait_span,
vec![
- cx.ident_of(krate, trait_span),
- cx.ident_of("Encodable", trait_span),
- cx.ident_of("encode", trait_span),
+ Ident::new(krate, trait_span),
+ Ident::new(sym::Encodable, trait_span),
+ Ident::new(sym::encode, trait_span),
],
));
match *substr.fields {
Struct(_, ref fields) => {
- let emit_struct_field = cx.ident_of("emit_struct_field", trait_span);
+ let emit_struct_field = Ident::new(sym::emit_struct_field, trait_span);
let mut stmts = Vec::new();
for (i, &FieldInfo { name, ref self_, span, .. }) in fields.iter().enumerate() {
let name = match name {
cx.expr_method_call(
trait_span,
encoder,
- cx.ident_of("emit_struct", trait_span),
+ Ident::new(sym::emit_struct, trait_span),
vec![
cx.expr_str(trait_span, substr.type_ident.name),
cx.expr_usize(trait_span, fields.len()),
// actually exist.
let me = cx.stmt_let(trait_span, false, blkarg, encoder);
let encoder = cx.expr_ident(trait_span, blkarg);
- let emit_variant_arg = cx.ident_of("emit_enum_variant_arg", trait_span);
+ let emit_variant_arg = Ident::new(sym::emit_enum_variant_arg, trait_span);
let mut stmts = Vec::new();
if !fields.is_empty() {
let last = fields.len() - 1;
let call = cx.expr_method_call(
trait_span,
blkencoder,
- cx.ident_of("emit_enum_variant", trait_span),
+ Ident::new(sym::emit_enum_variant, trait_span),
vec![
name,
cx.expr_usize(trait_span, idx),
let ret = cx.expr_method_call(
trait_span,
encoder,
- cx.ident_of("emit_enum", trait_span),
+ Ident::new(sym::emit_enum, trait_span),
vec![cx.expr_str(trait_span, substr.type_ident.name), blk],
);
cx.expr_block(cx.block(trait_span, vec![me, cx.stmt_expr(ret)]))
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::Span;
-use ty::{LifetimeBounds, Path, Ptr, PtrTy, Self_, Ty};
+use ty::{Bounds, Path, Ptr, PtrTy, Self_, Ty};
use crate::deriving;
pub attributes: Vec<ast::Attribute>,
/// Path of the trait, including any type parameters
- pub path: Path<'a>,
+ pub path: Path,
/// Additional bounds required of any type parameters of the type,
/// other than the current trait
- pub additional_bounds: Vec<Ty<'a>>,
+ pub additional_bounds: Vec<Ty>,
/// Any extra lifetimes and/or bounds, e.g., `D: serialize::Decoder`
- pub generics: LifetimeBounds<'a>,
+ pub generics: Bounds,
/// Is it an `unsafe` trait?
pub is_unsafe: bool,
pub methods: Vec<MethodDef<'a>>,
- pub associated_types: Vec<(Ident, Ty<'a>)>,
+ pub associated_types: Vec<(Ident, Ty)>,
}
pub struct MethodDef<'a> {
/// name of the method
pub name: Symbol,
/// List of generics, e.g., `R: rand::Rng`
- pub generics: LifetimeBounds<'a>,
+ pub generics: Bounds,
/// Whether there is a self argument (outer Option) i.e., whether
/// this is a static function, and whether it is a pointer (inner
pub explicit_self: Option<Option<PtrTy>>,
/// Arguments other than the self argument
- pub args: Vec<(Ty<'a>, &'a str)>,
+ pub args: Vec<(Ty, Symbol)>,
/// Returns type
- pub ret_ty: Ty<'a>,
+ pub ret_ty: Ty,
pub attributes: Vec<ast::Attribute>,
for (ty, name) in self.args.iter() {
let ast_ty = ty.to_ty(cx, trait_.span, type_ident, generics);
- let ident = cx.ident_of(name, trait_.span);
+ let ident = Ident::new(*name, trait_.span);
arg_tys.push((ident, ast_ty));
let arg_expr = cx.expr_ident(trait_.span, ident);
)
.collect::<Vec<String>>();
- let self_arg_idents =
- self_arg_names.iter().map(|name| cx.ident_of(name, sp)).collect::<Vec<Ident>>();
+ let self_arg_idents = self_arg_names
+ .iter()
+ .map(|name| Ident::from_str_and_span(name, sp))
+ .collect::<Vec<Ident>>();
// The `vi_idents` will be bound, solely in the catch-all, to
// a series of let statements mapping each self_arg to an int
.iter()
.map(|name| {
let vi_suffix = format!("{}_vi", &name[..]);
- cx.ident_of(&vi_suffix[..], trait_.span)
+ Ident::from_str_and_span(&vi_suffix, trait_.span)
})
.collect::<Vec<Ident>>();
let mut ident_exprs = Vec::new();
for (i, struct_field) in struct_def.fields().iter().enumerate() {
let sp = struct_field.span.with_ctxt(self.span.ctxt());
- let ident = cx.ident_of(&format!("{}_{}", prefix, i), self.span);
+ let ident = Ident::from_str_and_span(&format!("{}_{}", prefix, i), self.span);
paths.push(ident.with_span_pos(sp));
let val = cx.expr_path(cx.path_ident(sp, ident));
let val = if use_temporaries { val } else { cx.expr_deref(sp, val) };
use rustc_ast::ptr::P;
use rustc_expand::base::ExtCtxt;
use rustc_span::source_map::{respan, DUMMY_SP};
-use rustc_span::symbol::{kw, Ident};
+use rustc_span::symbol::{kw, Ident, Symbol};
use rustc_span::Span;
/// The types of pointers
/// A path, e.g., `::std::option::Option::<i32>` (global). Has support
/// for type parameters and a lifetime.
#[derive(Clone)]
-pub struct Path<'a> {
- path: Vec<&'a str>,
+pub struct Path {
+ path: Vec<Symbol>,
lifetime: Option<Ident>,
- params: Vec<Box<Ty<'a>>>,
+ params: Vec<Box<Ty>>,
kind: PathKind,
}
Std,
}
-impl<'a> Path<'a> {
- pub fn new(path: Vec<&str>) -> Path<'_> {
+impl Path {
+ pub fn new(path: Vec<Symbol>) -> Path {
Path::new_(path, None, Vec::new(), PathKind::Std)
}
- pub fn new_local(path: &str) -> Path<'_> {
+ pub fn new_local(path: Symbol) -> Path {
Path::new_(vec![path], None, Vec::new(), PathKind::Local)
}
- pub fn new_<'r>(
- path: Vec<&'r str>,
+ pub fn new_(
+ path: Vec<Symbol>,
lifetime: Option<Ident>,
- params: Vec<Box<Ty<'r>>>,
+ params: Vec<Box<Ty>>,
kind: PathKind,
- ) -> Path<'r> {
+ ) -> Path {
Path { path, lifetime, params, kind }
}
self_ty: Ident,
self_generics: &Generics,
) -> ast::Path {
- let mut idents = self.path.iter().map(|s| cx.ident_of(*s, span)).collect();
+ let mut idents = self.path.iter().map(|s| Ident::new(*s, span)).collect();
let lt = mk_lifetimes(cx, span, &self.lifetime);
let tys: Vec<P<ast::Ty>> =
self.params.iter().map(|t| t.to_ty(cx, span, self_ty, self_generics)).collect();
/// A type. Supports pointers, Self, and literals.
#[derive(Clone)]
-pub enum Ty<'a> {
+pub enum Ty {
Self_,
/// &/Box/ Ty
- Ptr(Box<Ty<'a>>, PtrTy),
+ Ptr(Box<Ty>, PtrTy),
/// `mod::mod::Type<[lifetime], [Params...]>`, including a plain type
/// parameter, and things like `i32`
- Literal(Path<'a>),
+ Literal(Path),
/// includes unit
- Tuple(Vec<Ty<'a>>),
+ Tuple(Vec<Ty>),
}
pub fn borrowed_ptrty() -> PtrTy {
Borrowed(None, ast::Mutability::Not)
}
-pub fn borrowed(ty: Box<Ty<'_>>) -> Ty<'_> {
+pub fn borrowed(ty: Box<Ty>) -> Ty {
Ptr(ty, borrowed_ptrty())
}
Some(Some(borrowed_ptrty()))
}
-pub fn borrowed_self<'r>() -> Ty<'r> {
+pub fn borrowed_self() -> Ty {
borrowed(Box::new(Self_))
}
-pub fn nil_ty<'r>() -> Ty<'r> {
+pub fn nil_ty() -> Ty {
Tuple(Vec::new())
}
mk_lifetime(cx, span, lt).into_iter().collect()
}
-impl<'a> Ty<'a> {
+impl Ty {
pub fn to_ty(
&self,
cx: &ExtCtxt<'_>,
fn mk_ty_param(
cx: &ExtCtxt<'_>,
span: Span,
- name: &str,
+ name: Symbol,
attrs: &[ast::Attribute],
- bounds: &[Path<'_>],
+ bounds: &[Path],
self_ident: Ident,
self_generics: &Generics,
) -> ast::GenericParam {
cx.trait_bound(path)
})
.collect();
- cx.typaram(span, cx.ident_of(name, span), attrs.to_owned(), bounds, None)
+ cx.typaram(span, Ident::new(name, span), attrs.to_owned(), bounds, None)
}
fn mk_generics(params: Vec<ast::GenericParam>, span: Span) -> Generics {
}
}
-/// Lifetimes and bounds on type parameters
+/// Bounds on type parameters.
#[derive(Clone)]
-pub struct LifetimeBounds<'a> {
- pub lifetimes: Vec<(&'a str, Vec<&'a str>)>,
- pub bounds: Vec<(&'a str, Vec<Path<'a>>)>,
+pub struct Bounds {
+ pub bounds: Vec<(Symbol, Vec<Path>)>,
}
-impl<'a> LifetimeBounds<'a> {
- pub fn empty() -> LifetimeBounds<'a> {
- LifetimeBounds { lifetimes: Vec::new(), bounds: Vec::new() }
+impl Bounds {
+ pub fn empty() -> Bounds {
+ Bounds { bounds: Vec::new() }
}
pub fn to_generics(
&self,
self_generics: &Generics,
) -> Generics {
let generic_params = self
- .lifetimes
+ .bounds
.iter()
- .map(|&(lt, ref bounds)| {
- let bounds = bounds
- .iter()
- .map(|b| ast::GenericBound::Outlives(cx.lifetime(span, Ident::from_str(b))));
- cx.lifetime_def(span, Ident::from_str(lt), vec![], bounds.collect())
- })
- .chain(self.bounds.iter().map(|t| {
+ .map(|t| {
let (name, ref bounds) = *t;
mk_ty_param(cx, span, name, &[], &bounds, self_ty, self_generics)
- }))
+ })
.collect();
mk_generics(generic_params, span)
item: &Annotatable,
push: &mut dyn FnMut(Annotatable),
) {
- let path = Path::new_(pathvec_std!(cx, hash::Hash), None, vec![], PathKind::Std);
+ let path = Path::new_(pathvec_std!(hash::Hash), None, vec![], PathKind::Std);
- let typaram = "__H";
+ let typaram = sym::__H;
let arg = Path::new_local(typaram);
let hash_trait_def = TraitDef {
attributes: Vec::new(),
path,
additional_bounds: Vec::new(),
- generics: LifetimeBounds::empty(),
+ generics: Bounds::empty(),
is_unsafe: false,
supports_unions: false,
methods: vec![MethodDef {
name: sym::hash,
- generics: LifetimeBounds {
- lifetimes: Vec::new(),
- bounds: vec![(typaram, vec![path_std!(cx, hash::Hasher)])],
- },
+ generics: Bounds { bounds: vec![(typaram, vec![path_std!(hash::Hasher)])] },
explicit_self: borrowed_explicit_self(),
- args: vec![(Ptr(Box::new(Literal(arg)), Borrowed(None, Mutability::Mut)), "state")],
+ args: vec![(Ptr(Box::new(Literal(arg)), Borrowed(None, Mutability::Mut)), sym::state)],
ret_ty: nil_ty(),
attributes: vec![],
is_unsafe: false,
use rustc_span::Span;
macro path_local($x:ident) {
- generic::ty::Path::new_local(stringify!($x))
+ generic::ty::Path::new_local(sym::$x)
}
-macro pathvec_std($cx:expr, $($rest:ident)::+) {{
- vec![ $( stringify!($rest) ),+ ]
+macro pathvec_std($($rest:ident)::+) {{
+ vec![ $( sym::$rest ),+ ]
}}
macro path_std($($x:tt)*) {
cx: &mut ExtCtxt<'_>,
span: Span,
item: &Annotatable,
- structural_path: generic::ty::Path<'_>,
+ structural_path: generic::ty::Path,
push: &mut dyn FnMut(Annotatable),
) {
let item = match *item {
self.count_args_index_offset = sofar;
}
- fn rtpath(ecx: &ExtCtxt<'_>, s: &str) -> Vec<Ident> {
- ecx.std_path(&[sym::fmt, sym::rt, sym::v1, Symbol::intern(s)])
+ fn rtpath(ecx: &ExtCtxt<'_>, s: Symbol) -> Vec<Ident> {
+ ecx.std_path(&[sym::fmt, sym::rt, sym::v1, s])
}
fn build_count(&self, c: parse::Count) -> P<ast::Expr> {
let sp = self.macsp;
let count = |c, arg| {
- let mut path = Context::rtpath(self.ecx, "Count");
- path.push(self.ecx.ident_of(c, sp));
+ let mut path = Context::rtpath(self.ecx, sym::Count);
+ path.push(Ident::new(c, sp));
match arg {
Some(arg) => self.ecx.expr_call_global(sp, path, vec![arg]),
None => self.ecx.expr_path(self.ecx.path_global(sp, path)),
}
};
match c {
- parse::CountIs(i) => count("Is", Some(self.ecx.expr_usize(sp, i))),
+ parse::CountIs(i) => count(sym::Is, Some(self.ecx.expr_usize(sp, i))),
parse::CountIsParam(i) => {
// This needs mapping too, as `i` is referring to a macro
// argument. If `i` is not found in `count_positions` then
// the error had already been emitted elsewhere.
let i = self.count_positions.get(&i).cloned().unwrap_or(0)
+ self.count_args_index_offset;
- count("Param", Some(self.ecx.expr_usize(sp, i)))
+ count(sym::Param, Some(self.ecx.expr_usize(sp, i)))
}
- parse::CountImplied => count("Implied", None),
+ parse::CountImplied => count(sym::Implied, None),
// should never be the case, names are already resolved
parse::CountIsName(_) => panic!("should never happen"),
}
// Build the format
let fill = self.ecx.expr_lit(sp, ast::LitKind::Char(fill));
let align = |name| {
- let mut p = Context::rtpath(self.ecx, "Alignment");
- p.push(self.ecx.ident_of(name, sp));
+ let mut p = Context::rtpath(self.ecx, sym::Alignment);
+ p.push(Ident::new(name, sp));
self.ecx.path_global(sp, p)
};
let align = match arg.format.align {
- parse::AlignLeft => align("Left"),
- parse::AlignRight => align("Right"),
- parse::AlignCenter => align("Center"),
- parse::AlignUnknown => align("Unknown"),
+ parse::AlignLeft => align(sym::Left),
+ parse::AlignRight => align(sym::Right),
+ parse::AlignCenter => align(sym::Center),
+ parse::AlignUnknown => align(sym::Unknown),
};
let align = self.ecx.expr_path(align);
let flags = self.ecx.expr_u32(sp, arg.format.flags);
let prec = self.build_count(arg.format.precision);
let width = self.build_count(arg.format.width);
- let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "FormatSpec"));
+ let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, sym::FormatSpec));
let fmt = self.ecx.expr_struct(
sp,
path,
vec![
- self.ecx.field_imm(sp, self.ecx.ident_of("fill", sp), fill),
- self.ecx.field_imm(sp, self.ecx.ident_of("align", sp), align),
- self.ecx.field_imm(sp, self.ecx.ident_of("flags", sp), flags),
- self.ecx.field_imm(sp, self.ecx.ident_of("precision", sp), prec),
- self.ecx.field_imm(sp, self.ecx.ident_of("width", sp), width),
+ self.ecx.field_imm(sp, Ident::new(sym::fill, sp), fill),
+ self.ecx.field_imm(sp, Ident::new(sym::align, sp), align),
+ self.ecx.field_imm(sp, Ident::new(sym::flags, sp), flags),
+ self.ecx.field_imm(sp, Ident::new(sym::precision, sp), prec),
+ self.ecx.field_imm(sp, Ident::new(sym::width, sp), width),
],
);
- let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, "Argument"));
+ let path = self.ecx.path_global(sp, Context::rtpath(self.ecx, sym::Argument));
Some(self.ecx.expr_struct(
sp,
path,
vec![
- self.ecx.field_imm(sp, self.ecx.ident_of("position", sp), pos),
- self.ecx.field_imm(sp, self.ecx.ident_of("format", sp), fmt),
+ self.ecx.field_imm(sp, Ident::new(sym::position, sp), pos),
+ self.ecx.field_imm(sp, Ident::new(sym::format, sp), fmt),
],
))
}
let mut heads = Vec::with_capacity(self.args.len());
let names_pos: Vec<_> = (0..self.args.len())
- .map(|i| self.ecx.ident_of(&format!("arg{}", i), self.macsp))
+ .map(|i| Ident::from_str_and_span(&format!("arg{}", i), self.macsp))
.collect();
// First, build up the static array which will become our precompiled
let mut abi_args = Vec::new();
let mut i = 0;
let mut mk = || {
- let name = self.cx.ident_of(&format!("arg{}", i), self.span);
+ let name = Ident::from_str_and_span(&format!("arg{}", i), self.span);
i += 1;
name
};
let kind = ItemKind::Fn(ast::Defaultness::Final, sig, Generics::default(), block);
let item = self.cx.item(
self.span,
- self.cx.ident_of(&self.kind.fn_name(method.name), self.span),
+ Ident::from_str_and_span(&self.kind.fn_name(method.name), self.span),
self.attrs(),
kind,
);
let proc_macro = Ident::new(sym::proc_macro, span);
let krate = cx.item(span, proc_macro, Vec::new(), ast::ItemKind::ExternCrate(None));
- let bridge = cx.ident_of("bridge", span);
- let client = cx.ident_of("client", span);
- let proc_macro_ty = cx.ident_of("ProcMacro", span);
- let custom_derive = cx.ident_of("custom_derive", span);
- let attr = cx.ident_of("attr", span);
- let bang = cx.ident_of("bang", span);
+ let bridge = Ident::new(sym::bridge, span);
+ let client = Ident::new(sym::client, span);
+ let proc_macro_ty = Ident::new(sym::ProcMacro, span);
+ let custom_derive = Ident::new(sym::custom_derive, span);
+ let attr = Ident::new(sym::attr, span);
+ let bang = Ident::new(sym::bang, span);
let krate_ref = RefCell::new(ast_krate);
let decls_static = cx
.item_static(
span,
- cx.ident_of("_DECLS", span),
+ Ident::new(sym::_DECLS, span),
cx.ty_rptr(
span,
cx.ty(
let test_id = Ident::new(sym::test, attr_sp);
// creates test::$name
- let test_path = |name| cx.path(sp, vec![test_id, cx.ident_of(name, sp)]);
+ let test_path = |name| cx.path(sp, vec![test_id, Ident::from_str_and_span(name, sp)]);
// creates test::ShouldPanic::$name
- let should_panic_path =
- |name| cx.path(sp, vec![test_id, cx.ident_of("ShouldPanic", sp), cx.ident_of(name, sp)]);
+ let should_panic_path = |name| {
+ cx.path(
+ sp,
+ vec![
+ test_id,
+ Ident::from_str_and_span("ShouldPanic", sp),
+ Ident::from_str_and_span(name, sp),
+ ],
+ )
+ };
// creates test::TestType::$name
- let test_type_path =
- |name| cx.path(sp, vec![test_id, cx.ident_of("TestType", sp), cx.ident_of(name, sp)]);
+ let test_type_path = |name| {
+ cx.path(
+ sp,
+ vec![
+ test_id,
+ Ident::from_str_and_span("TestType", sp),
+ Ident::from_str_and_span(name, sp),
+ ],
+ )
+ };
// creates $name: $expr
- let field = |name, expr| cx.field_imm(sp, cx.ident_of(name, sp), expr);
+ let field = |name, expr| cx.field_imm(sp, Ident::from_str_and_span(name, sp), expr);
let test_fn = if is_bench {
// A simple ident for a lambda
- let b = cx.ident_of("b", attr_sp);
+ let b = Ident::from_str_and_span("b", attr_sp);
cx.expr_call(
sp,
let mut test_runner = cx
.test_runner
.clone()
- .unwrap_or(ecx.path(sp, vec![test_id, ecx.ident_of(runner_name, sp)]));
+ .unwrap_or(ecx.path(sp, vec![test_id, Ident::from_str_and_span(runner_name, sp)]));
test_runner.span = sp;
return;
}
+ // FIXME(richkadel): Make sure probestack plays nice with `-Z instrument-coverage`
+ // or disable it if not, similar to above early exits.
+
// Flag our internal `__rust_probestack` function as the stack probe symbol.
// This is defined in the `compiler-builtins` crate for each architecture.
llvm::AddFunctionAttrStringValue(
}
}
+ // Finalize code coverage by injecting the coverage map. Note, the coverage map will
+ // also be added to the `llvm.used` variable, created next.
+ if cx.sess().opts.debugging_opts.instrument_coverage {
+ cx.coverageinfo_finalize();
+ }
+
// Create the llvm.used variable
// This variable has type [N x i8*] and is stored in the llvm.metadata section
if !cx.used_statics().borrow().is_empty() {
cx.create_used_variable()
}
- // Finalize code coverage by injecting the coverage map
- if cx.sess().opts.debugging_opts.instrument_coverage {
- cx.coverageinfo_finalize();
- }
-
// Finalize debuginfo
if cx.sess().opts.debuginfo != DebugInfo::None {
cx.debuginfo_finalize();
fn_name, hash, num_counters, index
);
- let llfn = unsafe { llvm::LLVMRustGetInstrprofIncrementIntrinsic(self.cx().llmod) };
+ let llfn = unsafe { llvm::LLVMRustGetInstrProfIncrementIntrinsic(self.cx().llmod) };
let args = &[fn_name, hash, num_counters, index];
let args = self.check_call("call", llfn, args);
}
if attrs.flags.contains(CodegenFnAttrFlags::USED) {
- // This static will be stored in the llvm.used variable which is an array of i8*
- let cast = llvm::LLVMConstPointerCast(g, self.type_i8p());
- self.used_statics.borrow_mut().push(cast);
+ self.add_used_global(g);
}
}
}
+
+ /// Add a global value to a list to be stored in the `llvm.used` variable, an array of i8*.
+ fn add_used_global(&self, global: &'ll Value) {
+ let cast = unsafe { llvm::LLVMConstPointerCast(global, self.type_i8p()) };
+ self.used_statics.borrow_mut().push(cast);
+ }
}
--- /dev/null
+use crate::llvm;
+
+use crate::common::CodegenCx;
+use crate::coverageinfo;
+
+use log::debug;
+use rustc_codegen_ssa::coverageinfo::map::*;
+use rustc_codegen_ssa::traits::{BaseTypeMethods, ConstMethods, MiscMethods};
+use rustc_data_structures::fx::FxHashMap;
+use rustc_llvm::RustString;
+use rustc_middle::ty::Instance;
+use rustc_middle::{bug, mir};
+
+use std::collections::BTreeMap;
+use std::ffi::CString;
+use std::path::PathBuf;
+
+// FIXME(richkadel): Complete all variations of generating and exporting the coverage map to LLVM.
+// The current implementation is an initial foundation with basic capabilities (Counters, but not
+// CounterExpressions, etc.).
+
+/// Generates and exports the Coverage Map.
+///
+/// This Coverage Map complies with Coverage Mapping Format version 3 (zero-based encoded as 2),
+/// as defined at [LLVM Code Coverage Mapping Format](https://github.com/rust-lang/llvm-project/blob/llvmorg-8.0.0/llvm/docs/CoverageMappingFormat.rst#llvm-code-coverage-mapping-format)
+/// and published in Rust's current (July 2020) fork of LLVM. This version is supported by the
+/// LLVM coverage tools (`llvm-profdata` and `llvm-cov`) bundled with Rust's fork of LLVM.
+///
+/// Consequently, Rust's bundled version of Clang also generates Coverage Maps compliant with
+/// version 3. Clang's implementation of Coverage Map generation was referenced when implementing
+/// this Rust version, and though the format documentation is very explicit and detailed, some
+/// undocumented details in Clang's implementation (that may or may not be important) were also
+/// replicated for Rust's Coverage Map.
+pub fn finalize<'ll, 'tcx>(cx: &CodegenCx<'ll, 'tcx>) {
+    let mut coverage_writer = CoverageMappingWriter::new(cx);
+
+    // `take` (rather than borrow) empties the context's per-function coverage map.
+    let function_coverage_map = cx.coverage_context().take_function_coverage_map();
+
+    // Encode coverage mappings and generate function records
+    let mut function_records = Vec::<&'ll llvm::Value>::new();
+    let coverage_mappings_buffer = llvm::build_byte_buffer(|coverage_mappings_buffer| {
+        for (instance, function_coverage) in function_coverage_map.into_iter() {
+            // `None` means the function produced no coverage regions, so no record is emitted.
+            if let Some(function_record) = coverage_writer.write_function_mappings_and_record(
+                instance,
+                function_coverage,
+                coverage_mappings_buffer,
+            ) {
+                function_records.push(function_record);
+            }
+        }
+    });
+
+    // Encode all filenames covered in this module, ordered by `file_id`
+    let filenames_buffer = llvm::build_byte_buffer(|filenames_buffer| {
+        coverageinfo::write_filenames_section_to_buffer(
+            &coverage_writer.filenames,
+            filenames_buffer,
+        );
+    });
+
+    // If no mappings were encoded at all, skip emitting the coverage map global entirely.
+    if coverage_mappings_buffer.len() > 0 {
+        // Generate the LLVM IR representation of the coverage map and store it in a well-known
+        // global constant.
+        coverage_writer.write_coverage_map(
+            function_records,
+            filenames_buffer,
+            coverage_mappings_buffer,
+        );
+    }
+}
+
+/// Per-module state accumulated while writing the coverage map: the codegen context plus the
+/// de-duplicated, insertion-ordered list of covered filenames.
+struct CoverageMappingWriter<'a, 'll, 'tcx> {
+    cx: &'a CodegenCx<'ll, 'tcx>,
+    // Filenames in first-seen order; an entry's position in this vec is its global file index.
+    filenames: Vec<CString>,
+    // Reverse lookup used to avoid pushing duplicate entries into `filenames`.
+    filename_to_index: FxHashMap<CString, u32>,
+}
+
+impl<'a, 'll, 'tcx> CoverageMappingWriter<'a, 'll, 'tcx> {
+    fn new(cx: &'a CodegenCx<'ll, 'tcx>) -> Self {
+        Self { cx, filenames: Vec::new(), filename_to_index: FxHashMap::<CString, u32>::default() }
+    }
+
+    /// For the given function, get the coverage region data, stream it to the given buffer, and
+    /// then generate and return a new function record.
+    fn write_function_mappings_and_record(
+        &mut self,
+        instance: Instance<'tcx>,
+        mut function_coverage: FunctionCoverage,
+        coverage_mappings_buffer: &RustString,
+    ) -> Option<&'ll llvm::Value> {
+        let cx = self.cx;
+        let coverageinfo: &mir::CoverageInfo = cx.tcx.coverageinfo(instance.def_id());
+        debug!(
+            "Generate coverage map for: {:?}, num_counters: {}, num_expressions: {}",
+            instance, coverageinfo.num_counters, coverageinfo.num_expressions
+        );
+        debug_assert!(coverageinfo.num_counters > 0);
+
+        let regions_in_file_order = function_coverage.regions_in_file_order(cx.sess().source_map());
+        // A function with no coverage regions contributes no mapping data and no record.
+        if regions_in_file_order.len() == 0 {
+            return None;
+        }
+
+        // Stream the coverage mapping regions for the function (`instance`) to the buffer, and
+        // compute the data byte size used.
+        let old_len = coverage_mappings_buffer.len();
+        self.regions_to_mappings(regions_in_file_order, coverage_mappings_buffer);
+        let mapping_data_size = coverage_mappings_buffer.len() - old_len;
+        debug_assert!(mapping_data_size > 0);
+
+        let mangled_function_name = cx.tcx.symbol_name(instance).to_string();
+        let name_ref = coverageinfo::compute_hash(&mangled_function_name);
+        let function_source_hash = function_coverage.source_hash();
+
+        // Generate and return the function record
+        let name_ref_val = cx.const_u64(name_ref);
+        let mapping_data_size_val = cx.const_u32(mapping_data_size as u32);
+        let func_hash_val = cx.const_u64(function_source_hash);
+        Some(cx.const_struct(
+            &[name_ref_val, mapping_data_size_val, func_hash_val],
+            /*packed=*/ true,
+        ))
+    }
+
+    /// For each coverage region, extract its coverage data from the earlier coverage analysis.
+    /// Use LLVM APIs to convert the data into buffered bytes compliant with the LLVM Coverage
+    /// Mapping format.
+    fn regions_to_mappings(
+        &mut self,
+        regions_in_file_order: BTreeMap<PathBuf, BTreeMap<CoverageLoc, (usize, CoverageKind)>>,
+        coverage_mappings_buffer: &RustString,
+    ) {
+        let mut virtual_file_mapping = Vec::new();
+        let mut mapping_regions = coverageinfo::SmallVectorCounterMappingRegion::new();
+        let mut expressions = coverageinfo::SmallVectorCounterExpression::new();
+
+        for (file_id, (file_path, file_coverage_regions)) in
+            regions_in_file_order.into_iter().enumerate()
+        {
+            let file_id = file_id as u32;
+            let filename = CString::new(file_path.to_string_lossy().to_string())
+                .expect("null error converting filename to C string");
+            debug!("  file_id: {} = '{:?}'", file_id, filename);
+            // Intern the filename: reuse an existing index, or append and record a new one.
+            let filenames_index = match self.filename_to_index.get(&filename) {
+                Some(index) => *index,
+                None => {
+                    let index = self.filenames.len() as u32;
+                    self.filenames.push(filename.clone());
+                    self.filename_to_index.insert(filename, index);
+                    index
+                }
+            };
+            virtual_file_mapping.push(filenames_index);
+
+            // Invert the relationship between region id and file order:
+            // `mapping_indexes[region_id]` yields the region's position in file order.
+            let mut mapping_indexes = vec![0 as u32; file_coverage_regions.len()];
+            for (mapping_index, (region_id, _)) in file_coverage_regions.values().enumerate() {
+                mapping_indexes[*region_id] = mapping_index as u32;
+            }
+
+            for (region_loc, (region_id, region_kind)) in file_coverage_regions.into_iter() {
+                let mapping_index = mapping_indexes[region_id];
+                match region_kind {
+                    CoverageKind::Counter => {
+                        debug!(
+                            "  Counter {}, file_id: {}, region_loc: {}",
+                            mapping_index, file_id, region_loc
+                        );
+                        mapping_regions.push_from(
+                            mapping_index,
+                            file_id,
+                            region_loc.start_line,
+                            region_loc.start_col,
+                            region_loc.end_line,
+                            region_loc.end_col,
+                        );
+                    }
+                    CoverageKind::CounterExpression(lhs, op, rhs) => {
+                        debug!(
+                            "  CounterExpression {} = {} {:?} {}, file_id: {}, region_loc: {:?}",
+                            mapping_index, lhs, op, rhs, file_id, region_loc,
+                        );
+                        // An expression region pushes both a mapping region and the expression
+                        // operands (`lhs op rhs`) that compute its count.
+                        mapping_regions.push_from(
+                            mapping_index,
+                            file_id,
+                            region_loc.start_line,
+                            region_loc.start_col,
+                            region_loc.end_line,
+                            region_loc.end_col,
+                        );
+                        expressions.push_from(op, lhs, rhs);
+                    }
+                    CoverageKind::Unreachable => {
+                        debug!(
+                            "  Unreachable region, file_id: {}, region_loc: {:?}",
+                            file_id, region_loc,
+                        );
+                        bug!("Unreachable region not expected and not yet handled!")
+                        // FIXME(richkadel): implement and call
+                        // mapping_regions.push_from(...) for unreachable regions
+                    }
+                }
+            }
+        }
+
+        // Encode and append the current function's coverage mapping data
+        coverageinfo::write_mapping_to_buffer(
+            virtual_file_mapping,
+            expressions,
+            mapping_regions,
+            coverage_mappings_buffer,
+        );
+    }
+
+    /// Concatenates the encoded filenames and mappings (8-byte aligned), combines them with the
+    /// header and function records into one constant struct, and saves it to the module's
+    /// well-known coverage map global. Consumes `self`.
+    fn write_coverage_map(
+        self,
+        function_records: Vec<&'ll llvm::Value>,
+        filenames_buffer: Vec<u8>,
+        mut coverage_mappings_buffer: Vec<u8>,
+    ) {
+        let cx = self.cx;
+
+        // Concatenate the encoded filenames and encoded coverage mappings, and add additional zero
+        // bytes as-needed to ensure 8-byte alignment.
+        let mut coverage_size = coverage_mappings_buffer.len();
+        let filenames_size = filenames_buffer.len();
+        let remaining_bytes =
+            (filenames_size + coverage_size) % coverageinfo::COVMAP_VAR_ALIGN_BYTES;
+        if remaining_bytes > 0 {
+            let pad = coverageinfo::COVMAP_VAR_ALIGN_BYTES - remaining_bytes;
+            coverage_mappings_buffer.append(&mut [0].repeat(pad));
+            coverage_size += pad;
+        }
+        let filenames_and_coverage_mappings = [filenames_buffer, coverage_mappings_buffer].concat();
+        let filenames_and_coverage_mappings_val =
+            cx.const_bytes(&filenames_and_coverage_mappings[..]);
+
+        debug!(
+            "cov map: n_records = {}, filenames_size = {}, coverage_size = {}, 0-based version = {}",
+            function_records.len(),
+            filenames_size,
+            coverage_size,
+            coverageinfo::mapping_version()
+        );
+
+        // Create the coverage data header
+        let n_records_val = cx.const_u32(function_records.len() as u32);
+        let filenames_size_val = cx.const_u32(filenames_size as u32);
+        let coverage_size_val = cx.const_u32(coverage_size as u32);
+        let version_val = cx.const_u32(coverageinfo::mapping_version());
+        let cov_data_header_val = cx.const_struct(
+            &[n_records_val, filenames_size_val, coverage_size_val, version_val],
+            /*packed=*/ false,
+        );
+
+        // Create the function records array
+        let name_ref_from_u64 = cx.type_i64();
+        let mapping_data_size_from_u32 = cx.type_i32();
+        let func_hash_from_u64 = cx.type_i64();
+        // NOTE(review): this layout must stay in sync with the packed struct built per function
+        // in `write_function_mappings_and_record` (u64 name ref, u32 size, u64 hash).
+        let function_record_ty = cx.type_struct(
+            &[name_ref_from_u64, mapping_data_size_from_u32, func_hash_from_u64],
+            /*packed=*/ true,
+        );
+        let function_records_val = cx.const_array(function_record_ty, &function_records[..]);
+
+        // Create the complete LLVM coverage data value to add to the LLVM IR
+        let cov_data_val = cx.const_struct(
+            &[cov_data_header_val, function_records_val, filenames_and_coverage_mappings_val],
+            /*packed=*/ false,
+        );
+
+        // Save the coverage data value to LLVM IR
+        coverageinfo::save_map_to_mod(cx, cov_data_val);
+    }
+}
+use crate::llvm;
+
use crate::builder::Builder;
use crate::common::CodegenCx;
+
+use libc::c_uint;
use log::debug;
use rustc_codegen_ssa::coverageinfo::map::*;
-use rustc_codegen_ssa::traits::{CoverageInfoBuilderMethods, CoverageInfoMethods};
+use rustc_codegen_ssa::traits::{
+ BaseTypeMethods, CoverageInfoBuilderMethods, CoverageInfoMethods, StaticMethods,
+};
use rustc_data_structures::fx::FxHashMap;
+use rustc_llvm::RustString;
use rustc_middle::ty::Instance;
use std::cell::RefCell;
+use std::ffi::CString;
+
+pub mod mapgen;
+
+const COVMAP_VAR_ALIGN_BYTES: usize = 8;
/// A context object for maintaining all state needed by the coverageinfo module.
pub struct CrateCoverageContext<'tcx> {
// Coverage region data for each instrumented function identified by DefId.
- pub(crate) coverage_regions: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverageRegions>>,
+ pub(crate) function_coverage_map: RefCell<FxHashMap<Instance<'tcx>, FunctionCoverage>>,
}
impl<'tcx> CrateCoverageContext<'tcx> {
pub fn new() -> Self {
- Self { coverage_regions: Default::default() }
+ Self { function_coverage_map: Default::default() }
}
-}
-/// Generates and exports the Coverage Map.
-// FIXME(richkadel): Actually generate and export the coverage map to LLVM.
-// The current implementation is actually just debug messages to show the data is available.
-pub fn finalize(cx: &CodegenCx<'_, '_>) {
- let coverage_regions = &*cx.coverage_context().coverage_regions.borrow();
- for instance in coverage_regions.keys() {
- let coverageinfo = cx.tcx.coverageinfo(instance.def_id());
- debug_assert!(coverageinfo.num_counters > 0);
- debug!(
- "Generate coverage map for: {:?}, hash: {}, num_counters: {}",
- instance, coverageinfo.hash, coverageinfo.num_counters
- );
- let function_coverage_regions = &coverage_regions[instance];
- for (index, region) in function_coverage_regions.indexed_regions() {
- match region.kind {
- CoverageKind::Counter => debug!(
- " Counter {}, for {}..{}",
- index, region.coverage_span.start_byte_pos, region.coverage_span.end_byte_pos
- ),
- CoverageKind::CounterExpression(lhs, op, rhs) => debug!(
- " CounterExpression {} = {} {:?} {}, for {}..{}",
- index,
- lhs,
- op,
- rhs,
- region.coverage_span.start_byte_pos,
- region.coverage_span.end_byte_pos
- ),
- }
- }
- for unreachable in function_coverage_regions.unreachable_regions() {
- debug!(
- " Unreachable code region: {}..{}",
- unreachable.start_byte_pos, unreachable.end_byte_pos
- );
- }
+ pub fn take_function_coverage_map(&self) -> FxHashMap<Instance<'tcx>, FunctionCoverage> {
+ self.function_coverage_map.replace(FxHashMap::default())
}
}
impl CoverageInfoMethods for CodegenCx<'ll, 'tcx> {
fn coverageinfo_finalize(&self) {
- finalize(self)
+ mapgen::finalize(self)
}
}
fn add_counter_region(
&mut self,
instance: Instance<'tcx>,
+ function_source_hash: u64,
index: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
debug!(
- "adding counter to coverage map: instance={:?}, index={}, byte range {}..{}",
- instance, index, start_byte_pos, end_byte_pos,
- );
- let mut coverage_regions = self.coverage_context().coverage_regions.borrow_mut();
- coverage_regions.entry(instance).or_default().add_counter(
- index,
- start_byte_pos,
- end_byte_pos,
+ "adding counter to coverage_regions: instance={:?}, function_source_hash={}, index={}, byte range {}..{}",
+ instance, function_source_hash, index, start_byte_pos, end_byte_pos,
);
+ let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
+ coverage_regions
+ .entry(instance)
+ .or_insert_with(|| {
+ FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
+ })
+ .add_counter(function_source_hash, index, start_byte_pos, end_byte_pos);
}
fn add_counter_expression_region(
end_byte_pos: u32,
) {
debug!(
- "adding counter expression to coverage map: instance={:?}, index={}, {} {:?} {}, byte range {}..{}",
+ "adding counter expression to coverage_regions: instance={:?}, index={}, {} {:?} {}, byte range {}..{}",
instance, index, lhs, op, rhs, start_byte_pos, end_byte_pos,
);
- let mut coverage_regions = self.coverage_context().coverage_regions.borrow_mut();
- coverage_regions.entry(instance).or_default().add_counter_expression(
- index,
- lhs,
- op,
- rhs,
- start_byte_pos,
- end_byte_pos,
- );
+ let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
+ coverage_regions
+ .entry(instance)
+ .or_insert_with(|| {
+ FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
+ })
+ .add_counter_expression(index, lhs, op, rhs, start_byte_pos, end_byte_pos);
}
fn add_unreachable_region(
end_byte_pos: u32,
) {
debug!(
- "adding unreachable code to coverage map: instance={:?}, byte range {}..{}",
+ "adding unreachable code to coverage_regions: instance={:?}, byte range {}..{}",
instance, start_byte_pos, end_byte_pos,
);
- let mut coverage_regions = self.coverage_context().coverage_regions.borrow_mut();
- coverage_regions.entry(instance).or_default().add_unreachable(start_byte_pos, end_byte_pos);
+ let mut coverage_regions = self.coverage_context().function_coverage_map.borrow_mut();
+ coverage_regions
+ .entry(instance)
+ .or_insert_with(|| {
+ FunctionCoverage::with_coverageinfo(self.tcx.coverageinfo(instance.def_id()))
+ })
+ .add_unreachable(start_byte_pos, end_byte_pos);
+ }
+}
+
+/// This struct wraps an opaque reference to the C++ template instantiation of
+/// `llvm::SmallVector<coverage::CounterExpression>`. Each `coverage::CounterExpression` object is
+/// constructed from primitive-typed arguments, and pushed to the `SmallVector`, in the C++
+/// implementation of `LLVMRustCoverageSmallVectorCounterExpressionAdd()` (see
+/// `src/rustllvm/CoverageMappingWrapper.cpp`).
+pub struct SmallVectorCounterExpression<'a> {
+ pub raw: &'a mut llvm::coverageinfo::SmallVectorCounterExpression<'a>,
+}
+
+impl SmallVectorCounterExpression<'a> {
+ pub fn new() -> Self {
+ SmallVectorCounterExpression {
+ raw: unsafe { llvm::LLVMRustCoverageSmallVectorCounterExpressionCreate() },
+ }
+ }
+
+ pub fn as_ptr(&self) -> *const llvm::coverageinfo::SmallVectorCounterExpression<'a> {
+ self.raw
+ }
+
+ pub fn push_from(
+ &mut self,
+ kind: rustc_codegen_ssa::coverageinfo::CounterOp,
+ left_index: u32,
+ right_index: u32,
+ ) {
+ unsafe {
+ llvm::LLVMRustCoverageSmallVectorCounterExpressionAdd(
+ &mut *(self.raw as *mut _),
+ kind,
+ left_index,
+ right_index,
+ )
+ }
+ }
+}
+
+impl Drop for SmallVectorCounterExpression<'a> {
+ fn drop(&mut self) {
+ unsafe {
+ llvm::LLVMRustCoverageSmallVectorCounterExpressionDispose(&mut *(self.raw as *mut _));
+ }
+ }
+}
+
+/// This struct wraps an opaque reference to the C++ template instantiation of
+/// `llvm::SmallVector<coverage::CounterMappingRegion>`. Each `coverage::CounterMappingRegion`
+/// object is constructed from primitive-typed arguments, and pushed to the `SmallVector`, in the
+/// C++ implementation of `LLVMRustCoverageSmallVectorCounterMappingRegionAdd()` (see
+/// `src/rustllvm/CoverageMappingWrapper.cpp`).
+pub struct SmallVectorCounterMappingRegion<'a> {
+ pub raw: &'a mut llvm::coverageinfo::SmallVectorCounterMappingRegion<'a>,
+}
+
+impl SmallVectorCounterMappingRegion<'a> {
+ pub fn new() -> Self {
+ SmallVectorCounterMappingRegion {
+ raw: unsafe { llvm::LLVMRustCoverageSmallVectorCounterMappingRegionCreate() },
+ }
+ }
+
+ pub fn as_ptr(&self) -> *const llvm::coverageinfo::SmallVectorCounterMappingRegion<'a> {
+ self.raw
+ }
+
+ pub fn push_from(
+ &mut self,
+ index: u32,
+ file_id: u32,
+ line_start: u32,
+ column_start: u32,
+ line_end: u32,
+ column_end: u32,
+ ) {
+ unsafe {
+ llvm::LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
+ &mut *(self.raw as *mut _),
+ index,
+ file_id,
+ line_start,
+ column_start,
+ line_end,
+ column_end,
+ )
+ }
+ }
+}
+
+impl Drop for SmallVectorCounterMappingRegion<'a> {
+ fn drop(&mut self) {
+ unsafe {
+ llvm::LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
+ &mut *(self.raw as *mut _),
+ );
+ }
+ }
+}
+
+pub(crate) fn write_filenames_section_to_buffer(filenames: &Vec<CString>, buffer: &RustString) {
+ let c_str_vec = filenames.iter().map(|cstring| cstring.as_ptr()).collect::<Vec<_>>();
+ unsafe {
+ llvm::LLVMRustCoverageWriteFilenamesSectionToBuffer(
+ c_str_vec.as_ptr(),
+ c_str_vec.len(),
+ buffer,
+ );
+ }
+}
+
+pub(crate) fn write_mapping_to_buffer(
+ virtual_file_mapping: Vec<u32>,
+ expressions: SmallVectorCounterExpression<'_>,
+ mapping_regions: SmallVectorCounterMappingRegion<'_>,
+ buffer: &RustString,
+) {
+ unsafe {
+ llvm::LLVMRustCoverageWriteMappingToBuffer(
+ virtual_file_mapping.as_ptr(),
+ virtual_file_mapping.len() as c_uint,
+ expressions.as_ptr(),
+ mapping_regions.as_ptr(),
+ buffer,
+ );
}
}
+
+pub(crate) fn compute_hash(name: &str) -> u64 {
+ let name = CString::new(name).expect("null error converting hashable name to C string");
+ unsafe { llvm::LLVMRustCoverageComputeHash(name.as_ptr()) }
+}
+
+pub(crate) fn mapping_version() -> u32 {
+ unsafe { llvm::LLVMRustCoverageMappingVersion() }
+}
+
+pub(crate) fn save_map_to_mod<'ll, 'tcx>(
+ cx: &CodegenCx<'ll, 'tcx>,
+ cov_data_val: &'ll llvm::Value,
+) {
+ let covmap_var_name = llvm::build_string(|s| unsafe {
+ llvm::LLVMRustCoverageWriteMappingVarNameToString(s);
+ })
+ .expect("Rust Coverage Mapping var name failed UTF-8 conversion");
+ debug!("covmap var name: {:?}", covmap_var_name);
+
+ let covmap_section_name = llvm::build_string(|s| unsafe {
+ llvm::LLVMRustCoverageWriteSectionNameToString(cx.llmod, s);
+ })
+ .expect("Rust Coverage section name failed UTF-8 conversion");
+ debug!("covmap section name: {:?}", covmap_section_name);
+
+ let llglobal = llvm::add_global(cx.llmod, cx.val_ty(cov_data_val), &covmap_var_name);
+ llvm::set_initializer(llglobal, cov_data_val);
+ llvm::set_global_constant(llglobal, true);
+ llvm::set_linkage(llglobal, llvm::Linkage::InternalLinkage);
+ llvm::set_section(llglobal, &covmap_section_name);
+ llvm::set_alignment(llglobal, COVMAP_VAR_ALIGN_BYTES);
+ cx.add_used_global(llglobal);
+}
args: &Vec<Operand<'tcx>>,
caller_instance: ty::Instance<'tcx>,
) -> bool {
- match intrinsic {
- sym::count_code_region => {
- use coverage::count_code_region_args::*;
- self.add_counter_region(
- caller_instance,
- op_to_u32(&args[COUNTER_INDEX]),
- op_to_u32(&args[START_BYTE_POS]),
- op_to_u32(&args[END_BYTE_POS]),
- );
- true // Also inject the counter increment in the backend
- }
- sym::coverage_counter_add | sym::coverage_counter_subtract => {
- use coverage::coverage_counter_expression_args::*;
- self.add_counter_expression_region(
- caller_instance,
- op_to_u32(&args[COUNTER_EXPRESSION_INDEX]),
- op_to_u32(&args[LEFT_INDEX]),
- if intrinsic == sym::coverage_counter_add {
- CounterOp::Add
- } else {
- CounterOp::Subtract
- },
- op_to_u32(&args[RIGHT_INDEX]),
- op_to_u32(&args[START_BYTE_POS]),
- op_to_u32(&args[END_BYTE_POS]),
- );
- false // Does not inject backend code
- }
- sym::coverage_unreachable => {
- use coverage::coverage_unreachable_args::*;
- self.add_unreachable_region(
- caller_instance,
- op_to_u32(&args[START_BYTE_POS]),
- op_to_u32(&args[END_BYTE_POS]),
- );
- false // Does not inject backend code
+ if self.tcx.sess.opts.debugging_opts.instrument_coverage {
+ // Add the coverage information from the MIR to the Codegen context. Some coverage
+ // intrinsics are used only to pass along the coverage information (returns `false`
+ // for `is_codegen_intrinsic()`), but `count_code_region` is also converted into an
+ // LLVM intrinsic to increment a coverage counter.
+ match intrinsic {
+ sym::count_code_region => {
+ use coverage::count_code_region_args::*;
+ self.add_counter_region(
+ caller_instance,
+ op_to_u64(&args[FUNCTION_SOURCE_HASH]),
+ op_to_u32(&args[COUNTER_INDEX]),
+ op_to_u32(&args[START_BYTE_POS]),
+ op_to_u32(&args[END_BYTE_POS]),
+ );
+ return true; // Also inject the counter increment in the backend
+ }
+ sym::coverage_counter_add | sym::coverage_counter_subtract => {
+ use coverage::coverage_counter_expression_args::*;
+ self.add_counter_expression_region(
+ caller_instance,
+ op_to_u32(&args[COUNTER_EXPRESSION_INDEX]),
+ op_to_u32(&args[LEFT_INDEX]),
+ if intrinsic == sym::coverage_counter_add {
+ CounterOp::Add
+ } else {
+ CounterOp::Subtract
+ },
+ op_to_u32(&args[RIGHT_INDEX]),
+ op_to_u32(&args[START_BYTE_POS]),
+ op_to_u32(&args[END_BYTE_POS]),
+ );
+ return false; // Does not inject backend code
+ }
+ sym::coverage_unreachable => {
+ use coverage::coverage_unreachable_args::*;
+ self.add_unreachable_region(
+ caller_instance,
+ op_to_u32(&args[START_BYTE_POS]),
+ op_to_u32(&args[END_BYTE_POS]),
+ );
+ return false; // Does not inject backend code
+ }
+ _ => {}
+ }
+ } else {
+ // NOT self.tcx.sess.opts.debugging_opts.instrument_coverage
+ if intrinsic == sym::count_code_region {
+ // An external crate may have been pre-compiled with coverage instrumentation, and
+ // some references from the current crate to the external crate might carry along
+ // the call terminators to coverage intrinsics, like `count_code_region` (for
+ // example, when instantiating a generic function). If the current crate has
+ // `instrument_coverage` disabled, the `count_code_region` call terminators should
+ // be ignored.
+ return false; // Do not inject coverage counters inlined from external crates
}
- _ => true, // Unhandled intrinsics should be passed to `codegen_intrinsic_call()`
}
+ true // Unhandled intrinsics should be passed to `codegen_intrinsic_call()`
}
fn codegen_intrinsic_call(
let coverageinfo = tcx.coverageinfo(caller_instance.def_id());
let mangled_fn = tcx.symbol_name(caller_instance);
let (mangled_fn_name, _len_val) = self.const_str(Symbol::intern(mangled_fn.name));
- let hash = self.const_u64(coverageinfo.hash);
let num_counters = self.const_u32(coverageinfo.num_counters);
use coverage::count_code_region_args::*;
+ let hash = args[FUNCTION_SOURCE_HASH].immediate();
let index = args[COUNTER_INDEX].immediate();
debug!(
- "count_code_region to LLVM intrinsic instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})",
+ "translating Rust intrinsic `count_code_region()` to LLVM intrinsic: \
+ instrprof.increment(fn_name={}, hash={:?}, num_counters={:?}, index={:?})",
mangled_fn.name, hash, num_counters, index,
);
self.instrprof_increment(mangled_fn_name, hash, num_counters, index)
fn op_to_u32<'tcx>(op: &Operand<'tcx>) -> u32 {
Operand::scalar_from_const(op).to_u32().expect("Scalar is u32")
}
+
+fn op_to_u64<'tcx>(op: &Operand<'tcx>) -> u64 {
+ Operand::scalar_from_const(op).to_u64().expect("Scalar is u64")
+}
#![allow(non_camel_case_types)]
#![allow(non_upper_case_globals)]
+use super::coverageinfo::{SmallVectorCounterExpression, SmallVectorCounterMappingRegion};
+
use super::debuginfo::{
DIArray, DIBasicType, DIBuilder, DICompositeType, DIDerivedType, DIDescriptor, DIEnumerator,
DIFile, DIFlags, DIGlobalVariableExpression, DILexicalBlock, DINameSpace, DISPFlags, DIScope,
pub type DiagnosticHandler = unsafe extern "C" fn(&DiagnosticInfo, *mut c_void);
pub type InlineAsmDiagHandler = unsafe extern "C" fn(&SMDiagnostic, *const c_void, c_uint);
+pub mod coverageinfo {
+ use super::InvariantOpaque;
+
+ #[repr(C)]
+ pub struct SmallVectorCounterExpression<'a>(InvariantOpaque<'a>);
+
+ #[repr(C)]
+ pub struct SmallVectorCounterMappingRegion<'a>(InvariantOpaque<'a>);
+}
+
pub mod debuginfo {
use super::{InvariantOpaque, Metadata};
use bitflags::bitflags;
// Miscellaneous instructions
pub fn LLVMBuildPhi(B: &Builder<'a>, Ty: &'a Type, Name: *const c_char) -> &'a Value;
- pub fn LLVMRustGetInstrprofIncrementIntrinsic(M: &Module) -> &'a Value;
+ pub fn LLVMRustGetInstrProfIncrementIntrinsic(M: &Module) -> &'a Value;
pub fn LLVMRustBuildCall(
B: &Builder<'a>,
Fn: &'a Value,
ConstraintsLen: size_t,
) -> bool;
+ pub fn LLVMRustCoverageSmallVectorCounterExpressionCreate()
+ -> &'a mut SmallVectorCounterExpression<'a>;
+ pub fn LLVMRustCoverageSmallVectorCounterExpressionDispose(
+ Container: &'a mut SmallVectorCounterExpression<'a>,
+ );
+ pub fn LLVMRustCoverageSmallVectorCounterExpressionAdd(
+ Container: &mut SmallVectorCounterExpression<'a>,
+ Kind: rustc_codegen_ssa::coverageinfo::CounterOp,
+ LeftIndex: c_uint,
+ RightIndex: c_uint,
+ );
+
+ pub fn LLVMRustCoverageSmallVectorCounterMappingRegionCreate()
+ -> &'a mut SmallVectorCounterMappingRegion<'a>;
+ pub fn LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
+ Container: &'a mut SmallVectorCounterMappingRegion<'a>,
+ );
+ pub fn LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
+ Container: &mut SmallVectorCounterMappingRegion<'a>,
+ Index: c_uint,
+ FileID: c_uint,
+ LineStart: c_uint,
+ ColumnStart: c_uint,
+ LineEnd: c_uint,
+ ColumnEnd: c_uint,
+ );
+
+ #[allow(improper_ctypes)]
+ pub fn LLVMRustCoverageWriteFilenamesSectionToBuffer(
+ Filenames: *const *const c_char,
+ FilenamesLen: size_t,
+ BufferOut: &RustString,
+ );
+
+ #[allow(improper_ctypes)]
+ pub fn LLVMRustCoverageWriteMappingToBuffer(
+ VirtualFileMappingIDs: *const c_uint,
+ NumVirtualFileMappingIDs: c_uint,
+ Expressions: *const SmallVectorCounterExpression<'_>,
+ MappingRegions: *const SmallVectorCounterMappingRegion<'_>,
+ BufferOut: &RustString,
+ );
+
+ pub fn LLVMRustCoverageComputeHash(Name: *const c_char) -> u64;
+
+ #[allow(improper_ctypes)]
+ pub fn LLVMRustCoverageWriteSectionNameToString(M: &Module, Str: &RustString);
+
+ #[allow(improper_ctypes)]
+ pub fn LLVMRustCoverageWriteMappingVarNameToString(Str: &RustString);
+
+ pub fn LLVMRustCoverageMappingVersion() -> u32;
pub fn LLVMRustDebugMetadataVersion() -> u32;
pub fn LLVMRustVersionMajor() -> u32;
pub fn LLVMRustVersionMinor() -> u32;
use rustc_data_structures::small_c_str::SmallCStr;
use rustc_llvm::RustString;
use std::cell::RefCell;
-use std::ffi::CStr;
+use std::ffi::{CStr, CString};
use std::str::FromStr;
use std::string::FromUtf8Error;
unsafe { SectionIter { llsi: LLVMGetSections(llof) } }
}
+pub fn set_section(llglobal: &Value, section_name: &str) {
+ let section_name_cstr = CString::new(section_name).expect("unexpected CString error");
+ unsafe {
+ LLVMSetSection(llglobal, section_name_cstr.as_ptr());
+ }
+}
+
+pub fn add_global<'a>(llmod: &'a Module, ty: &'a Type, name: &str) -> &'a Value {
+ let name_cstr = CString::new(name).expect("unexpected CString error");
+ unsafe { LLVMAddGlobal(llmod, ty, name_cstr.as_ptr()) }
+}
+
+pub fn set_initializer(llglobal: &Value, constant_val: &Value) {
+ unsafe {
+ LLVMSetInitializer(llglobal, constant_val);
+ }
+}
+
+pub fn set_global_constant(llglobal: &Value, is_constant: bool) {
+ unsafe {
+ LLVMSetGlobalConstant(llglobal, if is_constant { ffi::True } else { ffi::False });
+ }
+}
+
+pub fn set_linkage(llglobal: &Value, linkage: Linkage) {
+ unsafe {
+ LLVMRustSetLinkage(llglobal, linkage);
+ }
+}
+
+pub fn set_alignment(llglobal: &Value, bytes: usize) {
+ unsafe {
+ ffi::LLVMSetAlignment(llglobal, bytes as c_uint);
+ }
+}
+
/// Safe wrapper around `LLVMGetParam`, because segfaults are no fun.
pub fn get_param(llfn: &Value, index: c_uint) -> &Value {
unsafe {
String::from_utf8(sr.bytes.into_inner())
}
+pub fn build_byte_buffer(f: impl FnOnce(&RustString)) -> Vec<u8> {
+ let sr = RustString { bytes: RefCell::new(Vec::new()) };
+ f(&sr);
+ sr.bytes.into_inner()
+}
+
pub fn twine_to_string(tr: &Twine) -> String {
unsafe {
build_string(|s| LLVMRustWriteTwineToString(tr, s)).expect("got a non-UTF8 Twine from LLVM")
libc = "0.2.50"
jobserver = "0.1.11"
tempfile = "3.1"
+pathdiff = "0.2.0"
rustc_serialize = { path = "../librustc_serialize" }
rustc_ast = { path = "../librustc_ast" }
// FIXME: Order dependent, applies to the following objects. Where should it be placed?
// Try to strip as much out of the generated object by removing unused
// sections if possible. See more comments in linker.rs
- if !sess.opts.cg.link_dead_code {
+ if sess.opts.cg.link_dead_code != Some(true) {
let keep_metadata = crate_type == CrateType::Dylib;
cmd.gc_sections(keep_metadata);
}
);
// OBJECT-FILES-NO, AUDIT-ORDER
- if sess.opts.cg.profile_generate.enabled() {
+ if sess.opts.cg.profile_generate.enabled() || sess.opts.debugging_opts.instrument_coverage {
cmd.pgo_gen();
}
+use pathdiff::diff_paths;
use rustc_data_structures::fx::FxHashSet;
use std::env;
use std::fs;
// In particular, this handles the case on unix where both paths are
// absolute but with only the root as the common directory.
fn path_relative_from(path: &Path, base: &Path) -> Option<PathBuf> {
- use std::path::Component;
-
- if path.is_absolute() != base.is_absolute() {
- path.is_absolute().then(|| PathBuf::from(path))
- } else {
- let mut ita = path.components();
- let mut itb = base.components();
- let mut comps: Vec<Component<'_>> = vec![];
- loop {
- match (ita.next(), itb.next()) {
- (None, None) => break,
- (Some(a), None) => {
- comps.push(a);
- comps.extend(ita.by_ref());
- break;
- }
- (None, _) => comps.push(Component::ParentDir),
- (Some(a), Some(b)) if comps.is_empty() && a == b => (),
- (Some(a), Some(b)) if b == Component::CurDir => comps.push(a),
- (Some(_), Some(b)) if b == Component::ParentDir => return None,
- (Some(a), Some(_)) => {
- comps.push(Component::ParentDir);
- comps.extend(itb.map(|_| Component::ParentDir));
- comps.push(a);
- comps.extend(ita.by_ref());
- break;
- }
- }
- }
- Some(comps.iter().map(|c| c.as_os_str()).collect())
- }
+ diff_paths(path, base)
}
fn get_install_prefix_rpath(config: &mut RPathConfig<'_>) -> String {
}));
}
+ if tcx.sess.opts.debugging_opts.instrument_coverage {
+ // Similar to PGO profiling, preserve symbols used by LLVM InstrProf coverage profiling.
+ const COVERAGE_WEAK_SYMBOLS: [&str; 3] =
+ ["__llvm_profile_filename", "__llvm_coverage_mapping", "__llvm_covmap"];
+
+ symbols.extend(COVERAGE_WEAK_SYMBOLS.iter().map(|sym| {
+ let exported_symbol = ExportedSymbol::NoDefId(SymbolName::new(tcx, sym));
+ (exported_symbol, SymbolExportLevel::C)
+ }));
+ }
+
if tcx.sess.opts.debugging_opts.sanitizer.contains(SanitizerSet::MEMORY) {
// Similar to profiling, preserve weak msan symbol during LTO.
const MSAN_WEAK_SYMBOLS: [&str; 2] = ["__msan_track_origins", "__msan_keep_going"];
-use rustc_data_structures::fx::FxHashMap;
-use std::collections::hash_map;
-use std::slice;
+use rustc_data_structures::sync::Lrc;
+use rustc_middle::mir;
+use rustc_span::source_map::{Pos, SourceFile, SourceMap};
+use rustc_span::{BytePos, FileName, RealFileName};
+
+use std::cmp::{Ord, Ordering};
+use std::collections::BTreeMap;
+use std::fmt;
+use std::path::PathBuf;
#[derive(Copy, Clone, Debug)]
+#[repr(C)]
pub enum CounterOp {
- Add,
+ // Note the order (and therefore the default values) is important. With the attribute
+ // `#[repr(C)]`, this enum matches the layout of the LLVM enum defined for the nested enum,
+ // `llvm::coverage::CounterExpression::ExprKind`, as shown in the following source snippet:
+ // https://github.com/rust-lang/llvm-project/blob/f208b70fbc4dee78067b3c5bd6cb92aa3ba58a1e/llvm/include/llvm/ProfileData/Coverage/CoverageMapping.h#L146
Subtract,
+ Add,
}
+#[derive(Copy, Clone, Debug)]
pub enum CoverageKind {
Counter,
CounterExpression(u32, CounterOp, u32),
+ Unreachable,
}
-pub struct CoverageSpan {
+#[derive(Clone, Debug)]
+pub struct CoverageRegion {
+ pub kind: CoverageKind,
pub start_byte_pos: u32,
pub end_byte_pos: u32,
}
-pub struct CoverageRegion {
- pub kind: CoverageKind,
- pub coverage_span: CoverageSpan,
+impl CoverageRegion {
+ pub fn source_loc(&self, source_map: &SourceMap) -> Option<(Lrc<SourceFile>, CoverageLoc)> {
+ let (start_file, start_line, start_col) =
+ lookup_file_line_col(source_map, BytePos::from_u32(self.start_byte_pos));
+ let (end_file, end_line, end_col) =
+ lookup_file_line_col(source_map, BytePos::from_u32(self.end_byte_pos));
+ let start_file_path = match &start_file.name {
+ FileName::Real(RealFileName::Named(path)) => path,
+ _ => {
+ bug!("start_file_path should be a RealFileName, but it was: {:?}", start_file.name)
+ }
+ };
+ let end_file_path = match &end_file.name {
+ FileName::Real(RealFileName::Named(path)) => path,
+ _ => bug!("end_file_path should be a RealFileName, but it was: {:?}", end_file.name),
+ };
+ if start_file_path == end_file_path {
+ Some((start_file, CoverageLoc { start_line, start_col, end_line, end_col }))
+ } else {
+ None
+ // FIXME(richkadel): There seems to be a problem computing the file location in
+ // some cases. I need to investigate this more. When I generate and show coverage
+ // for the example binary in the crates.io crate `json5format`, I had a couple of
+ // notable problems:
+ //
+ // 1. I saw a lot of coverage spans in `llvm-cov show` highlighting regions in
+ // various comments (not corresponding to rustdoc code), indicating a possible
+ // problem with the byte_pos-to-source-map implementation.
+ //
+ // 2. And (perhaps not related) when I build the aforementioned example binary with:
+ // `RUST_FLAGS="-Zinstrument-coverage" cargo build --example formatjson5`
+ // and then run that binary with
+ // `LLVM_PROFILE_FILE="formatjson5.profraw" ./target/debug/examples/formatjson5 \
+ // some.json5` for some reason the binary generates *TWO* `.profraw` files. One
+ // named `default.profraw` and the other named `formatjson5.profraw` (the expected
+ // name, in this case).
+ //
+ // If the byte range conversion is wrong, fix it. But if it
+ // is right, then it is possible for the start and end to be in different files.
+ // Can I do something other than ignore coverages that span multiple files?
+ //
+ // If I can resolve this, remove the "Option<>" result type wrapper
+ // `regions_in_file_order()` accordingly.
+ }
+ }
+}
+
+impl Default for CoverageRegion {
+ fn default() -> Self {
+ Self {
+ // The default kind (Unreachable) is a placeholder that will be overwritten before
+ // backend codegen.
+ kind: CoverageKind::Unreachable,
+ start_byte_pos: 0,
+ end_byte_pos: 0,
+ }
+ }
+}
+
+/// A source code region used with coverage information.
+#[derive(Debug, Eq, PartialEq)]
+pub struct CoverageLoc {
+ /// The (1-based) line number of the region start.
+ pub start_line: u32,
+ /// The (1-based) column number of the region start.
+ pub start_col: u32,
+ /// The (1-based) line number of the region end.
+ pub end_line: u32,
+ /// The (1-based) column number of the region end.
+ pub end_col: u32,
+}
+
+impl Ord for CoverageLoc {
+ fn cmp(&self, other: &Self) -> Ordering {
+ (self.start_line, &self.start_col, &self.end_line, &self.end_col).cmp(&(
+ other.start_line,
+ &other.start_col,
+ &other.end_line,
+ &other.end_col,
+ ))
+ }
+}
+
+impl PartialOrd for CoverageLoc {
+ fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
+ Some(self.cmp(other))
+ }
+}
+
+impl fmt::Display for CoverageLoc {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ // Customize debug format, and repeat the file name, so generated location strings are
+ // "clickable" in many IDEs.
+ write!(f, "{}:{} - {}:{}", self.start_line, self.start_col, self.end_line, self.end_col)
+ }
+}
+
+fn lookup_file_line_col(source_map: &SourceMap, byte_pos: BytePos) -> (Lrc<SourceFile>, u32, u32) {
+ let found = source_map
+ .lookup_line(byte_pos)
+ .expect("should find coverage region byte position in source");
+ let file = found.sf;
+ let line_pos = file.line_begin_pos(byte_pos);
+
+ // Use 1-based indexing.
+ let line = (found.line + 1) as u32;
+ let col = (byte_pos - line_pos).to_u32() + 1;
+
+ (file, line, col)
}
/// Collects all of the coverage regions associated with (a) injected counters, (b) counter
/// expressions (additions or subtraction), and (c) unreachable regions (always counted as zero),
/// for a given Function. Counters and counter expressions are indexed because they can be operands
-/// in an expression.
+/// in an expression. This struct also stores the `function_source_hash`, computed during
+/// instrumentation and forwarded with counters.
///
/// Note, it's important to distinguish the `unreachable` region type from what LLVM's refers to as
/// a "gap region" (or "gap area"). A gap region is a code region within a counted region (either
/// lines with only whitespace or comments). According to LLVM Code Coverage Mapping documentation,
/// "A count for a gap area is only used as the line execution count if there are no other regions
/// on a line."
-#[derive(Default)]
-pub struct FunctionCoverageRegions {
- indexed: FxHashMap<u32, CoverageRegion>,
- unreachable: Vec<CoverageSpan>,
+pub struct FunctionCoverage {
+ source_hash: u64,
+ counters: Vec<CoverageRegion>,
+ expressions: Vec<CoverageRegion>,
+ unreachable: Vec<CoverageRegion>,
+ translated: bool,
}
-impl FunctionCoverageRegions {
- pub fn add_counter(&mut self, index: u32, start_byte_pos: u32, end_byte_pos: u32) {
- self.indexed.insert(
- index,
- CoverageRegion {
- kind: CoverageKind::Counter,
- coverage_span: CoverageSpan { start_byte_pos, end_byte_pos },
- },
- );
+impl FunctionCoverage {
+ pub fn with_coverageinfo<'tcx>(coverageinfo: &'tcx mir::CoverageInfo) -> Self {
+ Self {
+ source_hash: 0, // will be set with the first `add_counter()`
+ counters: vec![CoverageRegion::default(); coverageinfo.num_counters as usize],
+ expressions: vec![CoverageRegion::default(); coverageinfo.num_expressions as usize],
+ unreachable: Vec::new(),
+ translated: false,
+ }
}
- pub fn add_counter_expression(
+    /// Adds a code region to be counted by an injected counter intrinsic, storing it at the
+    /// position given by the counter's `index`.
+ pub fn add_counter(
&mut self,
+ source_hash: u64,
index: u32,
+ start_byte_pos: u32,
+ end_byte_pos: u32,
+ ) {
+ self.source_hash = source_hash;
+ self.counters[index as usize] =
+ CoverageRegion { kind: CoverageKind::Counter, start_byte_pos, end_byte_pos };
+ }
+
+ pub fn add_counter_expression(
+ &mut self,
+ translated_index: u32,
lhs: u32,
op: CounterOp,
rhs: u32,
start_byte_pos: u32,
end_byte_pos: u32,
) {
- self.indexed.insert(
- index,
- CoverageRegion {
- kind: CoverageKind::CounterExpression(lhs, op, rhs),
- coverage_span: CoverageSpan { start_byte_pos, end_byte_pos },
- },
- );
+ let index = u32::MAX - translated_index;
+ // Counter expressions start with "translated indexes", descending from `u32::MAX`, so
+ // the range of expression indexes is disjoint from the range of counter indexes. This way,
+ // both counters and expressions can be operands in other expressions.
+ //
+ // Once all counters have been added, the final "region index" for an expression is
+ // `counters.len() + expression_index` (where `expression_index` is its index in
+ // `self.expressions`), and the expression operands (`lhs` and `rhs`) can be converted to
+ // final "region index" references by the same conversion, after subtracting from
+ // `u32::MAX`.
+ self.expressions[index as usize] = CoverageRegion {
+ kind: CoverageKind::CounterExpression(lhs, op, rhs),
+ start_byte_pos,
+ end_byte_pos,
+ };
}
pub fn add_unreachable(&mut self, start_byte_pos: u32, end_byte_pos: u32) {
- self.unreachable.push(CoverageSpan { start_byte_pos, end_byte_pos });
+ self.unreachable.push(CoverageRegion {
+ kind: CoverageKind::Unreachable,
+ start_byte_pos,
+ end_byte_pos,
+ });
+ }
+
+ pub fn source_hash(&self) -> u64 {
+ self.source_hash
+ }
+
+ fn regions(&'a mut self) -> impl Iterator<Item = &'a CoverageRegion> {
+ assert!(self.source_hash != 0);
+ self.ensure_expressions_translated();
+ self.counters.iter().chain(self.expressions.iter().chain(self.unreachable.iter()))
}
- pub fn indexed_regions(&self) -> hash_map::Iter<'_, u32, CoverageRegion> {
- self.indexed.iter()
+ pub fn regions_in_file_order(
+ &'a mut self,
+ source_map: &SourceMap,
+ ) -> BTreeMap<PathBuf, BTreeMap<CoverageLoc, (usize, CoverageKind)>> {
+ let mut regions_in_file_order = BTreeMap::new();
+ for (region_id, region) in self.regions().enumerate() {
+ if let Some((source_file, region_loc)) = region.source_loc(source_map) {
+ // FIXME(richkadel): `region.source_loc()` sometimes fails with two different
+ // filenames for the start and end byte position. This seems wrong, but for
+ // now, if encountered, the region is skipped. If resolved, convert the result
+ // to a non-option value so regions are never skipped.
+ let real_file_path = match &(*source_file).name {
+ FileName::Real(RealFileName::Named(path)) => path.clone(),
+ _ => bug!("coverage mapping expected only real, named files"),
+ };
+ let file_coverage_regions =
+ regions_in_file_order.entry(real_file_path).or_insert_with(|| BTreeMap::new());
+ file_coverage_regions.insert(region_loc, (region_id, region.kind));
+ }
+ }
+ regions_in_file_order
}
- pub fn unreachable_regions(&self) -> slice::Iter<'_, CoverageSpan> {
- self.unreachable.iter()
+ /// A one-time translation of expression operands is needed, for any operands referencing
+ /// other CounterExpressions. CounterExpression operands get an initial operand ID that is
+    /// computed by the simple translation: `u32::MAX - expression_index` because, when created,
+ /// the total number of Counters is not yet known. This function recomputes region indexes
+ /// for expressions so they start with the next region index after the last counter index.
+ fn ensure_expressions_translated(&mut self) {
+ if !self.translated {
+ self.translated = true;
+ let start = self.counters.len() as u32;
+ assert!(
+ (start as u64 + self.expressions.len() as u64) < u32::MAX as u64,
+ "the number of counters and counter expressions in a single function exceeds {}",
+ u32::MAX
+ );
+ for region in self.expressions.iter_mut() {
+ match region.kind {
+ CoverageKind::CounterExpression(lhs, op, rhs) => {
+ let lhs = to_region_index(start, lhs);
+ let rhs = to_region_index(start, rhs);
+ region.kind = CoverageKind::CounterExpression(lhs, op, rhs);
+ }
+ _ => bug!("expressions must only contain CounterExpression kinds"),
+ }
+ }
+ }
}
}
+
+fn to_region_index(start: u32, index: u32) -> u32 {
+ if index < start { index } else { start + (u32::MAX - index) }
+}
fn add_counter_region(
&mut self,
instance: Instance<'tcx>,
+ function_source_hash: u64,
index: u32,
start_byte_pos: u32,
end_byte_pos: u32,
pub trait StaticMethods: BackendTypes {
fn static_addr_of(&self, cv: Self::Value, align: Align, kind: Option<&str>) -> Self::Value;
fn codegen_static(&self, def_id: DefId, is_mutable: bool);
+
+ /// Mark the given global value as "used", to prevent a backend from potentially removing a
+ /// static variable that may otherwise appear unused.
+ ///
+ /// Static variables in Rust can be annotated with the `#[used]` attribute to direct the `rustc`
+ /// compiler to mark the variable as a "used global".
+ ///
+ /// ```no_run
+ /// #[used]
+ /// static FOO: u32 = 0;
+ /// ```
+ fn add_used_global(&self, global: Self::Value);
}
pub trait StaticBuilderMethods: BackendTypes {
use crate::stable_hasher::{HashStable, StableHasher};
use rustc_index::vec::{Idx, IndexVec};
-/// An indexed multi-map that preserves insertion order while permitting both `O(log n)` lookup of
-/// an item by key and `O(1)` lookup by index.
+/// An indexed multi-map that preserves insertion order while permitting both *O*(log *n*) lookup of
+/// an item by key and *O*(1) lookup by index.
///
/// This data structure is a hybrid of an [`IndexVec`] and a [`SortedMap`]. Like `IndexVec`,
/// `SortedIndexMultiMap` assigns a typed index to each item while preserving insertion order.
/// items will be yielded in insertion order.
///
/// Unlike a general-purpose map like `BTreeSet` or `HashSet`, `SortedMap` and
-/// `SortedIndexMultiMap` require `O(n)` time to insert a single item. This is because we may need
+/// `SortedIndexMultiMap` require *O*(*n*) time to insert a single item. This is because we may need
/// to insert into the middle of the sorted array. Users should avoid mutating this data structure
/// in-place.
///
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(nll)]
-#![cfg_attr(bootstrap, feature(track_caller))]
#![recursion_limit = "256"]
#[macro_use]
/// Exit status code used for compilation failures and invalid flags.
pub const EXIT_FAILURE: i32 = 1;
-const BUG_REPORT_URL: &str = "https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.\
- md#bug-reports";
+const BUG_REPORT_URL: &str = "https://github.com/rust-lang/rust/issues/new\
+ ?labels=C-bug%2C+I-ICE%2C+T-compiler&template=ice.md";
const ICE_REPORT_COMPILER_FLAGS: &[&str] = &["Z", "C", "crate-type"];
PpmTyped => {
abort_on_err(tcx.analysis(LOCAL_CRATE), tcx.sess);
- let annotation = TypedAnnotation { tcx, maybe_typeck_tables: Cell::new(None) };
+ let annotation = TypedAnnotation { tcx, maybe_typeck_results: Cell::new(None) };
tcx.dep_graph.with_ignore(|| f(&annotation, tcx.hir().krate()))
}
_ => panic!("Should use call_with_pp_support"),
struct TypedAnnotation<'tcx> {
tcx: TyCtxt<'tcx>,
- maybe_typeck_tables: Cell<Option<&'tcx ty::TypeckTables<'tcx>>>,
+ maybe_typeck_results: Cell<Option<&'tcx ty::TypeckResults<'tcx>>>,
}
impl<'tcx> TypedAnnotation<'tcx> {
- /// Gets the type-checking side-tables for the current body.
+ /// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
- fn tables(&self) -> &'tcx ty::TypeckTables<'tcx> {
- self.maybe_typeck_tables.get().expect("`TypedAnnotation::tables` called outside of body")
+ fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
+ self.maybe_typeck_results
+ .get()
+ .expect("`TypedAnnotation::typeck_results` called outside of body")
}
}
impl<'tcx> pprust_hir::PpAnn for TypedAnnotation<'tcx> {
fn nested(&self, state: &mut pprust_hir::State<'_>, nested: pprust_hir::Nested) {
- let old_maybe_typeck_tables = self.maybe_typeck_tables.get();
+ let old_maybe_typeck_results = self.maybe_typeck_results.get();
if let pprust_hir::Nested::Body(id) = nested {
- self.maybe_typeck_tables.set(Some(self.tcx.body_tables(id)));
+ self.maybe_typeck_results.set(Some(self.tcx.typeck_body(id)));
}
let pp_ann = &(&self.tcx.hir() as &dyn hir::intravisit::Map<'_>);
pprust_hir::PpAnn::nested(pp_ann, state, nested);
- self.maybe_typeck_tables.set(old_maybe_typeck_tables);
+ self.maybe_typeck_results.set(old_maybe_typeck_results);
}
fn pre(&self, s: &mut pprust_hir::State<'_>, node: pprust_hir::AnnNode<'_>) {
if let pprust_hir::AnnNode::Expr(_) = node {
s.s.space();
s.s.word("as");
s.s.space();
- s.s.word(self.tables().expr_ty(expr).to_string());
+ s.s.word(self.typeck_results().expr_ty(expr).to_string());
s.pclose();
}
}
E0767: include_str!("./error_codes/E0767.md"),
E0768: include_str!("./error_codes/E0768.md"),
E0769: include_str!("./error_codes/E0769.md"),
+E0770: include_str!("./error_codes/E0770.md"),
+E0771: include_str!("./error_codes/E0771.md"),
;
// E0006, // merged with E0005
// E0008, // cannot bind by-move into a pattern guard
// E0420, merged into 532
// E0421, merged into 531
// E0427, merged into 530
- E0456, // plugin `..` is not available for triple `..`
+// E0456, // plugin `..` is not available for triple `..`
E0457, // plugin `..` only found in rlib format, but must be available...
E0460, // found possibly newer version of crate `..`
E0461, // couldn't find crate `..` with expected target triple ..
Const parameters cannot depend on type parameters.
The following is therefore invalid:
-```compile_fail,E0741
+```compile_fail,E0770
#![feature(const_generics)]
fn const_id<T, const N: T>() -> T { // error
-A `#![feature]` attribute was declared for a feature that is stable in
-the current edition, but not in all editions.
+A `#![feature]` attribute was declared for a feature that is stable in the
+current edition, but not in all editions.
Erroneous code example:
-An unknown tool name found in scoped lint
+An unknown tool name was found in a scoped lint.
Erroneous code examples:
impl Marker for OverrideConst { // error!
const N: usize = 1;
}
-
-fn main() {}
+# fn main() {}
```
Because marker traits are allowed to have multiple implementations for the same
-This error indicates that a temporary value is being dropped
-while a borrow is still in active use.
+A temporary value is being dropped while a borrow is still in active use.
Erroneous code example:
let q = *p;
```
-Here, the expression `&foo()` is borrowing the expression
-`foo()`. As `foo()` is a call to a function, and not the name of
-a variable, this creates a **temporary** -- that temporary stores
-the return value from `foo()` so that it can be borrowed.
-You could imagine that `let p = bar(&foo());` is equivalent
-to this:
+Here, the expression `&foo()` is borrowing the expression `foo()`. As `foo()` is
+a call to a function, and not the name of a variable, this creates a
+**temporary** -- that temporary stores the return value from `foo()` so that it
+can be borrowed. You could imagine that `let p = bar(&foo());` is equivalent to
+this:
```compile_fail,E0597
# fn foo() -> i32 { 22 }
let q = p;
```
-Whenever a temporary is created, it is automatically dropped (freed)
-according to fixed rules. Ordinarily, the temporary is dropped
-at the end of the enclosing statement -- in this case, after the `let`.
-This is illustrated in the example above by showing that `tmp` would
-be freed as we exit the block.
+Whenever a temporary is created, it is automatically dropped (freed) according
+to fixed rules. Ordinarily, the temporary is dropped at the end of the enclosing
+statement -- in this case, after the `let`. This is illustrated in the example
+above by showing that `tmp` would be freed as we exit the block.
-To fix this problem, you need to create a local variable
-to store the value in rather than relying on a temporary.
-For example, you might change the original program to
-the following:
+To fix this problem, you need to create a local variable to store the value in
+rather than relying on a temporary. For example, you might change the original
+program to the following:
```
fn foo() -> i32 { 22 }
let q = *p;
```
-By introducing the explicit `let value`, we allocate storage
-that will last until the end of the enclosing block (when `value`
-goes out of scope). When we borrow `&value`, we are borrowing a
-local variable that already exists, and hence no temporary is created.
+By introducing the explicit `let value`, we allocate storage that will last
+until the end of the enclosing block (when `value` goes out of scope). When we
+borrow `&value`, we are borrowing a local variable that already exists, and
+hence no temporary is created.
-Temporaries are not always dropped at the end of the enclosing
-statement. In simple cases where the `&` expression is immediately
-stored into a variable, the compiler will automatically extend
-the lifetime of the temporary until the end of the enclosing
-block. Therefore, an alternative way to fix the original
+Temporaries are not always dropped at the end of the enclosing statement. In
+simple cases where the `&` expression is immediately stored into a variable, the
+compiler will automatically extend the lifetime of the temporary until the end
+of the enclosing block. Therefore, an alternative way to fix the original
program is to write `let tmp = &foo()` and not `let tmp = foo()`:
```
let q = *p;
```
-Here, we are still borrowing `foo()`, but as the borrow is assigned
-directly into a variable, the temporary will not be dropped until
-the end of the enclosing block. Similar rules apply when temporaries
-are stored into aggregate structures like a tuple or struct:
+Here, we are still borrowing `foo()`, but as the borrow is assigned directly
+into a variable, the temporary will not be dropped until the end of the
+enclosing block. Similar rules apply when temporaries are stored into aggregate
+structures like a tuple or struct:
```
// Here, two temporaries are created, but
-An feature unstable in `const` contexts was used.
+An unstable feature in `const` contexts was used.
Erroneous code example:
--- /dev/null
+The type of a const parameter references other generic parameters.
+
+Erroneous code example:
+
+```compile_fail,E0770
+#![feature(const_generics)]
+fn foo<T, const N: T>() {} // error!
+```
+
+To fix this error, use a concrete type for the const parameter:
+
+```
+#![feature(const_generics)]
+fn foo<T, const N: usize>() {}
+```
--- /dev/null
+A non-`'static` lifetime was used in a const generic. This is currently not
+allowed.
+
+Erroneous code example:
+
+```compile_fail,E0771
+#![feature(const_generics)]
+
+fn function_with_str<'a, const STRING: &'a str>() {} // error!
+```
+
+To fix this issue, the lifetime in the const generic needs to be changed to
+`'static`:
+
+```
+#![feature(const_generics)]
+
+fn function_with_str<const STRING: &'static str>() {} // ok!
+```
+
+For more information, see [GitHub issue #74052].
+
+[GitHub issue #74052]: https://github.com/rust-lang/rust/issues/74052
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(crate_visibility_modifier)]
#![feature(nll)]
-#![cfg_attr(bootstrap, feature(track_caller))]
pub use emitter::ColorConfig;
pub fn set_trace_macros(&mut self, x: bool) {
self.ecfg.trace_mac = x
}
- pub fn ident_of(&self, st: &str, sp: Span) -> Ident {
- Ident::from_str_and_span(st, sp)
- }
pub fn std_path(&self, components: &[Symbol]) -> Vec<Ident> {
let def_site = self.with_def_site_ctxt(DUMMY_SP);
iter::once(Ident::new(kw::DollarCrate, def_site))
let err = self.std_path(&[sym::result, sym::Result, sym::Err]);
let err_path = self.path_global(sp, err);
- let binding_variable = self.ident_of("__try_var", sp);
+ let binding_variable = Ident::new(sym::__try_var, sp);
let binding_pat = self.pat_ident(sp, binding_variable);
let binding_expr = self.expr_ident(sp, binding_variable);
/// Allows `#[doc(masked)]`.
(active, doc_masked, "1.21.0", Some(44027), None),
+ /// Allows `#[doc(spotlight)]`.
+ (active, doc_spotlight, "1.22.0", Some(45040), None),
+
/// Allows `#[doc(include = "some-file")]`.
(active, external_doc, "1.22.0", Some(44732), None),
--- /dev/null
+//! Validity checking for fake lang items
+
+use crate::def_id::DefId;
+use crate::{lang_items, LangItem, LanguageItems};
+
+use rustc_data_structures::fx::FxHashMap;
+use rustc_span::symbol::{sym, Symbol};
+
+use lazy_static::lazy_static;
+
+// Generates, from a single declaration list, both a symbol -> `LangItem`
+// lookup table and a `LanguageItems` membership predicate, so the two can
+// never drift out of sync when new fake lang items are added.
+macro_rules! fake_lang_items {
+    ($($item:ident, $name:ident, $method:ident;)*) => (
+
+lazy_static! {
+    // Lazily-built map from each fake lang item's symbol to its `LangItem`
+    // variant, for name-based lookup.
+    pub static ref FAKE_ITEMS_REFS: FxHashMap<Symbol, LangItem> = {
+        let mut map = FxHashMap::default();
+        $(map.insert(sym::$name, lang_items::$item);)*
+        map
+    };
+}
+
+impl LanguageItems {
+    /// Returns `true` if `item_def_id` is the `DefId` of any registered fake
+    /// lang item (compared via the generated accessor methods).
+    pub fn is_fake_lang_item(&self, item_def_id: DefId) -> bool {
+        let did = Some(item_def_id);
+
+        // `||`-fold over every declared item: true if any accessor matches.
+        $(self.$method() == did)||*
+    }
+}
+
+) }
+
+fake_lang_items! {
+// Variant name, Symbol, Method name,
+    CountCodeRegionFnLangItem, count_code_region, count_code_region_fn;
+    CoverageCounterAddFnLangItem, coverage_counter_add, coverage_counter_add_fn;
+    CoverageCounterSubtractFnLangItem, coverage_counter_subtract, coverage_counter_subtract_fn;
+}
/// To resolve the called method to a `DefId`, call [`type_dependent_def_id`] with
/// the `hir_id` of the `MethodCall` node itself.
///
- /// [`type_dependent_def_id`]: ../ty/struct.TypeckTables.html#method.type_dependent_def_id
+ /// [`type_dependent_def_id`]: ../ty/struct.TypeckResults.html#method.type_dependent_def_id
MethodCall(&'hir PathSegment<'hir>, Span, &'hir [Expr<'hir>], Span),
/// A tuple (e.g., `(a, b, c, d)`).
Tup(&'hir [Expr<'hir>]),
///
/// To resolve the path to a `DefId`, call [`qpath_res`].
///
-/// [`qpath_res`]: ../rustc_middle/ty/struct.TypeckTables.html#method.qpath_res
+/// [`qpath_res`]: ../rustc_middle/ty/struct.TypeckResults.html#method.qpath_res
#[derive(RustcEncodable, RustcDecodable, Debug, HashStable_Generic)]
pub enum QPath<'hir> {
/// Path to a definition, optionally "fully-qualified" with a `Self`
Crate(&'hir CrateItem<'hir>),
}
-impl Node<'_> {
+impl<'hir> Node<'hir> {
pub fn ident(&self) -> Option<Ident> {
match self {
Node::TraitItem(TraitItem { ident, .. })
}
}
- pub fn fn_decl(&self) -> Option<&FnDecl<'_>> {
+ pub fn fn_decl(&self) -> Option<&FnDecl<'hir>> {
match self {
Node::TraitItem(TraitItem { kind: TraitItemKind::Fn(fn_sig, _), .. })
| Node::ImplItem(ImplItem { kind: ImplItemKind::Fn(fn_sig, _), .. })
}
}
- pub fn generics(&self) -> Option<&Generics<'_>> {
+ pub fn generics(&self) -> Option<&'hir Generics<'hir>> {
match self {
Node::TraitItem(TraitItem { generics, .. })
| Node::ImplItem(ImplItem { generics, .. }) => Some(generics),
CloneTraitLangItem, sym::clone, clone_trait, Target::Trait;
SyncTraitLangItem, sym::sync, sync_trait, Target::Trait;
DiscriminantKindTraitLangItem, sym::discriminant_kind, discriminant_kind_trait, Target::Trait;
+ // The associated item of `trait DiscriminantKind`.
+ DiscriminantTypeLangItem, sym::discriminant_type, discriminant_type, Target::AssocTy;
+
FreezeTraitLangItem, sym::freeze, freeze_trait, Target::Trait;
DropTraitLangItem, sym::drop, drop_trait, Target::Trait;
StartFnLangItem, sym::start, start_fn, Target::Fn;
- CountCodeRegionFnLangItem, sym::count_code_region, count_code_region_fn, Target::Fn;
-
EhPersonalityLangItem, sym::eh_personality, eh_personality, Target::Fn;
EhCatchTypeinfoLangItem, sym::eh_catch_typeinfo, eh_catch_typeinfo, Target::Static;
TerminationTraitLangItem, sym::termination, termination, Target::Trait;
TryTraitLangItem, kw::Try, try_trait, Target::Trait;
+
+ // language items related to source code coverage instrumentation (-Zinstrument-coverage)
+ CountCodeRegionFnLangItem, sym::count_code_region, count_code_region_fn, Target::Fn;
+ CoverageCounterAddFnLangItem, sym::coverage_counter_add, coverage_counter_add_fn, Target::Fn;
+ CoverageCounterSubtractFnLangItem, sym::coverage_counter_subtract, coverage_counter_subtract_fn, Target::Fn;
}
//! [rustc dev guide]: https://rustc-dev-guide.rust-lang.org/hir.html
#![feature(crate_visibility_modifier)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
#![feature(const_fn)] // For the unsizing cast on `&[]`
#![feature(const_panic)]
#![feature(in_band_lifetimes)]
pub mod def;
pub mod definitions;
pub use rustc_span::def_id;
+pub mod fake_lang_items;
mod hir;
pub mod hir_id;
pub mod intravisit;
//! we will compare the fingerprint from the current and from the previous
//! compilation session as appropriate:
//!
-//! - `#[rustc_clean(cfg="rev2", except="typeck_tables_of")]` if we are
+//! - `#[rustc_clean(cfg="rev2", except="typeck")]` if we are
//! in `#[cfg(rev2)]`, then the fingerprints associated with
-//! `DepNode::typeck_tables_of(X)` must be DIFFERENT (`X` is the `DefId` of the
+//! `DepNode::typeck(X)` must be DIFFERENT (`X` is the `DefId` of the
//! current node).
//! - `#[rustc_clean(cfg="rev2")]` same as above, except that the
//! fingerprints must be the SAME (along with all other fingerprints).
label_strs::type_of,
// And a big part of compilation (that we eventually want to cache) is type inference
// information:
- label_strs::typeck_tables_of,
+ label_strs::typeck,
];
/// DepNodes for Hir, which is pretty much everything
}
/// Returns those indices that are true in rows `a` and `b`. This
- /// is an O(n) operation where `n` is the number of elements
+ /// is an *O*(*n*) operation where *n* is the number of elements
/// (somewhat independent from the actual size of the
/// intersection, in particular).
pub fn intersect_rows(&self, row1: R, row2: R) -> Vec<C> {
#![feature(allow_internal_unstable)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
#![feature(const_fn)]
#![feature(const_panic)]
#![feature(extend_one)]
let scrut_expr = self.tcx.hir().expect_expr(scrut_hir_id);
let scrut_ty = if let hir::ExprKind::Call(_, args) = &scrut_expr.kind {
let arg_expr = args.first().expect("try desugaring call w/out arg");
- self.in_progress_tables
- .and_then(|tables| tables.borrow().expr_ty_opt(arg_expr))
+ self.in_progress_typeck_results.and_then(|typeck_results| {
+ typeck_results.borrow().expr_ty_opt(arg_expr)
+ })
} else {
bug!("try desugaring w/out call expr as scrutinee");
};
let hir = &self.tcx.hir();
// Attempt to obtain the span of the parameter so we can
// suggest adding an explicit lifetime bound to it.
- let generics =
- self.in_progress_tables.map(|table| table.borrow().hir_owner).map(|table_owner| {
- let hir_id = hir.as_local_hir_id(table_owner);
+ let generics = self
+ .in_progress_typeck_results
+ .map(|typeck_results| typeck_results.borrow().hir_owner)
+ .map(|owner| {
+ let hir_id = hir.as_local_hir_id(owner);
let parent_id = hir.get_parent_item(hir_id);
(
// Parent item could be a `mod`, so we check the HIR before calling:
} else {
None
},
- self.tcx.generics_of(table_owner.to_def_id()),
+ self.tcx.generics_of(owner.to_def_id()),
)
});
let type_param_span = match (generics, bound_kind) {
}
fn node_ty_contains_target(&mut self, hir_id: HirId) -> Option<Ty<'tcx>> {
- let ty_opt =
- self.infcx.in_progress_tables.and_then(|tables| tables.borrow().node_type_opt(hir_id));
+ let ty_opt = self
+ .infcx
+ .in_progress_typeck_results
+ .and_then(|typeck_results| typeck_results.borrow().node_type_opt(hir_id));
match ty_opt {
Some(ty) => {
let ty = self.infcx.resolve_vars_if_possible(&ty);
if let ExprKind::MethodCall(_, call_span, exprs, _) = expr.kind {
if call_span == self.target_span
&& Some(self.target)
- == self.infcx.in_progress_tables.and_then(|tables| {
- tables.borrow().node_type_opt(exprs.first().unwrap().hir_id).map(Into::into)
+ == self.infcx.in_progress_typeck_results.and_then(|typeck_results| {
+ typeck_results
+ .borrow()
+ .node_type_opt(exprs.first().unwrap().hir_id)
+ .map(Into::into)
})
{
self.found_exact_method_call = Some(&expr);
e: &Expr<'_>,
err: &mut DiagnosticBuilder<'_>,
) {
- if let (Some(tables), None) = (self.in_progress_tables, &segment.args) {
- let borrow = tables.borrow();
+ if let (Some(typeck_results), None) = (self.in_progress_typeck_results, &segment.args) {
+ let borrow = typeck_results.borrow();
if let Some((DefKind::AssocFn, did)) = borrow.type_dependent_def(e.hir_id) {
let generics = self.tcx.generics_of(did);
if !generics.params.is_empty() {
pub struct InferCtxt<'a, 'tcx> {
pub tcx: TyCtxt<'tcx>,
- /// During type-checking/inference of a body, `in_progress_tables`
- /// contains a reference to the tables being built up, which are
+ /// During type-checking/inference of a body, `in_progress_typeck_results`
+ /// contains a reference to the typeck results being built up, which are
/// used for reading closure kinds/signatures as they are inferred,
/// and for error reporting logic to read arbitrary node types.
- pub in_progress_tables: Option<&'a RefCell<ty::TypeckTables<'tcx>>>,
+ pub in_progress_typeck_results: Option<&'a RefCell<ty::TypeckResults<'tcx>>>,
pub inner: RefCell<InferCtxtInner<'tcx>>,
/// `F: for<'b, 'tcx> where 'tcx FnOnce(InferCtxt<'b, 'tcx>)`.
pub struct InferCtxtBuilder<'tcx> {
tcx: TyCtxt<'tcx>,
- fresh_tables: Option<RefCell<ty::TypeckTables<'tcx>>>,
+ fresh_typeck_results: Option<RefCell<ty::TypeckResults<'tcx>>>,
}
pub trait TyCtxtInferExt<'tcx> {
impl TyCtxtInferExt<'tcx> for TyCtxt<'tcx> {
fn infer_ctxt(self) -> InferCtxtBuilder<'tcx> {
- InferCtxtBuilder { tcx: self, fresh_tables: None }
+ InferCtxtBuilder { tcx: self, fresh_typeck_results: None }
}
}
impl<'tcx> InferCtxtBuilder<'tcx> {
/// Used only by `rustc_typeck` during body type-checking/inference,
- /// will initialize `in_progress_tables` with fresh `TypeckTables`.
- pub fn with_fresh_in_progress_tables(mut self, table_owner: LocalDefId) -> Self {
- self.fresh_tables = Some(RefCell::new(ty::TypeckTables::new(table_owner)));
+ /// will initialize `in_progress_typeck_results` with fresh `TypeckResults`.
+ pub fn with_fresh_in_progress_typeck_results(mut self, table_owner: LocalDefId) -> Self {
+ self.fresh_typeck_results = Some(RefCell::new(ty::TypeckResults::new(table_owner)));
self
}
}
pub fn enter<R>(&mut self, f: impl for<'a> FnOnce(InferCtxt<'a, 'tcx>) -> R) -> R {
- let InferCtxtBuilder { tcx, ref fresh_tables } = *self;
- let in_progress_tables = fresh_tables.as_ref();
+ let InferCtxtBuilder { tcx, ref fresh_typeck_results } = *self;
+ let in_progress_typeck_results = fresh_typeck_results.as_ref();
f(InferCtxt {
tcx,
- in_progress_tables,
+ in_progress_typeck_results,
inner: RefCell::new(InferCtxtInner::new()),
lexical_region_resolutions: RefCell::new(None),
selection_cache: Default::default(),
region_constraints_snapshot: RegionSnapshot,
universe: ty::UniverseIndex,
was_in_snapshot: bool,
- _in_progress_tables: Option<Ref<'a, ty::TypeckTables<'tcx>>>,
+ _in_progress_typeck_results: Option<Ref<'a, ty::TypeckResults<'tcx>>>,
}
impl<'a, 'tcx> InferCtxt<'a, 'tcx> {
region_constraints_snapshot: inner.unwrap_region_constraints().start_snapshot(),
universe: self.universe(),
was_in_snapshot: in_snapshot,
- // Borrow tables "in progress" (i.e., during typeck)
+ // Borrow typeck results "in progress" (i.e., during typeck)
// to ban writes from within a snapshot to them.
- _in_progress_tables: self.in_progress_tables.map(|tables| tables.borrow()),
+ _in_progress_typeck_results: self
+ .in_progress_typeck_results
+ .map(|typeck_results| typeck_results.borrow()),
}
}
region_constraints_snapshot,
universe,
was_in_snapshot,
- _in_progress_tables,
+ _in_progress_typeck_results,
} = snapshot;
self.in_snapshot.set(was_in_snapshot);
region_constraints_snapshot: _,
universe: _,
was_in_snapshot,
- _in_progress_tables,
+ _in_progress_typeck_results,
} = snapshot;
self.in_snapshot.set(was_in_snapshot);
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(const_fn)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
#![feature(const_panic)]
#![feature(extend_one)]
#![feature(never_type)]
pub mod util;
pub use interface::{run_compiler, Config};
+pub use passes::{DEFAULT_EXTERN_QUERY_PROVIDERS, DEFAULT_QUERY_PROVIDERS};
pub use queries::Queries;
#[cfg(test)]
use crate::util;
use log::{info, log_enabled, warn};
+use once_cell::sync::Lazy;
use rustc_ast::mut_visit::MutVisitor;
use rustc_ast::{self, ast, visit};
use rustc_codegen_ssa::back::link::emit_metadata;
use rustc_middle::dep_graph::DepGraph;
use rustc_middle::middle;
use rustc_middle::middle::cstore::{CrateStore, MetadataLoader, MetadataLoaderDyn};
+use rustc_middle::ty::query::Providers;
use rustc_middle::ty::steal::Steal;
use rustc_middle::ty::{self, GlobalCtxt, ResolverOutputs, TyCtxt};
use rustc_mir as mir;
)
});
- // If we're actually rustdoc then there's no need to actually compile
- // anything, so switch everything to just looping
- let mut should_loop = sess.opts.actually_rustdoc;
if let Some(PpMode::PpmSource(PpSourceMode::PpmEveryBodyLoops)) = sess.opts.pretty {
- should_loop |= true;
- }
- if should_loop {
log::debug!("replacing bodies with loop {{}}");
util::ReplaceBodyWithLoop::new(&mut resolver).visit_crate(&mut krate);
}
Ok(outputs)
}
-pub fn default_provide(providers: &mut ty::query::Providers) {
+pub static DEFAULT_QUERY_PROVIDERS: Lazy<Providers> = Lazy::new(|| {
+ let providers = &mut Providers::default();
providers.analysis = analysis;
proc_macro_decls::provide(providers);
plugin::build::provide(providers);
rustc_lint::provide(providers);
rustc_symbol_mangling::provide(providers);
rustc_codegen_ssa::provide(providers);
-}
+ *providers
+});
-pub fn default_provide_extern(providers: &mut ty::query::Providers) {
- rustc_metadata::provide_extern(providers);
- rustc_codegen_ssa::provide_extern(providers);
-}
+pub static DEFAULT_EXTERN_QUERY_PROVIDERS: Lazy<Providers> = Lazy::new(|| {
+ let mut extern_providers = *DEFAULT_QUERY_PROVIDERS;
+ rustc_metadata::provide_extern(&mut extern_providers);
+ rustc_codegen_ssa::provide_extern(&mut extern_providers);
+ extern_providers
+});
pub struct QueryContext<'tcx>(&'tcx GlobalCtxt<'tcx>);
let query_result_on_disk_cache = rustc_incremental::load_query_result_cache(sess);
let codegen_backend = compiler.codegen_backend();
- let mut local_providers = ty::query::Providers::default();
- default_provide(&mut local_providers);
+ let mut local_providers = *DEFAULT_QUERY_PROVIDERS;
codegen_backend.provide(&mut local_providers);
- let mut extern_providers = local_providers;
- default_provide_extern(&mut extern_providers);
+ let mut extern_providers = *DEFAULT_EXTERN_QUERY_PROVIDERS;
+ codegen_backend.provide(&mut extern_providers);
codegen_backend.provide_extern(&mut extern_providers);
if let Some(callback) = compiler.override_queries {
use rustc_span::symbol::sym;
use std::any::Any;
use std::cell::{Ref, RefCell, RefMut};
-use std::mem;
use std::rc::Rc;
/// Represent the result of a query.
ret
}
-
- // This method is different to all the other methods in `Compiler` because
- // it lacks a `Queries` entry. It's also not currently used. It does serve
- // as an example of how `Compiler` can be used, with additional steps added
- // between some passes. And see `rustc_driver::run_compiler` for a more
- // complex example.
- pub fn compile(&self) -> Result<()> {
- let linker = self.enter(|queries| {
- queries.prepare_outputs()?;
-
- if self.session().opts.output_types.contains_key(&OutputType::DepInfo)
- && self.session().opts.output_types.len() == 1
- {
- return Ok(None);
- }
-
- queries.global_ctxt()?;
-
- // Drop AST after creating GlobalCtxt to free memory.
- mem::drop(queries.expansion()?.take());
-
- queries.ongoing_codegen()?;
-
- let linker = queries.linker()?;
- Ok(Some(linker))
- })?;
-
- if let Some(linker) = linker {
- linker.link()?
- }
-
- Ok(())
- }
}
untracked!(incremental, Some(String::from("abc")));
// `link_arg` is omitted because it just forwards to `link_args`.
untracked!(link_args, vec![String::from("abc"), String::from("def")]);
- untracked!(link_dead_code, true);
+ untracked!(link_dead_code, Some(true));
untracked!(linker, Some(PathBuf::from("linker")));
untracked!(linker_flavor, Some(LinkerFlavor::Gcc));
untracked!(no_stack_check, true);
static mut LOAD: fn() -> Box<dyn CodegenBackend> = || unreachable!();
INIT.call_once(|| {
- let codegen_name = sess
- .opts
- .debugging_opts
- .codegen_backend
- .as_ref()
- .unwrap_or(&sess.target.target.options.codegen_backend);
- let backend = match &codegen_name[..] {
+ let codegen_name = sess.opts.debugging_opts.codegen_backend.as_deref().unwrap_or("llvm");
+ let backend = match codegen_name {
filename if filename.contains('.') => load_backend_from_dylib(filename.as_ref()),
codegen_name => get_builtin_codegen_backend(codegen_name),
};
// Check if the method call actually calls the libcore
// `IntoIterator::into_iter`.
- let def_id = cx.tables().type_dependent_def_id(expr.hir_id).unwrap();
+ let def_id = cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap();
match cx.tcx.trait_of_item(def_id) {
Some(trait_id) if cx.tcx.is_diagnostic_item(sym::IntoIterator, trait_id) => {}
_ => return,
// `Box` is the only thing that values can be moved out of via
// method call. `Box::new([1]).into_iter()` should trigger this
// lint.
- let mut recv_ty = cx.tables().expr_ty(receiver_arg);
+ let mut recv_ty = cx.typeck_results().expr_ty(receiver_arg);
let mut num_box_derefs = 0;
while recv_ty.is_box() {
num_box_derefs += 1;
// Make sure that there is an autoref coercion at the expected
// position. The first `num_box_derefs` adjustments are the derefs
// of the box.
- match cx.tables().expr_adjustments(receiver_arg).get(num_box_derefs) {
+ match cx.typeck_results().expr_adjustments(receiver_arg).get(num_box_derefs) {
Some(Adjustment { kind: Adjust::Borrow(_), .. }) => {}
_ => return,
}
// Emit lint diagnostic.
- let target = match cx.tables().expr_ty_adjusted(receiver_arg).kind {
+ let target = match cx.typeck_results().expr_ty_adjusted(receiver_arg).kind {
ty::Ref(_, ty::TyS { kind: ty::Array(..), .. }, _) => "[T; N]",
ty::Ref(_, ty::TyS { kind: ty::Slice(..), .. }, _) => "[T]",
}
fn check_expr(&mut self, cx: &LateContext<'_>, e: &hir::Expr<'_>) {
- let ty = cx.tables().node_type(e.hir_id);
+ let ty = cx.typeck_results().node_type(e.hir_id);
self.check_heap_type(cx, e.span, ty);
}
}
fn check_pat(&mut self, cx: &LateContext<'_>, pat: &hir::Pat<'_>) {
if let PatKind::Struct(ref qpath, field_pats, _) = pat.kind {
let variant = cx
- .tables()
+ .typeck_results()
.pat_ty(pat)
.ty_adt_def()
.expect("struct pattern type is not an ADT")
}
if let PatKind::Binding(binding_annot, _, ident, None) = fieldpat.pat.kind {
if cx.tcx.find_field_index(ident, &variant)
- == Some(cx.tcx.field_index(fieldpat.hir_id, cx.tables()))
+ == Some(cx.tcx.field_index(fieldpat.hir_id, cx.typeck_results()))
{
cx.struct_span_lint(NON_SHORTHAND_FIELD_PATTERNS, fieldpat.span, |lint| {
let mut err = lint
if !def_id_is_transmute(cx, did) {
return None;
}
- let sig = cx.tables().node_type(expr.hir_id).fn_sig(cx.tcx);
+ let sig = cx.typeck_results().node_type(expr.hir_id).fn_sig(cx.tcx);
let from = sig.inputs().skip_binder()[0];
let to = sig.output().skip_binder();
return Some((from, to));
}
} else if let hir::ExprKind::MethodCall(_, _, ref args, _) = expr.kind {
// Find problematic calls to `MaybeUninit::assume_init`.
- let def_id = cx.tables().type_dependent_def_id(expr.hir_id)?;
+ let def_id = cx.typeck_results().type_dependent_def_id(expr.hir_id)?;
if cx.tcx.is_diagnostic_item(sym::assume_init, def_id) {
// This is a call to *some* method named `assume_init`.
// See if the `self` parameter is one of the dangerous constructors.
None
}
+ /// Test if this enum has several actually "existing" variants.
+ /// Zero-sized uninhabited variants do not always have a tag assigned and thus do not "exist".
+ fn is_multi_variant(adt: &ty::AdtDef) -> bool {
+ // As an approximation, we only count dataless variants. Those are definitely inhabited.
+ let existing_variants = adt.variants.iter().filter(|v| v.fields.is_empty()).count();
+ existing_variants > 1
+ }
+
/// Return `Some` only if we are sure this type does *not*
/// allow zero initialization.
fn ty_find_init_error<'tcx>(
}
// Recurse and checks for some compound types.
Adt(adt_def, substs) if !adt_def.is_union() => {
- // First check f this ADT has a layout attribute (like `NonNull` and friends).
+ // First check if this ADT has a layout attribute (like `NonNull` and friends).
use std::ops::Bound;
match tcx.layout_scalar_valid_range(adt_def.did) {
// We exploit here that `layout_scalar_valid_range` will never
)
})
}
- // Multi-variant enums are tricky: if all but one variant are
- // uninhabited, we might actually do layout like for a single-variant
- // enum, and then even leaving them uninitialized could be okay.
- _ => None, // Conservative fallback for multi-variant enum.
+ // Multi-variant enum.
+ _ => {
+ if init == InitKind::Uninit && is_multi_variant(adt_def) {
+ let span = tcx.def_span(adt_def.did);
+ Some((
+ "enums have to be initialized to a variant".to_string(),
+ Some(span),
+ ))
+ } else {
+ // In principle, for zero-initialization we could figure out which variant corresponds
+ // to tag 0, and check that... but for now we just accept all zero-initializations.
+ None
+ }
+ }
}
}
Tuple(..) => {
// This conjures an instance of a type out of nothing,
// using zeroed or uninitialized memory.
// We are extremely conservative with what we warn about.
- let conjured_ty = cx.tables().expr_ty(expr);
+ let conjured_ty = cx.typeck_results().expr_ty(expr);
if let Some((msg, span)) = ty_find_init_error(cx.tcx, conjured_ty, init) {
cx.struct_span_lint(INVALID_VALUE, expr.span, |lint| {
let mut err = lint.build(&format!(
/// Current body, or `None` if outside a body.
pub enclosing_body: Option<hir::BodyId>,
- /// Type-checking side-tables for the current body. Access using the `tables`
- /// and `maybe_tables` methods, which handle querying the tables on demand.
+ /// Type-checking results for the current body. Access using the `typeck_results`
+ /// and `maybe_typeck_results` methods, which handle querying the typeck results on demand.
// FIXME(eddyb) move all the code accessing internal fields like this,
// to this module, to avoid exposing it to lint logic.
- pub(super) cached_typeck_tables: Cell<Option<&'tcx ty::TypeckTables<'tcx>>>,
+ pub(super) cached_typeck_results: Cell<Option<&'tcx ty::TypeckResults<'tcx>>>,
/// Parameter environment for the item we are in.
pub param_env: ty::ParamEnv<'tcx>,
}
impl<'tcx> LateContext<'tcx> {
- /// Gets the type-checking side-tables for the current body,
+ /// Gets the type-checking results for the current body,
/// or `None` if outside a body.
- pub fn maybe_typeck_tables(&self) -> Option<&'tcx ty::TypeckTables<'tcx>> {
- self.cached_typeck_tables.get().or_else(|| {
+ pub fn maybe_typeck_results(&self) -> Option<&'tcx ty::TypeckResults<'tcx>> {
+ self.cached_typeck_results.get().or_else(|| {
self.enclosing_body.map(|body| {
- let tables = self.tcx.body_tables(body);
- self.cached_typeck_tables.set(Some(tables));
- tables
+ let typeck_results = self.tcx.typeck_body(body);
+ self.cached_typeck_results.set(Some(typeck_results));
+ typeck_results
})
})
}
- /// Gets the type-checking side-tables for the current body.
+ /// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
- pub fn tables(&self) -> &'tcx ty::TypeckTables<'tcx> {
- self.maybe_typeck_tables().expect("`LateContext::tables` called outside of body")
+ pub fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
+ self.maybe_typeck_results().expect("`LateContext::typeck_results` called outside of body")
}
/// Returns the final resolution of a `QPath`, or `Res::Err` if unavailable.
- /// Unlike `.tables().qpath_res(qpath, id)`, this can be used even outside
+ /// Unlike `.typeck_results().qpath_res(qpath, id)`, this can be used even outside
/// bodies (e.g. for paths in `hir::Ty`), without any risk of ICE-ing.
pub fn qpath_res(&self, qpath: &hir::QPath<'_>, id: hir::HirId) -> Res {
match *qpath {
hir::QPath::Resolved(_, ref path) => path.res,
hir::QPath::TypeRelative(..) => self
- .maybe_typeck_tables()
- .and_then(|tables| tables.type_dependent_def(id))
+ .maybe_typeck_results()
+ .and_then(|typeck_results| typeck_results.type_dependent_def(id))
.map_or(Res::Err, |(kind, def_id)| Res::Def(kind, def_id)),
}
}
fn visit_nested_body(&mut self, body_id: hir::BodyId) {
let old_enclosing_body = self.context.enclosing_body.replace(body_id);
- let old_cached_typeck_tables = self.context.cached_typeck_tables.get();
+ let old_cached_typeck_results = self.context.cached_typeck_results.get();
- // HACK(eddyb) avoid trashing `cached_typeck_tables` when we're
+ // HACK(eddyb) avoid trashing `cached_typeck_results` when we're
// nested in `visit_fn`, which may have already resulted in them
// being queried.
if old_enclosing_body != Some(body_id) {
- self.context.cached_typeck_tables.set(None);
+ self.context.cached_typeck_results.set(None);
}
let body = self.context.tcx.hir().body(body_id);
// See HACK comment above.
if old_enclosing_body != Some(body_id) {
- self.context.cached_typeck_tables.set(old_cached_typeck_tables);
+ self.context.cached_typeck_results.set(old_cached_typeck_results);
}
}
span: Span,
id: hir::HirId,
) {
- // Wrap in tables here, not just in visit_nested_body,
+ // Set up the typeck results here, not just in visit_nested_body,
// in order for `check_fn` to be able to use them.
let old_enclosing_body = self.context.enclosing_body.replace(body_id);
- let old_cached_typeck_tables = self.context.cached_typeck_tables.take();
+ let old_cached_typeck_results = self.context.cached_typeck_results.take();
let body = self.context.tcx.hir().body(body_id);
lint_callback!(self, check_fn, fk, decl, body, span, id);
hir_visit::walk_fn(self, fk, decl, body_id, span, id);
lint_callback!(self, check_fn_post, fk, decl, body, span, id);
self.context.enclosing_body = old_enclosing_body;
- self.context.cached_typeck_tables.set(old_cached_typeck_tables);
+ self.context.cached_typeck_results.set(old_cached_typeck_results);
}
fn visit_variant_data(
let context = LateContext {
tcx,
enclosing_body: None,
- cached_typeck_tables: Cell::new(None),
+ cached_typeck_results: Cell::new(None),
param_env: ty::ParamEnv::empty(),
access_levels,
lint_store: unerased_lint_store(tcx),
let context = LateContext {
tcx,
enclosing_body: None,
- cached_typeck_tables: Cell::new(None),
+ cached_typeck_results: Cell::new(None),
param_env: ty::ParamEnv::empty(),
access_levels,
lint_store: unerased_lint_store(tcx),
#![feature(never_type)]
#![feature(nll)]
#![feature(or_patterns)]
-#![cfg_attr(bootstrap, feature(track_caller))]
#![recursion_limit = "256"]
#[macro_use]
repr_str, val, t, actually, t
));
if let Some(sugg_ty) =
- get_type_suggestion(&cx.tables().node_type(expr.hir_id), val, negative)
+ get_type_suggestion(&cx.typeck_results().node_type(expr.hir_id), val, negative)
{
if let Some(pos) = repr_str.chars().position(|c| c == 'i' || c == 'u') {
let (sans_suffix, _) = repr_str.split_at(pos);
if let Node::Expr(par_e) = cx.tcx.hir().get(parent_id) {
match par_e.kind {
hir::ExprKind::Cast(..) => {
- if let ty::Char = cx.tables().expr_ty(par_e).kind {
+ if let ty::Char = cx.typeck_results().expr_ty(par_e).kind {
cx.struct_span_lint(OVERFLOWING_LITERALS, par_e.span, |lint| {
lint.build("only `u8` can be cast into `char`")
.span_suggestion(
e: &'tcx hir::Expr<'tcx>,
lit: &hir::Lit,
) {
- match cx.tables().node_type(e.hir_id).kind {
+ match cx.typeck_results().node_type(e.hir_id).kind {
ty::Int(t) => {
match lit.node {
ast::LitKind::Int(v, ast::LitIntType::Signed(_) | ast::LitIntType::Unsuffixed) => {
// Normalize the binop so that the literal is always on the RHS in
// the comparison
let norm_binop = if swap { rev_binop(binop) } else { binop };
- match cx.tables().node_type(expr.hir_id).kind {
+ match cx.typeck_results().node_type(expr.hir_id).kind {
ty::Int(int_ty) => {
let (min, max) = int_ty_range(int_ty);
let lit_val: i128 = match lit.kind {
match ty.kind {
ty::FnPtr(_) => true,
ty::Ref(..) => true,
+ ty::Adt(def, _)
+ if def.is_box() && matches!(self.mode, ImproperCTypesMode::Definitions) =>
+ {
+ true
+ }
ty::Adt(def, substs) if def.repr.transparent() && !def.is_union() => {
let guaranteed_nonnull_optimization = self
.cx
}
/// Check if this enum can be safely exported based on the "nullable pointer optimization".
- /// Currently restricted to function pointers, references, `core::num::NonZero*`,
+ /// Currently restricted to function pointers, boxes, references, `core::num::NonZero*`,
/// `core::ptr::NonNull`, and `#[repr(transparent)]` newtypes.
fn is_repr_nullable_ptr(
&self,
}
match ty.kind {
+ ty::Adt(def, _)
+ if def.is_box() && matches!(self.mode, ImproperCTypesMode::Definitions) =>
+ {
+ FfiSafe
+ }
+
ty::Adt(def, substs) => {
if def.is_phantom_data() {
return FfiPhantom(ty);
return;
}
- let ty = cx.tables().expr_ty(&expr);
+ let ty = cx.typeck_results().expr_ty(&expr);
let type_permits_lack_of_use = check_must_use_ty(cx, ty, &expr, s.span, "", "", 1);
let mut fn_warned = false;
_ => None,
}
}
- hir::ExprKind::MethodCall(..) => cx.tables().type_dependent_def_id(expr.hir_id),
+ hir::ExprKind::MethodCall(..) => cx.typeck_results().type_dependent_def_id(expr.hir_id),
_ => None,
};
if let Some(def_id) = maybe_def_id {
_ => return,
}
- for adj in cx.tables().expr_adjustments(e) {
+ for adj in cx.typeck_results().expr_adjustments(e) {
if let adjustment::Adjust::Borrow(adjustment::AutoBorrow::Ref(_, m)) = adj.kind {
cx.struct_span_lint(UNUSED_ALLOCATION, e.span, |lint| {
let msg = match m {
optional_components.push("riscv");
}
- let required_components =
- &["ipo", "bitreader", "bitwriter", "linker", "asmparser", "lto", "instrumentation"];
+ let required_components = &[
+ "ipo",
+ "bitreader",
+ "bitwriter",
+ "linker",
+ "asmparser",
+ "lto",
+ "coverage",
+ "instrumentation",
+ ];
let components = output(Command::new(&llvm_config).arg("--components"));
let mut components = components.split_whitespace().collect::<Vec<_>>();
cfg.file("../rustllvm/PassWrapper.cpp")
.file("../rustllvm/RustWrapper.cpp")
.file("../rustllvm/ArchiveWrapper.cpp")
+ .file("../rustllvm/CoverageMappingWrapper.cpp")
.file("../rustllvm/Linker.cpp")
.cpp(true)
.cpp_link_stdlib(None) // we handle this below
"stdc++"
};
+ // RISC-V requires libatomic for sub-word atomic operations
+ if target.starts_with("riscv") {
+ println!("cargo:rustc-link-lib=atomic");
+ }
+
// C++ runtime library
if !target.contains("msvc") {
if let Some(s) = llvm_static_stdcpp {
pub bytes: RefCell<Vec<u8>>,
}
+impl RustString {
+    /// Number of bytes currently accumulated in the string buffer.
+    pub fn len(&self) -> usize {
+        self.bytes.borrow().len()
+    }
+
+    /// Returns `true` if no bytes have been written to the buffer yet.
+    /// Provided alongside `len` (see the `len_without_is_empty` lint).
+    pub fn is_empty(&self) -> bool {
+        self.bytes.borrow().is_empty()
+    }
+}
+
/// Appending to a Rust string -- used by RawRustStringOstream.
#[no_mangle]
-#[cfg_attr(not(bootstrap), allow(improper_ctypes_definitions))]
+#[allow(improper_ctypes_definitions)]
pub unsafe extern "C" fn LLVMRustStringWriteImpl(
sr: &RustString,
ptr: *const c_char,
let mut prefill_stream = quote! {};
let mut counter = 0u32;
let mut keys = HashSet::<String>::new();
+ let mut prev_key: Option<String> = None;
+ let mut errors = Vec::<String>::new();
- let mut check_dup = |str: &str| {
+ let mut check_dup = |str: &str, errors: &mut Vec<String>| {
if !keys.insert(str.to_string()) {
- panic!("Symbol `{}` is duplicated", str);
+ errors.push(format!("Symbol `{}` is duplicated", str));
}
};
+ let mut check_order = |str: &str, errors: &mut Vec<String>| {
+ if let Some(ref prev_str) = prev_key {
+ if str < prev_str {
+ errors.push(format!("Symbol `{}` must precede `{}`", str, prev_str));
+ }
+ }
+ prev_key = Some(str.to_string());
+ };
+
// Generate the listed keywords.
for keyword in &input.keywords.0 {
let name = &keyword.name;
let value = &keyword.value;
- check_dup(&value.value());
+ check_dup(&value.value(), &mut errors);
prefill_stream.extend(quote! {
#value,
});
Some(value) => value.value(),
None => name.to_string(),
};
- check_dup(&value);
+ check_dup(&value, &mut errors);
+ check_order(&name.to_string(), &mut errors);
prefill_stream.extend(quote! {
#value,
});
// Generate symbols for the strings "0", "1", ..., "9".
for n in 0..10 {
let n = n.to_string();
- check_dup(&n);
+ check_dup(&n, &mut errors);
prefill_stream.extend(quote! {
#n,
});
counter += 1;
}
+ if !errors.is_empty() {
+ for error in errors.into_iter() {
+ eprintln!("error: {}", error)
+ }
+ panic!("errors in `Keywords` and/or `Symbols`");
+ }
+
let tt = TokenStream::from(quote! {
macro_rules! keywords {
() => {
//! Validates all used crates and extern libraries and loads their metadata
-use crate::locator::{CrateLocator, CratePaths};
+use crate::dynamic_lib::DynamicLibrary;
+use crate::locator::{CrateError, CrateLocator, CratePaths};
use crate::rmeta::{CrateDep, CrateMetadata, CrateNumMap, CrateRoot, MetadataBlob};
use rustc_ast::expand::allocator::{global_allocator_spans, AllocatorKind};
use rustc_data_structures::fx::FxHashSet;
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::Lrc;
-use rustc_errors::struct_span_err;
use rustc_expand::base::SyntaxExtension;
use rustc_hir::def_id::{CrateNum, LocalDefId, LOCAL_CRATE};
use rustc_hir::definitions::Definitions;
use rustc_index::vec::IndexVec;
-use rustc_middle::middle::cstore::DepKind;
-use rustc_middle::middle::cstore::{
- CrateSource, ExternCrate, ExternCrateSource, MetadataLoaderDyn,
-};
+use rustc_middle::middle::cstore::{CrateSource, DepKind, ExternCrate};
+use rustc_middle::middle::cstore::{ExternCrateSource, MetadataLoaderDyn};
use rustc_middle::ty::TyCtxt;
use rustc_session::config::{self, CrateType, ExternLocation};
use rustc_session::lint;
use log::{debug, info, log_enabled};
use proc_macro::bridge::client::ProcMacro;
use std::path::Path;
-use std::{cmp, fs};
+use std::{cmp, env, fs};
#[derive(Clone)]
pub struct CStore {
Loaded(Library),
}
-enum LoadError<'a> {
- LocatorError(CrateLocator<'a>),
-}
-
-impl<'a> LoadError<'a> {
- fn report(self) -> ! {
- match self {
- LoadError::LocatorError(locator) => locator.report_errs(),
- }
- }
-}
-
/// A reference to `CrateMetadata` that can also give access to whole crate store when necessary.
#[derive(Clone, Copy)]
crate struct CrateMetadataRef<'a> {
ret
}
- fn verify_no_symbol_conflicts(&self, span: Span, root: &CrateRoot<'_>) {
+ fn verify_no_symbol_conflicts(&self, root: &CrateRoot<'_>) -> Result<(), CrateError> {
// Check for (potential) conflicts with the local crate
if self.local_crate_name == root.name()
&& self.sess.local_crate_disambiguator() == root.disambiguator()
{
- struct_span_err!(
- self.sess,
- span,
- E0519,
- "the current crate is indistinguishable from one of its \
- dependencies: it has the same crate-name `{}` and was \
- compiled with the same `-C metadata` arguments. This \
- will result in symbol conflicts between the two.",
- root.name()
- )
- .emit()
+ return Err(CrateError::SymbolConflictsCurrent(root.name()));
}
// Check for conflicts with any crate loaded so far
+ let mut res = Ok(());
self.cstore.iter_crate_data(|_, other| {
if other.name() == root.name() && // same crate-name
- other.disambiguator() == root.disambiguator() && // same crate-disambiguator
+ other.disambiguator() == root.disambiguator() && // same crate-disambiguator
other.hash() != root.hash()
{
// but different SVH
- struct_span_err!(
- self.sess,
- span,
- E0523,
- "found two different crates with name `{}` that are \
- not distinguished by differing `-C metadata`. This \
- will result in symbol conflicts between the two.",
- root.name()
- )
- .emit();
+ res = Err(CrateError::SymbolConflictsOthers(root.name()));
}
});
+
+ res
}
fn register_crate(
&mut self,
host_lib: Option<Library>,
root: Option<&CratePaths>,
- span: Span,
lib: Library,
dep_kind: DepKind,
name: Symbol,
- ) -> CrateNum {
+ ) -> Result<CrateNum, CrateError> {
let _prof_timer = self.sess.prof.generic_activity("metadata_register_crate");
let Library { source, metadata } = lib;
let crate_root = metadata.get_root();
let host_hash = host_lib.as_ref().map(|lib| lib.metadata.get_root().hash());
- self.verify_no_symbol_conflicts(span, &crate_root);
+ self.verify_no_symbol_conflicts(&crate_root)?;
let private_dep =
self.sess.opts.externs.get(&name.as_str()).map(|e| e.is_private_dep).unwrap_or(false);
&crate_paths
};
- let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, span, dep_kind);
+ let cnum_map = self.resolve_crate_deps(root, &crate_root, &metadata, cnum, dep_kind)?;
let raw_proc_macros = if crate_root.is_proc_macro_crate() {
let temp_root;
None => (&source, &crate_root),
};
let dlsym_dylib = dlsym_source.dylib.as_ref().expect("no dylib for a proc-macro crate");
- Some(self.dlsym_proc_macros(&dlsym_dylib.0, dlsym_root.disambiguator(), span))
+ Some(self.dlsym_proc_macros(&dlsym_dylib.0, dlsym_root.disambiguator())?)
} else {
None
};
),
);
- cnum
+ Ok(cnum)
}
fn load_proc_macro<'b>(
&self,
locator: &mut CrateLocator<'b>,
path_kind: PathKind,
- ) -> Option<(LoadResult, Option<Library>)>
+ ) -> Result<Option<(LoadResult, Option<Library>)>, CrateError>
where
'a: 'b,
{
let (locator, target_result) = if self.sess.opts.debugging_opts.dual_proc_macros {
proc_macro_locator.reset();
let result = match self.load(&mut proc_macro_locator)? {
- LoadResult::Previous(cnum) => return Some((LoadResult::Previous(cnum), None)),
- LoadResult::Loaded(library) => Some(LoadResult::Loaded(library)),
+ Some(LoadResult::Previous(cnum)) => {
+ return Ok(Some((LoadResult::Previous(cnum), None)));
+ }
+ Some(LoadResult::Loaded(library)) => Some(LoadResult::Loaded(library)),
+ None => return Ok(None),
};
locator.hash = locator.host_hash;
// Use the locator when looking for the host proc macro crate, as that is required
locator.triple = TargetTriple::from_triple(config::host_triple());
locator.filesearch = self.sess.host_filesearch(path_kind);
- let host_result = self.load(locator)?;
+ let host_result = match self.load(locator)? {
+ Some(host_result) => host_result,
+ None => return Ok(None),
+ };
- Some(if self.sess.opts.debugging_opts.dual_proc_macros {
+ Ok(Some(if self.sess.opts.debugging_opts.dual_proc_macros {
let host_result = match host_result {
LoadResult::Previous(..) => {
panic!("host and target proc macros must be loaded in lock-step")
(target_result.unwrap(), Some(host_result))
} else {
(host_result, None)
- })
+ }))
}
fn resolve_crate<'b>(
if dep.is_none() {
self.used_extern_options.insert(name);
}
- if !name.as_str().is_ascii() {
- self.sess
- .struct_span_err(
- span,
- &format!("cannot load a crate with a non-ascii name `{}`", name,),
- )
- .emit();
- }
- self.maybe_resolve_crate(name, span, dep_kind, dep).unwrap_or_else(|err| err.report())
+ self.maybe_resolve_crate(name, dep_kind, dep)
+ .unwrap_or_else(|err| err.report(self.sess, span))
}
fn maybe_resolve_crate<'b>(
&'b mut self,
name: Symbol,
- span: Span,
mut dep_kind: DepKind,
dep: Option<(&'b CratePaths, &'b CrateDep)>,
- ) -> Result<CrateNum, LoadError<'b>> {
+ ) -> Result<CrateNum, CrateError> {
info!("resolving crate `{}`", name);
+ if !name.as_str().is_ascii() {
+ return Err(CrateError::NonAsciiName(name));
+ }
let (root, hash, host_hash, extra_filename, path_kind) = match dep {
Some((root, dep)) => (
Some(root),
extra_filename,
false, // is_host
path_kind,
- span,
root,
Some(false), // is_proc_macro
);
- self.load(&mut locator)
- .map(|r| (r, None))
- .or_else(|| {
+ match self.load(&mut locator)? {
+ Some(res) => (res, None),
+ None => {
dep_kind = DepKind::MacrosOnly;
- self.load_proc_macro(&mut locator, path_kind)
- })
- .ok_or_else(move || LoadError::LocatorError(locator))?
+ match self.load_proc_macro(&mut locator, path_kind)? {
+ Some(res) => res,
+ None => return Err(locator.into_error()),
+ }
+ }
+ }
};
match result {
Ok(cnum)
}
(LoadResult::Loaded(library), host_library) => {
- Ok(self.register_crate(host_library, root, span, library, dep_kind, name))
+ self.register_crate(host_library, root, library, dep_kind, name)
}
_ => panic!(),
}
}
- fn load(&self, locator: &mut CrateLocator<'_>) -> Option<LoadResult> {
- let library = locator.maybe_load_library_crate()?;
+ fn load(&self, locator: &mut CrateLocator<'_>) -> Result<Option<LoadResult>, CrateError> {
+ let library = match locator.maybe_load_library_crate()? {
+ Some(library) => library,
+ None => return Ok(None),
+ };
// In the case that we're loading a crate, but not matching
// against a hash, we could load a crate which has the same hash
// don't want to match a host crate against an equivalent target one
// already loaded.
let root = library.metadata.get_root();
- if locator.triple == self.sess.opts.target_triple {
+ Ok(Some(if locator.triple == self.sess.opts.target_triple {
let mut result = LoadResult::Loaded(library);
self.cstore.iter_crate_data(|cnum, data| {
if data.name() == root.name() && root.hash() == data.hash() {
result = LoadResult::Previous(cnum);
}
});
- Some(result)
+ result
} else {
- Some(LoadResult::Loaded(library))
- }
+ LoadResult::Loaded(library)
+ }))
}
fn update_extern_crate(&self, cnum: CrateNum, extern_crate: ExternCrate) {
crate_root: &CrateRoot<'_>,
metadata: &MetadataBlob,
krate: CrateNum,
- span: Span,
dep_kind: DepKind,
- ) -> CrateNumMap {
+ ) -> Result<CrateNumMap, CrateError> {
debug!("resolving deps of external crate");
if crate_root.is_proc_macro_crate() {
- return CrateNumMap::new();
+ return Ok(CrateNumMap::new());
}
// The map from crate numbers in the crate we're resolving to local crate numbers.
// We map 0 and all other holes in the map to our parent crate. The "additional"
// self-dependencies should be harmless.
- std::iter::once(krate)
- .chain(crate_root.decode_crate_deps(metadata).map(|dep| {
- info!(
- "resolving dep crate {} hash: `{}` extra filename: `{}`",
- dep.name, dep.hash, dep.extra_filename
- );
- let dep_kind = match dep_kind {
- DepKind::MacrosOnly => DepKind::MacrosOnly,
- _ => dep.kind,
- };
- self.resolve_crate(dep.name, span, dep_kind, Some((root, &dep)))
- }))
- .collect()
+ let deps = crate_root.decode_crate_deps(metadata);
+ let mut crate_num_map = CrateNumMap::with_capacity(1 + deps.len());
+ crate_num_map.push(krate);
+ for dep in deps {
+ info!(
+ "resolving dep crate {} hash: `{}` extra filename: `{}`",
+ dep.name, dep.hash, dep.extra_filename
+ );
+ let dep_kind = match dep_kind {
+ DepKind::MacrosOnly => DepKind::MacrosOnly,
+ _ => dep.kind,
+ };
+ let cnum = self.maybe_resolve_crate(dep.name, dep_kind, Some((root, &dep)))?;
+ crate_num_map.push(cnum);
+ }
+ Ok(crate_num_map)
}
fn dlsym_proc_macros(
&self,
path: &Path,
disambiguator: CrateDisambiguator,
- span: Span,
- ) -> &'static [ProcMacro] {
- use crate::dynamic_lib::DynamicLibrary;
- use std::env;
-
+ ) -> Result<&'static [ProcMacro], CrateError> {
// Make sure the path contains a / or the linker will search for it.
let path = env::current_dir().unwrap().join(path);
let lib = match DynamicLibrary::open(&path) {
Ok(lib) => lib,
- Err(err) => self.sess.span_fatal(span, &err),
+ Err(s) => return Err(CrateError::DlOpen(s)),
};
let sym = self.sess.generate_proc_macro_decls_symbol(disambiguator);
let decls = unsafe {
let sym = match lib.symbol(&sym) {
Ok(f) => f,
- Err(err) => self.sess.span_fatal(span, &err),
+ Err(s) => return Err(CrateError::DlSym(s)),
};
*(sym as *const &[ProcMacro])
};
// since the library can make things that will live arbitrarily long.
std::mem::forget(lib);
- decls
+ Ok(decls)
}
fn inject_panic_runtime(&mut self, krate: &ast::Crate) {
cnum
}
- pub fn maybe_process_path_extern(&mut self, name: Symbol, span: Span) -> Option<CrateNum> {
- self.maybe_resolve_crate(name, span, DepKind::Explicit, None).ok()
+ pub fn maybe_process_path_extern(&mut self, name: Symbol) -> Option<CrateNum> {
+ self.maybe_resolve_crate(name, DepKind::Explicit, None).ok()
}
}
use crate::rmeta::{rustc_version, MetadataBlob, METADATA_HEADER};
use rustc_data_structures::fx::{FxHashMap, FxHashSet};
+use rustc_data_structures::owning_ref::OwningRef;
use rustc_data_structures::svh::Svh;
use rustc_data_structures::sync::MetadataRef;
-use rustc_errors::{struct_span_err, DiagnosticBuilder};
+use rustc_errors::struct_span_err;
use rustc_middle::middle::cstore::{CrateSource, MetadataLoader};
use rustc_session::config::{self, CrateType};
use rustc_session::filesearch::{FileDoesntMatch, FileMatches, FileSearch};
use rustc_span::Span;
use rustc_target::spec::{Target, TargetTriple};
-use std::cmp;
-use std::fmt;
-use std::fs;
-use std::io::{self, Read};
-use std::ops::Deref;
-use std::path::{Path, PathBuf};
-use std::time::Instant;
-
use flate2::read::DeflateDecoder;
-
-use rustc_data_structures::owning_ref::OwningRef;
-
use log::{debug, info, warn};
-
-#[derive(Clone)]
-struct CrateMismatch {
- path: PathBuf,
- got: String,
-}
+use std::io::{Read, Result as IoResult, Write};
+use std::ops::Deref;
+use std::path::{Path, PathBuf};
+use std::{cmp, fmt, fs};
#[derive(Clone)]
crate struct CrateLocator<'a> {
pub target: &'a Target,
pub triple: TargetTriple,
pub filesearch: FileSearch<'a>,
- span: Span,
root: Option<&'a CratePaths>,
pub is_proc_macro: Option<bool>,
rejected_via_filename: Vec<CrateMismatch>,
}
+#[derive(Clone)]
crate struct CratePaths {
name: Symbol,
source: CrateSource,
}
#[derive(Copy, Clone, PartialEq)]
-enum CrateFlavor {
+crate enum CrateFlavor {
Rlib,
Rmeta,
Dylib,
extra_filename: Option<&'a str>,
is_host: bool,
path_kind: PathKind,
- span: Span,
root: Option<&'a CratePaths>,
is_proc_macro: Option<bool>,
) -> CrateLocator<'a> {
} else {
sess.target_filesearch(path_kind)
},
- span,
root,
is_proc_macro,
rejected_via_hash: Vec::new(),
self.rejected_via_filename.clear();
}
- crate fn maybe_load_library_crate(&mut self) -> Option<Library> {
+ crate fn maybe_load_library_crate(&mut self) -> Result<Option<Library>, CrateError> {
if !self.exact_paths.is_empty() {
return self.find_commandline_library();
}
let mut seen_paths = FxHashSet::default();
- match self.extra_filename {
- Some(s) => self
- .find_library_crate(s, &mut seen_paths)
- .or_else(|| self.find_library_crate("", &mut seen_paths)),
- None => self.find_library_crate("", &mut seen_paths),
- }
- }
-
- crate fn report_errs(self) -> ! {
- let add = match self.root {
- None => String::new(),
- Some(r) => format!(" which `{}` depends on", r.name),
- };
- let mut msg = "the following crate versions were found:".to_string();
- let mut err = if !self.rejected_via_hash.is_empty() {
- let mut err = struct_span_err!(
- self.sess,
- self.span,
- E0460,
- "found possibly newer version of crate `{}`{}",
- self.crate_name,
- add
- );
- err.note("perhaps that crate needs to be recompiled?");
- let mismatches = self.rejected_via_hash.iter();
- for &CrateMismatch { ref path, .. } in mismatches {
- msg.push_str(&format!("\ncrate `{}`: {}", self.crate_name, path.display()));
- }
- match self.root {
- None => {}
- Some(r) => {
- for path in r.source.paths() {
- msg.push_str(&format!("\ncrate `{}`: {}", r.name, path.display()));
- }
- }
- }
- err.note(&msg);
- err
- } else if !self.rejected_via_triple.is_empty() {
- let mut err = struct_span_err!(
- self.sess,
- self.span,
- E0461,
- "couldn't find crate `{}` \
- with expected target triple {}{}",
- self.crate_name,
- self.triple,
- add
- );
- let mismatches = self.rejected_via_triple.iter();
- for &CrateMismatch { ref path, ref got } in mismatches {
- msg.push_str(&format!(
- "\ncrate `{}`, target triple {}: {}",
- self.crate_name,
- got,
- path.display()
- ));
- }
- err.note(&msg);
- err
- } else if !self.rejected_via_kind.is_empty() {
- let mut err = struct_span_err!(
- self.sess,
- self.span,
- E0462,
- "found staticlib `{}` instead of rlib or dylib{}",
- self.crate_name,
- add
- );
- err.help("please recompile that crate using --crate-type lib");
- let mismatches = self.rejected_via_kind.iter();
- for &CrateMismatch { ref path, .. } in mismatches {
- msg.push_str(&format!("\ncrate `{}`: {}", self.crate_name, path.display()));
- }
- err.note(&msg);
- err
- } else if !self.rejected_via_version.is_empty() {
- let mut err = struct_span_err!(
- self.sess,
- self.span,
- E0514,
- "found crate `{}` compiled by an incompatible version \
- of rustc{}",
- self.crate_name,
- add
- );
- err.help(&format!(
- "please recompile that crate using this compiler ({})",
- rustc_version()
- ));
- let mismatches = self.rejected_via_version.iter();
- for &CrateMismatch { ref path, ref got } in mismatches {
- msg.push_str(&format!(
- "\ncrate `{}` compiled by {}: {}",
- self.crate_name,
- got,
- path.display()
- ));
- }
- err.note(&msg);
- err
- } else {
- let mut err = struct_span_err!(
- self.sess,
- self.span,
- E0463,
- "can't find crate for `{}`{}",
- self.crate_name,
- add
- );
-
- if (self.crate_name == sym::std || self.crate_name == sym::core)
- && self.triple != TargetTriple::from_triple(config::host_triple())
- {
- err.note(&format!("the `{}` target may not be installed", self.triple));
- } else if self.crate_name == sym::profiler_builtins {
- err.note(&"the compiler may have been built without the profiler runtime");
- }
- err.span_label(self.span, "can't find crate");
- err
- };
-
- if !self.rejected_via_filename.is_empty() {
- let dylibname = self.dylibname();
- let mismatches = self.rejected_via_filename.iter();
- for &CrateMismatch { ref path, .. } in mismatches {
- err.note(&format!(
- "extern location for {} is of an unknown type: {}",
- self.crate_name,
- path.display()
- ))
- .help(&format!(
- "file name should be lib*.rlib or {}*.{}",
- dylibname.0, dylibname.1
- ));
+ if let Some(extra_filename) = self.extra_filename {
+ if let library @ Some(_) = self.find_library_crate(extra_filename, &mut seen_paths)? {
+ return Ok(library);
}
}
-
- err.emit();
- self.sess.abort_if_errors();
- unreachable!();
+ self.find_library_crate("", &mut seen_paths)
}
fn find_library_crate(
&mut self,
extra_prefix: &str,
seen_paths: &mut FxHashSet<PathBuf>,
- ) -> Option<Library> {
- let dypair = self.dylibname();
- let staticpair = self.staticlibname();
-
+ ) -> Result<Option<Library>, CrateError> {
// want: crate_name.dir_part() + prefix + crate_name.file_part + "-"
- let dylib_prefix = format!("{}{}{}", dypair.0, self.crate_name, extra_prefix);
+ let dylib_prefix =
+ format!("{}{}{}", self.target.options.dll_prefix, self.crate_name, extra_prefix);
let rlib_prefix = format!("lib{}{}", self.crate_name, extra_prefix);
- let staticlib_prefix = format!("{}{}{}", staticpair.0, self.crate_name, extra_prefix);
+ let staticlib_prefix =
+ format!("{}{}{}", self.target.options.staticlib_prefix, self.crate_name, extra_prefix);
let mut candidates: FxHashMap<_, (FxHashMap<_, _>, FxHashMap<_, _>, FxHashMap<_, _>)> =
Default::default();
(&file[(rlib_prefix.len())..(file.len() - ".rlib".len())], CrateFlavor::Rlib)
} else if file.starts_with(&rlib_prefix) && file.ends_with(".rmeta") {
(&file[(rlib_prefix.len())..(file.len() - ".rmeta".len())], CrateFlavor::Rmeta)
- } else if file.starts_with(&dylib_prefix) && file.ends_with(&dypair.1) {
- (&file[(dylib_prefix.len())..(file.len() - dypair.1.len())], CrateFlavor::Dylib)
+ } else if file.starts_with(&dylib_prefix)
+ && file.ends_with(&self.target.options.dll_suffix)
+ {
+ (
+ &file
+ [(dylib_prefix.len())..(file.len() - self.target.options.dll_suffix.len())],
+ CrateFlavor::Dylib,
+ )
} else {
- if file.starts_with(&staticlib_prefix) && file.ends_with(&staticpair.1) {
+ if file.starts_with(&staticlib_prefix)
+ && file.ends_with(&self.target.options.staticlib_suffix)
+ {
staticlibs
.push(CrateMismatch { path: spf.path.clone(), got: "static".to_string() });
}
info!("lib candidate: {}", spf.path.display());
- let hash_str = hash.to_string();
- let slot = candidates.entry(hash_str).or_default();
- let (ref mut rlibs, ref mut rmetas, ref mut dylibs) = *slot;
+ let (rlibs, rmetas, dylibs) = candidates.entry(hash.to_string()).or_default();
fs::canonicalize(&spf.path)
.map(|p| {
if seen_paths.contains(&p) {
};
seen_paths.insert(p.clone());
match found_kind {
- CrateFlavor::Rlib => {
- rlibs.insert(p, kind);
- }
- CrateFlavor::Rmeta => {
- rmetas.insert(p, kind);
- }
- CrateFlavor::Dylib => {
- dylibs.insert(p, kind);
- }
- }
+ CrateFlavor::Rlib => rlibs.insert(p, kind),
+ CrateFlavor::Rmeta => rmetas.insert(p, kind),
+ CrateFlavor::Dylib => dylibs.insert(p, kind),
+ };
FileMatches
})
.unwrap_or(FileDoesntMatch)
// search is being performed for.
let mut libraries = FxHashMap::default();
for (_hash, (rlibs, rmetas, dylibs)) in candidates {
- if let Some((svh, lib)) = self.extract_lib(rlibs, rmetas, dylibs) {
+ if let Some((svh, lib)) = self.extract_lib(rlibs, rmetas, dylibs)? {
libraries.insert(svh, lib);
}
}
// what we've got and figure out if we found multiple candidates for
// libraries or not.
match libraries.len() {
- 0 => None,
- 1 => Some(libraries.into_iter().next().unwrap().1),
- _ => {
- let mut err = struct_span_err!(
- self.sess,
- self.span,
- E0464,
- "multiple matching crates for `{}`",
- self.crate_name
- );
- let candidates = libraries
- .iter()
- .filter_map(|(_, lib)| {
- let crate_name = &lib.metadata.get_root().name().as_str();
- match &(&lib.source.dylib, &lib.source.rlib) {
- &(&Some((ref pd, _)), &Some((ref pr, _))) => Some(format!(
- "\ncrate `{}`: {}\n{:>padding$}",
- crate_name,
- pd.display(),
- pr.display(),
- padding = 8 + crate_name.len()
- )),
- &(&Some((ref p, _)), &None) | &(&None, &Some((ref p, _))) => {
- Some(format!("\ncrate `{}`: {}", crate_name, p.display()))
- }
- &(&None, &None) => None,
- }
- })
- .collect::<String>();
- err.note(&format!("candidates:{}", candidates));
- err.emit();
- None
- }
+ 0 => Ok(None),
+ 1 => Ok(Some(libraries.into_iter().next().unwrap().1)),
+ _ => Err(CrateError::MultipleMatchingCrates(self.crate_name, libraries)),
}
}
rlibs: FxHashMap<PathBuf, PathKind>,
rmetas: FxHashMap<PathBuf, PathKind>,
dylibs: FxHashMap<PathBuf, PathKind>,
- ) -> Option<(Svh, Library)> {
+ ) -> Result<Option<(Svh, Library)>, CrateError> {
let mut slot = None;
// Order here matters, rmeta should come first. See comment in
// `extract_one` below.
let source = CrateSource {
- rmeta: self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot),
- rlib: self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot),
- dylib: self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot),
+ rmeta: self.extract_one(rmetas, CrateFlavor::Rmeta, &mut slot)?,
+ rlib: self.extract_one(rlibs, CrateFlavor::Rlib, &mut slot)?,
+ dylib: self.extract_one(dylibs, CrateFlavor::Dylib, &mut slot)?,
};
- slot.map(|(svh, metadata)| (svh, Library { source, metadata }))
+ Ok(slot.map(|(svh, metadata)| (svh, Library { source, metadata })))
}
fn needs_crate_flavor(&self, flavor: CrateFlavor) -> bool {
m: FxHashMap<PathBuf, PathKind>,
flavor: CrateFlavor,
slot: &mut Option<(Svh, MetadataBlob)>,
- ) -> Option<(PathBuf, PathKind)> {
- let mut ret: Option<(PathBuf, PathKind)> = None;
- let mut error = 0;
-
+ ) -> Result<Option<(PathBuf, PathKind)>, CrateError> {
// If we are producing an rlib, and we've already loaded metadata, then
// we should not attempt to discover further crate sources (unless we're
// locating a proc macro; exact logic is in needs_crate_flavor). This means
// from the other crate sources.
if slot.is_some() {
if m.is_empty() || !self.needs_crate_flavor(flavor) {
- return None;
+ return Ok(None);
} else if m.len() == 1 {
- return Some(m.into_iter().next().unwrap());
+ return Ok(Some(m.into_iter().next().unwrap()));
}
}
- let mut err: Option<DiagnosticBuilder<'_>> = None;
+ let mut ret: Option<(PathBuf, PathKind)> = None;
+ let mut err_data: Option<Vec<PathBuf>> = None;
for (lib, kind) in m {
info!("{} reading metadata from: {}", flavor, lib.display());
let (hash, metadata) =
};
// If we see multiple hashes, emit an error about duplicate candidates.
if slot.as_ref().map_or(false, |s| s.0 != hash) {
- let mut e = struct_span_err!(
- self.sess,
- self.span,
- E0465,
- "multiple {} candidates for `{}` found",
- flavor,
- self.crate_name
- );
- e.span_note(
- self.span,
- &format!(r"candidate #1: {}", ret.as_ref().unwrap().0.display()),
- );
- if let Some(ref mut e) = err {
- e.emit();
+ if let Some(candidates) = err_data {
+ return Err(CrateError::MultipleCandidates(
+ self.crate_name,
+ flavor,
+ candidates,
+ ));
}
- err = Some(e);
- error = 1;
+ err_data = Some(vec![ret.as_ref().unwrap().0.clone()]);
*slot = None;
}
- if error > 0 {
- error += 1;
- err.as_mut()
- .unwrap()
- .span_note(self.span, &format!(r"candidate #{}: {}", error, lib.display()));
+ if let Some(candidates) = &mut err_data {
+ candidates.push(lib);
continue;
}
// As a result, we favor the sysroot crate here. Note that the
// candidates are all canonicalized, so we canonicalize the sysroot
// as well.
- if let Some((ref prev, _)) = ret {
+ if let Some((prev, _)) = &ret {
let sysroot = &self.sess.sysroot;
let sysroot = sysroot.canonicalize().unwrap_or_else(|_| sysroot.to_path_buf());
if prev.starts_with(&sysroot) {
ret = Some((lib, kind));
}
- if error > 0 {
- err.unwrap().emit();
- None
+ if let Some(candidates) = err_data {
+ Err(CrateError::MultipleCandidates(self.crate_name, flavor, candidates))
} else {
- ret
+ Ok(ret)
}
}
Some(hash)
}
- // Returns the corresponding (prefix, suffix) that files need to have for
- // dynamic libraries
- fn dylibname(&self) -> (String, String) {
- let t = &self.target;
- (t.options.dll_prefix.clone(), t.options.dll_suffix.clone())
- }
-
- // Returns the corresponding (prefix, suffix) that files need to have for
- // static libraries
- fn staticlibname(&self) -> (String, String) {
- let t = &self.target;
- (t.options.staticlib_prefix.clone(), t.options.staticlib_suffix.clone())
- }
-
- fn find_commandline_library(&mut self) -> Option<Library> {
+ fn find_commandline_library(&mut self) -> Result<Option<Library>, CrateError> {
// First, filter out all libraries that look suspicious. We only accept
// files which actually exist that have the correct naming scheme for
// rlibs/dylibs.
- let sess = self.sess;
- let dylibname = self.dylibname();
let mut rlibs = FxHashMap::default();
let mut rmetas = FxHashMap::default();
let mut dylibs = FxHashMap::default();
- {
- let crate_name = self.crate_name;
- let rejected_via_filename = &mut self.rejected_via_filename;
- let locs = self.exact_paths.iter().filter(|loc| {
- if !loc.exists() {
- sess.err(&format!(
- "extern location for {} does not exist: {}",
- crate_name,
- loc.display()
- ));
- return false;
- }
- let file = match loc.file_name().and_then(|s| s.to_str()) {
- Some(file) => file,
- None => {
- sess.err(&format!(
- "extern location for {} is not a file: {}",
- crate_name,
- loc.display()
- ));
- return false;
- }
- };
- if file.starts_with("lib") && (file.ends_with(".rlib") || file.ends_with(".rmeta"))
- {
- return true;
- } else {
- let (ref prefix, ref suffix) = dylibname;
- if file.starts_with(&prefix[..]) && file.ends_with(&suffix[..]) {
- return true;
- }
+ for loc in &self.exact_paths {
+ if !loc.exists() {
+ return Err(CrateError::ExternLocationNotExist(self.crate_name, loc.clone()));
+ }
+ let file = match loc.file_name().and_then(|s| s.to_str()) {
+ Some(file) => file,
+ None => {
+ return Err(CrateError::ExternLocationNotFile(self.crate_name, loc.clone()));
}
+ };
- rejected_via_filename
- .push(CrateMismatch { path: (*loc).clone(), got: String::new() });
-
- false
- });
-
- // Now that we have an iterator of good candidates, make sure
- // there's at most one rlib and at most one dylib.
- for loc in locs {
+ if file.starts_with("lib") && (file.ends_with(".rlib") || file.ends_with(".rmeta"))
+ || file.starts_with(&self.target.options.dll_prefix)
+ && file.ends_with(&self.target.options.dll_suffix)
+ {
+ // Make sure there's at most one rlib and at most one dylib.
if loc.file_name().unwrap().to_str().unwrap().ends_with(".rlib") {
rlibs.insert(fs::canonicalize(&loc).unwrap(), PathKind::ExternFlag);
} else if loc.file_name().unwrap().to_str().unwrap().ends_with(".rmeta") {
} else {
dylibs.insert(fs::canonicalize(&loc).unwrap(), PathKind::ExternFlag);
}
+ } else {
+ self.rejected_via_filename
+ .push(CrateMismatch { path: loc.clone(), got: String::new() });
}
- };
+ }
// Extract the dylib/rlib/rmeta triple.
- self.extract_lib(rlibs, rmetas, dylibs).map(|(_, lib)| lib)
+ Ok(self.extract_lib(rlibs, rmetas, dylibs)?.map(|(_, lib)| lib))
}
-}
-// Just a small wrapper to time how long reading metadata takes.
-fn get_metadata_section(
- target: &Target,
- flavor: CrateFlavor,
- filename: &Path,
- loader: &dyn MetadataLoader,
-) -> Result<MetadataBlob, String> {
- let start = Instant::now();
- let ret = get_metadata_section_imp(target, flavor, filename, loader);
- info!("reading {:?} => {:?}", filename.file_name().unwrap(), start.elapsed());
- ret
+ crate fn into_error(self) -> CrateError {
+ CrateError::LocatorCombined(CombinedLocatorError {
+ crate_name: self.crate_name,
+ root: self.root.cloned(),
+ triple: self.triple,
+ dll_prefix: self.target.options.dll_prefix.clone(),
+ dll_suffix: self.target.options.dll_suffix.clone(),
+ rejected_via_hash: self.rejected_via_hash,
+ rejected_via_triple: self.rejected_via_triple,
+ rejected_via_kind: self.rejected_via_kind,
+ rejected_via_version: self.rejected_via_version,
+ rejected_via_filename: self.rejected_via_filename,
+ })
+ }
}
/// A trivial wrapper for `Mmap` that implements `StableDeref`.
unsafe impl stable_deref_trait::StableDeref for StableDerefMmap {}
-fn get_metadata_section_imp(
+fn get_metadata_section(
target: &Target,
flavor: CrateFlavor,
filename: &Path,
metadata_loader: &dyn MetadataLoader,
span: Span,
name: Symbol,
-) -> Option<(PathBuf, CrateDisambiguator)> {
+) -> (PathBuf, CrateDisambiguator) {
+ match find_plugin_registrar_impl(sess, metadata_loader, name) {
+ Ok(res) => res,
+ Err(err) => err.report(sess, span),
+ }
+}
+
+fn find_plugin_registrar_impl<'a>(
+ sess: &'a Session,
+ metadata_loader: &dyn MetadataLoader,
+ name: Symbol,
+) -> Result<(PathBuf, CrateDisambiguator), CrateError> {
info!("find plugin registrar `{}`", name);
- let target_triple = sess.opts.target_triple.clone();
- let host_triple = TargetTriple::from_triple(config::host_triple());
- let is_cross = target_triple != host_triple;
- let mut target_only = false;
let mut locator = CrateLocator::new(
sess,
metadata_loader,
None, // extra_filename
true, // is_host
PathKind::Crate,
- span,
None, // root
None, // is_proc_macro
);
- let library = locator.maybe_load_library_crate().or_else(|| {
- if !is_cross {
- return None;
- }
- // Try loading from target crates. This will abort later if we
- // try to load a plugin registrar function,
- target_only = true;
-
- locator.target = &sess.target.target;
- locator.triple = target_triple;
- locator.filesearch = sess.target_filesearch(PathKind::Crate);
-
- locator.maybe_load_library_crate()
- });
- let library = match library {
- Some(l) => l,
- None => locator.report_errs(),
- };
-
- if target_only {
- let message = format!(
- "plugin `{}` is not available for triple `{}` (only found {})",
- name,
- config::host_triple(),
- sess.opts.target_triple
- );
- struct_span_err!(sess, span, E0456, "{}", &message).emit();
- return None;
- }
-
- match library.source.dylib {
- Some(dylib) => Some((dylib.0, library.metadata.get_root().disambiguator())),
- None => {
- struct_span_err!(
- sess,
- span,
- E0457,
- "plugin `{}` only found in rlib format, but must be available \
- in dylib format",
- name
- )
- .emit();
- // No need to abort because the loading code will just ignore this
- // empty dylib.
- None
- }
+ match locator.maybe_load_library_crate()? {
+ Some(library) => match library.source.dylib {
+ Some(dylib) => Ok((dylib.0, library.metadata.get_root().disambiguator())),
+ None => Err(CrateError::NonDylibPlugin(name)),
+ },
+ None => Err(locator.into_error()),
}
}
target: &Target,
path: &Path,
metadata_loader: &dyn MetadataLoader,
- out: &mut dyn io::Write,
-) -> io::Result<()> {
+ out: &mut dyn Write,
+) -> IoResult<()> {
let filename = path.file_name().unwrap().to_str().unwrap();
let flavor = if filename.ends_with(".rlib") {
CrateFlavor::Rlib
Err(msg) => write!(out, "{}\n", msg),
}
}
+
+// ------------------------------------------ Error reporting -------------------------------------
+
+#[derive(Clone)]
+struct CrateMismatch {
+ path: PathBuf,
+ got: String,
+}
+
+/// Candidate rejection reasons collected during crate search.
+/// If no candidate is accepted, then these reasons are presented to the user,
+/// otherwise they are ignored.
+crate struct CombinedLocatorError {
+ crate_name: Symbol,
+ root: Option<CratePaths>,
+ triple: TargetTriple,
+ dll_prefix: String,
+ dll_suffix: String,
+ rejected_via_hash: Vec<CrateMismatch>,
+ rejected_via_triple: Vec<CrateMismatch>,
+ rejected_via_kind: Vec<CrateMismatch>,
+ rejected_via_version: Vec<CrateMismatch>,
+ rejected_via_filename: Vec<CrateMismatch>,
+}
+
+crate enum CrateError {
+ NonAsciiName(Symbol),
+ ExternLocationNotExist(Symbol, PathBuf),
+ ExternLocationNotFile(Symbol, PathBuf),
+ MultipleCandidates(Symbol, CrateFlavor, Vec<PathBuf>),
+ MultipleMatchingCrates(Symbol, FxHashMap<Svh, Library>),
+ SymbolConflictsCurrent(Symbol),
+ SymbolConflictsOthers(Symbol),
+ DlOpen(String),
+ DlSym(String),
+ LocatorCombined(CombinedLocatorError),
+ NonDylibPlugin(Symbol),
+}
+
+impl CrateError {
+ crate fn report(self, sess: &Session, span: Span) -> ! {
+ let mut err = match self {
+ CrateError::NonAsciiName(crate_name) => sess.struct_span_err(
+ span,
+ &format!("cannot load a crate with a non-ascii name `{}`", crate_name),
+ ),
+ CrateError::ExternLocationNotExist(crate_name, loc) => sess.struct_span_err(
+ span,
+ &format!("extern location for {} does not exist: {}", crate_name, loc.display()),
+ ),
+ CrateError::ExternLocationNotFile(crate_name, loc) => sess.struct_span_err(
+ span,
+ &format!("extern location for {} is not a file: {}", crate_name, loc.display()),
+ ),
+ CrateError::MultipleCandidates(crate_name, flavor, candidates) => {
+ let mut err = struct_span_err!(
+ sess,
+ span,
+ E0465,
+ "multiple {} candidates for `{}` found",
+ flavor,
+ crate_name,
+ );
+ for (i, candidate) in candidates.iter().enumerate() {
+ err.span_note(span, &format!("candidate #{}: {}", i + 1, candidate.display()));
+ }
+ err
+ }
+ CrateError::MultipleMatchingCrates(crate_name, libraries) => {
+ let mut err = struct_span_err!(
+ sess,
+ span,
+ E0464,
+ "multiple matching crates for `{}`",
+ crate_name
+ );
+ let candidates = libraries
+ .iter()
+ .filter_map(|(_, lib)| {
+ let crate_name = &lib.metadata.get_root().name().as_str();
+ match (&lib.source.dylib, &lib.source.rlib) {
+ (Some((pd, _)), Some((pr, _))) => Some(format!(
+ "\ncrate `{}`: {}\n{:>padding$}",
+ crate_name,
+ pd.display(),
+ pr.display(),
+ padding = 8 + crate_name.len()
+ )),
+ (Some((p, _)), None) | (None, Some((p, _))) => {
+ Some(format!("\ncrate `{}`: {}", crate_name, p.display()))
+ }
+ (None, None) => None,
+ }
+ })
+ .collect::<String>();
+ err.note(&format!("candidates:{}", candidates));
+ err
+ }
+ CrateError::SymbolConflictsCurrent(root_name) => struct_span_err!(
+ sess,
+ span,
+ E0519,
+ "the current crate is indistinguishable from one of its dependencies: it has the \
+ same crate-name `{}` and was compiled with the same `-C metadata` arguments. \
+ This will result in symbol conflicts between the two.",
+ root_name,
+ ),
+ CrateError::SymbolConflictsOthers(root_name) => struct_span_err!(
+ sess,
+ span,
+ E0523,
+ "found two different crates with name `{}` that are not distinguished by differing \
+ `-C metadata`. This will result in symbol conflicts between the two.",
+ root_name,
+ ),
+ CrateError::DlOpen(s) | CrateError::DlSym(s) => sess.struct_span_err(span, &s),
+ CrateError::LocatorCombined(locator) => {
+ let crate_name = locator.crate_name;
+ let add = match &locator.root {
+ None => String::new(),
+ Some(r) => format!(" which `{}` depends on", r.name),
+ };
+ let mut msg = "the following crate versions were found:".to_string();
+ let mut err = if !locator.rejected_via_hash.is_empty() {
+ let mut err = struct_span_err!(
+ sess,
+ span,
+ E0460,
+ "found possibly newer version of crate `{}`{}",
+ crate_name,
+ add,
+ );
+ err.note("perhaps that crate needs to be recompiled?");
+ let mismatches = locator.rejected_via_hash.iter();
+ for CrateMismatch { path, .. } in mismatches {
+ msg.push_str(&format!("\ncrate `{}`: {}", crate_name, path.display()));
+ }
+ if let Some(r) = locator.root {
+ for path in r.source.paths() {
+ msg.push_str(&format!("\ncrate `{}`: {}", r.name, path.display()));
+ }
+ }
+ err.note(&msg);
+ err
+ } else if !locator.rejected_via_triple.is_empty() {
+ let mut err = struct_span_err!(
+ sess,
+ span,
+ E0461,
+ "couldn't find crate `{}` with expected target triple {}{}",
+ crate_name,
+ locator.triple,
+ add,
+ );
+ let mismatches = locator.rejected_via_triple.iter();
+ for CrateMismatch { path, got } in mismatches {
+ msg.push_str(&format!(
+ "\ncrate `{}`, target triple {}: {}",
+ crate_name,
+ got,
+ path.display(),
+ ));
+ }
+ err.note(&msg);
+ err
+ } else if !locator.rejected_via_kind.is_empty() {
+ let mut err = struct_span_err!(
+ sess,
+ span,
+ E0462,
+ "found staticlib `{}` instead of rlib or dylib{}",
+ crate_name,
+ add,
+ );
+ err.help("please recompile that crate using --crate-type lib");
+ let mismatches = locator.rejected_via_kind.iter();
+ for CrateMismatch { path, .. } in mismatches {
+ msg.push_str(&format!("\ncrate `{}`: {}", crate_name, path.display()));
+ }
+ err.note(&msg);
+ err
+ } else if !locator.rejected_via_version.is_empty() {
+ let mut err = struct_span_err!(
+ sess,
+ span,
+ E0514,
+ "found crate `{}` compiled by an incompatible version of rustc{}",
+ crate_name,
+ add,
+ );
+ err.help(&format!(
+ "please recompile that crate using this compiler ({})",
+ rustc_version(),
+ ));
+ let mismatches = locator.rejected_via_version.iter();
+ for CrateMismatch { path, got } in mismatches {
+ msg.push_str(&format!(
+ "\ncrate `{}` compiled by {}: {}",
+ crate_name,
+ got,
+ path.display(),
+ ));
+ }
+ err.note(&msg);
+ err
+ } else {
+ let mut err = struct_span_err!(
+ sess,
+ span,
+ E0463,
+ "can't find crate for `{}`{}",
+ crate_name,
+ add,
+ );
+
+ if (crate_name == sym::std || crate_name == sym::core)
+ && locator.triple != TargetTriple::from_triple(config::host_triple())
+ {
+ err.note(&format!("the `{}` target may not be installed", locator.triple));
+ } else if crate_name == sym::profiler_builtins {
+ err.note(&"the compiler may have been built without the profiler runtime");
+ }
+ err.span_label(span, "can't find crate");
+ err
+ };
+
+ if !locator.rejected_via_filename.is_empty() {
+ let mismatches = locator.rejected_via_filename.iter();
+ for CrateMismatch { path, .. } in mismatches {
+ err.note(&format!(
+ "extern location for {} is of an unknown type: {}",
+ crate_name,
+ path.display(),
+ ))
+ .help(&format!(
+ "file name should be lib*.rlib or {}*.{}",
+ locator.dll_prefix, locator.dll_suffix
+ ));
+ }
+ }
+ err
+ }
+ CrateError::NonDylibPlugin(crate_name) => struct_span_err!(
+ sess,
+ span,
+ E0457,
+ "plugin `{}` only found in rlib format, but must be available in dylib format",
+ crate_name,
+ ),
+ };
+
+ err.emit();
+ sess.abort_if_errors();
+ unreachable!();
+ }
+}
debug!("EncodeContext::encode_info_for_closure({:?})", def_id);
// NOTE(eddyb) `tcx.type_of(def_id)` isn't used because it's fully generic,
- // including on the signature, which is inferred in `typeck_tables_of.
+ // including on the signature, which is inferred in `typeck`.
let hir_id = self.tcx.hir().as_local_hir_id(def_id);
- let ty = self.tcx.typeck_tables_of(def_id).node_type(hir_id);
+ let ty = self.tcx.typeck(def_id).node_type(hir_id);
record!(self.tables.kind[def_id.to_def_id()] <- match ty.kind {
ty::Generator(..) => {
rustc_middle::mir::Promoted,
rustc_middle::mir::Body<'_x>
>;
- [decode] tables: rustc_middle::ty::TypeckTables<$tcx>, rustc_middle::ty::TypeckTables<'_x>;
+ [decode] typeck_results: rustc_middle::ty::TypeckResults<$tcx>, rustc_middle::ty::TypeckResults<'_x>;
[decode] borrowck_result:
rustc_middle::mir::BorrowCheckResult<$tcx>,
rustc_middle::mir::BorrowCheckResult<'_x>;
//! This module defines the `DepNode` type which the compiler uses to represent
-//! nodes in the dependency graph. A `DepNode` consists of a `DepKind` (which
+//! nodes in the dependency graph.
+//!
+//! A `DepNode` consists of a `DepKind` (which
//! specifies the kind of thing it represents, like a piece of HIR, MIR, etc)
-//! and a `Fingerprint`, a 128 bit hash value the exact meaning of which
+//! and a `Fingerprint`, a 128-bit hash value the exact meaning of which
//! depends on the node's `DepKind`. Together, the kind and the fingerprint
//! fully identify a dependency node, even across multiple compilation sessions.
//! In other words, the value of the fingerprint does not depend on anything
//! uniquely identify a given commit and has a few advantages:
//!
//! * A `DepNode` can simply be serialized to disk and loaded in another session
-//! without the need to do any "rebasing (like we have to do for Spans and
-//! NodeIds) or "retracing" like we had to do for `DefId` in earlier
-//! implementations of the dependency graph.
+//! without the need to do any "rebasing" (like we have to do for Spans and
+//! NodeIds) or "retracing" (like we had to do for `DefId` in earlier
+//! implementations of the dependency graph).
//! * A `Fingerprint` is just a bunch of bits, which allows `DepNode` to
//! implement `Copy`, `Sync`, `Send`, `Freeze`, etc.
//! * Since we just have a bit pattern, `DepNode` can be mapped from disk into
//! `DefId` it was computed from. In other cases, too much information gets
//! lost during fingerprint computation.
//!
-//! The `DepConstructor` enum, together with `DepNode::new()` ensures that only
+//! The `DepConstructor` enum, together with `DepNode::new()`, ensures that only
//! valid `DepNode` instances can be constructed. For example, the API does not
//! allow for constructing parameterless `DepNode`s with anything other
//! than a zeroed out fingerprint. More generally speaking, it relieves the
pub mod exports;
pub mod map;
+pub mod place;
use crate::ich::StableHashingContext;
use crate::ty::query::Providers;
--- /dev/null
+use crate::ty;
+use crate::ty::Ty;
+
+use rustc_hir::HirId;
+use rustc_target::abi::VariantIdx;
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
+pub enum PlaceBase {
+ /// A temporary variable
+ Rvalue,
+ /// A named `static` item
+ StaticItem,
+ /// A named local variable
+ Local(HirId),
+ /// An upvar referenced by closure env
+ Upvar(ty::UpvarId),
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
+pub enum ProjectionKind {
+ /// A dereference of a pointer, reference or `Box<T>` of the given type
+ Deref,
+
+ /// `B.F` where `B` is the base expression and `F` is
+ /// the field. The field is identified by which variant
+ /// it appears in along with a field index. The variant
+ /// is used for enums.
+ Field(u32, VariantIdx),
+
+ /// Some index like `B[x]`, where `B` is the base
+ /// expression. We don't preserve the index `x` because
+ /// we won't need it.
+ Index,
+
+ /// A subslice covering a range of values like `B[x..y]`.
+ Subslice,
+}
+
+#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
+pub struct Projection<'tcx> {
+ /// Type after the projection is being applied.
+ pub ty: Ty<'tcx>,
+
+ /// Defines the type of access
+ pub kind: ProjectionKind,
+}
+
+/// A `Place` represents how a value is located in memory.
+///
+/// This is an HIR version of `mir::Place`.
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
+pub struct Place<'tcx> {
+ /// The type of the `PlaceBase`
+ pub base_ty: Ty<'tcx>,
+ /// The "outermost" place that holds this value.
+ pub base: PlaceBase,
+ /// How this place is derived from the base place.
+ pub projections: Vec<Projection<'tcx>>,
+}
+
+/// A `PlaceWithHirId` is a `Place` together with the `HirId` of the
+/// expression or pattern that produced the value the place describes.
+#[derive(Clone, Debug, PartialEq, Eq, Hash, RustcEncodable, RustcDecodable, HashStable)]
+pub struct PlaceWithHirId<'tcx> {
+ /// `HirId` of the expression or pattern producing this value.
+ pub hir_id: HirId,
+
+ /// Information about the `Place`
+ pub place: Place<'tcx>,
+}
+
+impl<'tcx> PlaceWithHirId<'tcx> {
+ pub fn new(
+ hir_id: HirId,
+ base_ty: Ty<'tcx>,
+ base: PlaceBase,
+ projections: Vec<Projection<'tcx>>,
+ ) -> PlaceWithHirId<'tcx> {
+ PlaceWithHirId {
+ hir_id: hir_id,
+ place: Place { base_ty: base_ty, base: base, projections: projections },
+ }
+ }
+}
+
+impl<'tcx> Place<'tcx> {
+ /// Returns an iterator of the types that have to be dereferenced to access
+ /// the `Place`.
+ ///
+ /// The types are in the reverse order that they are applied. So if
+ /// `x: &*const u32` and the `Place` is `**x`, then the types returned are
+ /// `*const u32` then `&*const u32`.
+ pub fn deref_tys(&self) -> impl Iterator<Item = Ty<'tcx>> + '_ {
+ self.projections.iter().enumerate().rev().filter_map(move |(index, proj)| {
+ if ProjectionKind::Deref == proj.kind {
+ Some(self.ty_before_projection(index))
+ } else {
+ None
+ }
+ })
+ }
+
+ /// Returns the type of this `Place` after all projections have been applied.
+ pub fn ty(&self) -> Ty<'tcx> {
+ self.projections.last().map_or_else(|| self.base_ty, |proj| proj.ty)
+ }
+
+ /// Returns the type of this `Place` immediately before `projection_index`th projection
+ /// is applied.
+ pub fn ty_before_projection(&self, projection_index: usize) -> Ty<'tcx> {
+ assert!(projection_index < self.projections.len());
+ if projection_index == 0 { self.base_ty } else { self.projections[projection_index - 1].ty }
+ }
+}
#![feature(bool_to_option)]
#![feature(box_patterns)]
#![feature(box_syntax)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
+#![feature(cmp_min_max_by)]
#![feature(const_fn)]
#![feature(const_panic)]
-#![cfg_attr(not(bootstrap), feature(const_fn_transmute))]
+#![feature(const_fn_transmute)]
#![feature(core_intrinsics)]
#![feature(discriminant_kind)]
#![feature(drain_filter)]
#![feature(or_patterns)]
#![feature(range_is_empty)]
#![feature(min_specialization)]
-#![cfg_attr(bootstrap, feature(track_caller))]
#![feature(trusted_len)]
#![feature(stmt_expr_attributes)]
#![feature(test)]
/// Positional arguments to `libcore::count_code_region()`
pub mod count_code_region_args {
- pub const COUNTER_INDEX: usize = 0;
- pub const START_BYTE_POS: usize = 1;
- pub const END_BYTE_POS: usize = 2;
+ pub const FUNCTION_SOURCE_HASH: usize = 0;
+ pub const COUNTER_INDEX: usize = 1;
+ pub const START_BYTE_POS: usize = 2;
+ pub const END_BYTE_POS: usize = 3;
}
/// Positional arguments to `libcore::coverage_counter_add()` and
self.try_to_scalar()?.to_bits(size).ok()
}
+ pub fn try_to_bool(&self) -> Option<bool> {
+ match self.try_to_bits(Size::from_bytes(1))? {
+ 0 => Some(false),
+ 1 => Some(true),
+ _ => None,
+ }
+ }
+
+ pub fn try_to_machine_usize(&self, tcx: TyCtxt<'tcx>) -> Option<u64> {
+ Some(self.try_to_bits(tcx.data_layout.pointer_size)? as u64)
+ }
+
pub fn try_to_bits_for_ty(
&self,
tcx: TyCtxt<'tcx>,
.debugging_opts
.inline_in_all_cgus
.unwrap_or_else(|| tcx.sess.opts.optimize != OptLevel::No)
- && !tcx.sess.opts.cg.link_dead_code;
+ && tcx.sess.opts.cg.link_dead_code != Some(true);
match *self {
MonoItem::Fn(ref instance) => {
use rustc_hir::def_id::{DefId, LocalDefId};
use rustc_index::bit_set::BitMatrix;
use rustc_index::vec::IndexVec;
-use rustc_span::{Span, Symbol};
+use rustc_span::Span;
use rustc_target::abi::VariantIdx;
use smallvec::SmallVec;
use std::cell::Cell;
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)]
pub enum UnsafetyViolationKind {
- /// Only permitted in regular `fn`s, prohibitted in `const fn`s.
+ /// Only permitted in regular `fn`s, prohibited in `const fn`s.
General,
/// Permitted both in `const fn`s and regular `fn`s.
GeneralAndConstFn,
UnsafeFnBorrowPacked,
}
+#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)]
+pub enum UnsafetyViolationDetails {
+ CallToUnsafeFunction,
+ UseOfInlineAssembly,
+ InitializingTypeWith,
+ CastOfPointerToInt,
+ BorrowOfPackedField,
+ UseOfMutableStatic,
+ UseOfExternStatic,
+ DerefOfRawPointer,
+ AssignToNonCopyUnionField,
+ AccessToUnionField,
+ MutationOfLayoutConstrainedField,
+ BorrowOfLayoutConstrainedField,
+ CallToFunctionWith,
+}
+
+impl UnsafetyViolationDetails {
+ pub fn description_and_note(&self) -> (&'static str, &'static str) {
+ use UnsafetyViolationDetails::*;
+ match self {
+ CallToUnsafeFunction => (
+ "call to unsafe function",
+ "consult the function's documentation for information on how to avoid undefined \
+ behavior",
+ ),
+ UseOfInlineAssembly => (
+ "use of inline assembly",
+ "inline assembly is entirely unchecked and can cause undefined behavior",
+ ),
+ InitializingTypeWith => (
+ "initializing type with `rustc_layout_scalar_valid_range` attr",
+ "initializing a layout restricted type's field with a value outside the valid \
+ range is undefined behavior",
+ ),
+ CastOfPointerToInt => {
+ ("cast of pointer to int", "casting pointers to integers in constants")
+ }
+ BorrowOfPackedField => (
+ "borrow of packed field",
+ "fields of packed structs might be misaligned: dereferencing a misaligned pointer \
+ or even just creating a misaligned reference is undefined behavior",
+ ),
+ UseOfMutableStatic => (
+ "use of mutable static",
+ "mutable statics can be mutated by multiple threads: aliasing violations or data \
+ races will cause undefined behavior",
+ ),
+ UseOfExternStatic => (
+ "use of extern static",
+ "extern statics are not controlled by the Rust type system: invalid data, \
+ aliasing violations or data races will cause undefined behavior",
+ ),
+ DerefOfRawPointer => (
+ "dereference of raw pointer",
+ "raw pointers may be NULL, dangling or unaligned; they can violate aliasing rules \
+ and cause data races: all of these are undefined behavior",
+ ),
+ AssignToNonCopyUnionField => (
+ "assignment to non-`Copy` union field",
+ "the previous content of the field will be dropped, which causes undefined \
+ behavior if the field was not properly initialized",
+ ),
+ AccessToUnionField => (
+ "access to union field",
+ "the field may not be properly initialized: using uninitialized data will cause \
+ undefined behavior",
+ ),
+ MutationOfLayoutConstrainedField => (
+ "mutation of layout constrained field",
+ "mutating layout constrained fields cannot statically be checked for valid values",
+ ),
+ BorrowOfLayoutConstrainedField => (
+ "borrow of layout constrained field with interior mutability",
+ "references to fields of layout constrained fields lose the constraints. Coupled \
+ with interior mutability, the field can be changed to invalid values",
+ ),
+ CallToFunctionWith => (
+ "call to function with `#[target_feature]`",
+ "can only be called if the required target features are available",
+ ),
+ }
+ }
+}
+
#[derive(Copy, Clone, PartialEq, RustcEncodable, RustcDecodable, HashStable)]
pub struct UnsafetyViolation {
pub source_info: SourceInfo,
pub lint_root: hir::HirId,
- pub description: Symbol,
- pub details: Symbol,
pub kind: UnsafetyViolationKind,
+ pub details: UnsafetyViolationDetails,
}
#[derive(Clone, RustcEncodable, RustcDecodable, HashStable)]
#[derive(Debug, RustcEncodable, RustcDecodable, HashStable)]
pub struct BorrowCheckResult<'tcx> {
/// All the opaque types that are restricted to concrete types
- /// by this function. Unlike the value in `TypeckTables`, this has
+ /// by this function. Unlike the value in `TypeckResults`, this has
/// unerased regions.
pub concrete_opaque_types: FxHashMap<DefId, ty::ResolvedOpaqueTy<'tcx>>,
pub closure_requirements: Option<ClosureRegionRequirements<'tcx>>,
/// `InstrumentCoverage` MIR pass and can be retrieved via the `coverageinfo` query.
#[derive(Clone, RustcEncodable, RustcDecodable, Debug, HashStable)]
pub struct CoverageInfo {
- /// A hash value that can be used by the consumer of the coverage profile data to detect
- /// changes to the instrumented source of the associated MIR body (typically, for an
- /// individual function).
- pub hash: u64,
-
/// The total number of coverage region counters added to the MIR `Body`.
pub num_counters: u32,
+
+ /// The total number of coverage region counter expressions added to the MIR `Body`.
+ pub num_expressions: u32,
}
impl<'tcx> TyCtxt<'tcx> {
}
impl<'a, 'tcx> ExactSizeIterator for ReversePostorder<'a, 'tcx> {}
+
+/// Returns an iterator over all basic blocks reachable from the `START_BLOCK` in no particular
+/// order.
+///
+/// This is clearer than writing `preorder` in cases where the order doesn't matter.
+pub fn reachable<'a, 'tcx>(
+ body: &'a Body<'tcx>,
+) -> impl 'a + Iterator<Item = (BasicBlock, &'a BasicBlockData<'tcx>)> {
+ preorder(body)
+}
+
+/// Returns a `BitSet` containing all basic blocks reachable from the `START_BLOCK`.
+pub fn reachable_as_bitset(body: &Body<'tcx>) -> BitSet<BasicBlock> {
+ let mut iter = preorder(body);
+ (&mut iter).for_each(drop);
+ iter.visited
+}
desc { "type-checking all item bodies" }
}
- query typeck_tables_of(key: LocalDefId) -> &'tcx ty::TypeckTables<'tcx> {
+ query typeck(key: LocalDefId) -> &'tcx ty::TypeckResults<'tcx> {
desc { |tcx| "type-checking `{}`", tcx.def_path_str(key.to_def_id()) }
cache_on_disk_if { true }
}
- query typeck_tables_of_const_arg(
+ query typeck_const_arg(
key: (LocalDefId, DefId)
- ) -> &'tcx ty::TypeckTables<'tcx> {
+ ) -> &'tcx ty::TypeckResults<'tcx> {
desc {
|tcx| "type-checking the const argument `{}`",
tcx.def_path_str(key.0.to_def_id()),
}
}
- query diagnostic_only_typeck_tables_of(key: LocalDefId) -> &'tcx ty::TypeckTables<'tcx> {
+ query diagnostic_only_typeck(key: LocalDefId) -> &'tcx ty::TypeckResults<'tcx> {
desc { |tcx| "type-checking `{}`", tcx.def_path_str(key.to_def_id()) }
cache_on_disk_if { true }
load_cached(tcx, id) {
- let typeck_tables: Option<ty::TypeckTables<'tcx>> = tcx
+ let typeck_results: Option<ty::TypeckResults<'tcx>> = tcx
.queries.on_disk_cache
.try_load_query_result(tcx, id);
- typeck_tables.map(|x| &*tcx.arena.alloc(x))
+ typeck_results.map(|x| &*tcx.arena.alloc(x))
}
}
}
}
TypeChecking {
- query has_typeck_tables(def_id: DefId) -> bool {
+ query has_typeck_results(def_id: DefId) -> bool {
desc { |tcx| "checking whether `{}` has a body", tcx.def_path_str(def_id) }
}
use crate::ich::{self, StableHashingContext};
use crate::ty::fast_reject::SimplifiedType;
+use crate::ty::fold::TypeFoldable;
use crate::ty::{self, TyCtxt};
use rustc_data_structures::fx::FxHashMap;
use rustc_data_structures::stable_hasher::{HashStable, StableHasher};
start_from_impl: DefId,
) -> Result<Ancestors<'tcx>, ErrorReported> {
let specialization_graph = tcx.specialization_graph_of(trait_def_id);
- if specialization_graph.has_errored {
+
+ if specialization_graph.has_errored || tcx.type_of(start_from_impl).references_error() {
Err(ErrorReported)
} else {
Ok(Ancestors {
-use crate::mir::interpret::truncate;
-use rustc_target::abi::Size;
-
-#[derive(Copy, Clone)]
-/// A type for representing any integer. Only used for printing.
-// FIXME: Use this for the integer-tree representation needed for type level ints and
-// const generics?
-pub struct ConstInt {
- /// Number of bytes of the integer. Only 1, 2, 4, 8, 16 are legal values.
- size: u8,
- /// Whether the value is of a signed integer type.
- signed: bool,
- /// Whether the value is a `usize` or `isize` type.
- is_ptr_sized_integral: bool,
- /// Raw memory of the integer. All bytes beyond the `size` are unused and must be zero.
- raw: u128,
+use crate::mir::interpret::ConstValue;
+use crate::mir::interpret::{LitToConstInput, Scalar};
+use crate::ty::subst::InternalSubsts;
+use crate::ty::{self, Ty, TyCtxt};
+use crate::ty::{ParamEnv, ParamEnvAnd};
+use rustc_errors::ErrorReported;
+use rustc_hir as hir;
+use rustc_hir::def_id::LocalDefId;
+use rustc_macros::HashStable;
+
+mod int;
+mod kind;
+
+pub use int::*;
+pub use kind::*;
+
+/// Typed constant value.
+#[derive(Copy, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Eq, PartialEq, Ord, PartialOrd)]
+#[derive(HashStable)]
+pub struct Const<'tcx> {
+ pub ty: Ty<'tcx>,
+
+ pub val: ConstKind<'tcx>,
}
-impl ConstInt {
- pub fn new(raw: u128, size: Size, signed: bool, is_ptr_sized_integral: bool) -> Self {
- assert!(raw <= truncate(u128::MAX, size));
- Self { raw, size: size.bytes() as u8, signed, is_ptr_sized_integral }
+#[cfg(target_arch = "x86_64")]
+static_assert_size!(Const<'_>, 48);
+
+impl<'tcx> Const<'tcx> {
+ /// Literals and const generic parameters are eagerly converted to a constant, everything else
+ /// becomes `Unevaluated`.
+ pub fn from_anon_const(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &'tcx Self {
+ Self::from_opt_const_arg_anon_const(tcx, ty::WithOptConstParam::unknown(def_id))
}
-}
-impl std::fmt::Debug for ConstInt {
- fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- let Self { size, signed, raw, is_ptr_sized_integral } = *self;
- if signed {
- let bit_size = size * 8;
- let min = 1u128 << (bit_size - 1);
- let max = min - 1;
- if raw == min {
- match (size, is_ptr_sized_integral) {
- (_, true) => write!(fmt, "isize::MIN"),
- (1, _) => write!(fmt, "i8::MIN"),
- (2, _) => write!(fmt, "i16::MIN"),
- (4, _) => write!(fmt, "i32::MIN"),
- (8, _) => write!(fmt, "i64::MIN"),
- (16, _) => write!(fmt, "i128::MIN"),
- _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
- }
- } else if raw == max {
- match (size, is_ptr_sized_integral) {
- (_, true) => write!(fmt, "isize::MAX"),
- (1, _) => write!(fmt, "i8::MAX"),
- (2, _) => write!(fmt, "i16::MAX"),
- (4, _) => write!(fmt, "i32::MAX"),
- (8, _) => write!(fmt, "i64::MAX"),
- (16, _) => write!(fmt, "i128::MAX"),
- _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
+ pub fn from_opt_const_arg_anon_const(
+ tcx: TyCtxt<'tcx>,
+ def: ty::WithOptConstParam<LocalDefId>,
+ ) -> &'tcx Self {
+ debug!("Const::from_anon_const(def={:?})", def);
+
+ let hir_id = tcx.hir().local_def_id_to_hir_id(def.did);
+
+ let body_id = match tcx.hir().get(hir_id) {
+ hir::Node::AnonConst(ac) => ac.body,
+ _ => span_bug!(
+ tcx.def_span(def.did.to_def_id()),
+ "from_anon_const can only process anonymous constants"
+ ),
+ };
+
+ let expr = &tcx.hir().body(body_id).value;
+
+ let ty = tcx.type_of(def.def_id_for_type_of());
+
+ let lit_input = match expr.kind {
+ hir::ExprKind::Lit(ref lit) => Some(LitToConstInput { lit: &lit.node, ty, neg: false }),
+ hir::ExprKind::Unary(hir::UnOp::UnNeg, ref expr) => match expr.kind {
+ hir::ExprKind::Lit(ref lit) => {
+ Some(LitToConstInput { lit: &lit.node, ty, neg: true })
}
+ _ => None,
+ },
+ _ => None,
+ };
+
+ if let Some(lit_input) = lit_input {
+ // If an error occurred, ignore that it's a literal and leave reporting the error up to
+ // mir.
+ if let Ok(c) = tcx.at(expr.span).lit_to_const(lit_input) {
+ return c;
} else {
- match size {
- 1 => write!(fmt, "{}", raw as i8)?,
- 2 => write!(fmt, "{}", raw as i16)?,
- 4 => write!(fmt, "{}", raw as i32)?,
- 8 => write!(fmt, "{}", raw as i64)?,
- 16 => write!(fmt, "{}", raw as i128)?,
- _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
- }
- if fmt.alternate() {
- match (size, is_ptr_sized_integral) {
- (_, true) => write!(fmt, "_isize")?,
- (1, _) => write!(fmt, "_i8")?,
- (2, _) => write!(fmt, "_i16")?,
- (4, _) => write!(fmt, "_i32")?,
- (8, _) => write!(fmt, "_i64")?,
- (16, _) => write!(fmt, "_i128")?,
- _ => bug!(),
- }
- }
- Ok(())
+ tcx.sess.delay_span_bug(expr.span, "Const::from_anon_const: couldn't lit_to_const");
}
- } else {
- let max = truncate(u128::MAX, Size::from_bytes(size));
- if raw == max {
- match (size, is_ptr_sized_integral) {
- (_, true) => write!(fmt, "usize::MAX"),
- (1, _) => write!(fmt, "u8::MAX"),
- (2, _) => write!(fmt, "u16::MAX"),
- (4, _) => write!(fmt, "u32::MAX"),
- (8, _) => write!(fmt, "u64::MAX"),
- (16, _) => write!(fmt, "u128::MAX"),
- _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
- }
- } else {
- match size {
- 1 => write!(fmt, "{}", raw as u8)?,
- 2 => write!(fmt, "{}", raw as u16)?,
- 4 => write!(fmt, "{}", raw as u32)?,
- 8 => write!(fmt, "{}", raw as u64)?,
- 16 => write!(fmt, "{}", raw as u128)?,
- _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
- }
- if fmt.alternate() {
- match (size, is_ptr_sized_integral) {
- (_, true) => write!(fmt, "_usize")?,
- (1, _) => write!(fmt, "_u8")?,
- (2, _) => write!(fmt, "_u16")?,
- (4, _) => write!(fmt, "_u32")?,
- (8, _) => write!(fmt, "_u64")?,
- (16, _) => write!(fmt, "_u128")?,
- _ => bug!(),
- }
- }
- Ok(())
+ }
+
+ // Unwrap a block, so that e.g. `{ P }` is recognised as a parameter. Const arguments
+ // currently have to be wrapped in curly brackets, so it's necessary to special-case.
+ let expr = match &expr.kind {
+ hir::ExprKind::Block(block, _) if block.stmts.is_empty() && block.expr.is_some() => {
+ block.expr.as_ref().unwrap()
+ }
+ _ => expr,
+ };
+
+ use hir::{def::DefKind::ConstParam, def::Res, ExprKind, Path, QPath};
+ let val = match expr.kind {
+ ExprKind::Path(QPath::Resolved(_, &Path { res: Res::Def(ConstParam, def_id), .. })) => {
+ // Find the name and index of the const parameter by indexing the generics of
+ // the parent item and construct a `ParamConst`.
+ let hir_id = tcx.hir().as_local_hir_id(def_id.expect_local());
+ let item_id = tcx.hir().get_parent_node(hir_id);
+ let item_def_id = tcx.hir().local_def_id(item_id);
+ let generics = tcx.generics_of(item_def_id.to_def_id());
+ let index =
+ generics.param_def_id_to_index[&tcx.hir().local_def_id(hir_id).to_def_id()];
+ let name = tcx.hir().name(hir_id);
+ ty::ConstKind::Param(ty::ParamConst::new(index, name))
}
+ _ => ty::ConstKind::Unevaluated(
+ def.to_global(),
+ InternalSubsts::identity_for_item(tcx, def.did.to_def_id()),
+ None,
+ ),
+ };
+
+ tcx.mk_const(ty::Const { val, ty })
+ }
+
+ #[inline]
+ /// Interns the given value as a constant.
+ pub fn from_value(tcx: TyCtxt<'tcx>, val: ConstValue<'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
+ tcx.mk_const(Self { val: ConstKind::Value(val), ty })
+ }
+
+ #[inline]
+ /// Interns the given scalar as a constant.
+ pub fn from_scalar(tcx: TyCtxt<'tcx>, val: Scalar, ty: Ty<'tcx>) -> &'tcx Self {
+ Self::from_value(tcx, ConstValue::Scalar(val), ty)
+ }
+
+ #[inline]
+ /// Creates a constant with the given integer value and interns it.
+ pub fn from_bits(tcx: TyCtxt<'tcx>, bits: u128, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> &'tcx Self {
+ let size = tcx
+ .layout_of(ty)
+ .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e))
+ .size;
+ Self::from_scalar(tcx, Scalar::from_uint(bits, size), ty.value)
+ }
+
+ #[inline]
+ /// Creates an interned zst constant.
+ pub fn zero_sized(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
+ Self::from_scalar(tcx, Scalar::zst(), ty)
+ }
+
+ #[inline]
+ /// Creates an interned bool constant.
+ pub fn from_bool(tcx: TyCtxt<'tcx>, v: bool) -> &'tcx Self {
+ Self::from_bits(tcx, v as u128, ParamEnv::empty().and(tcx.types.bool))
+ }
+
+ #[inline]
+ /// Creates an interned usize constant.
+ pub fn from_usize(tcx: TyCtxt<'tcx>, n: u64) -> &'tcx Self {
+ Self::from_bits(tcx, n as u128, ParamEnv::empty().and(tcx.types.usize))
+ }
+
+ #[inline]
+ /// Attempts to evaluate the given constant to bits. Can fail to evaluate in the presence of
+ /// generics (or erroneous code) or if the value can't be represented as bits (e.g. because it
+ /// contains const generic parameters or pointers).
+ pub fn try_eval_bits(
+ &self,
+ tcx: TyCtxt<'tcx>,
+ param_env: ParamEnv<'tcx>,
+ ty: Ty<'tcx>,
+ ) -> Option<u128> {
+ assert_eq!(self.ty, ty);
+ let size = tcx.layout_of(param_env.with_reveal_all().and(ty)).ok()?.size;
+ // if `ty` does not depend on generic parameters, use an empty param_env
+ self.val.eval(tcx, param_env).try_to_bits(size)
+ }
+
+ #[inline]
+ pub fn try_eval_bool(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Option<bool> {
+ self.val.eval(tcx, param_env).try_to_bool()
+ }
+
+ #[inline]
+ pub fn try_eval_usize(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Option<u64> {
+ self.val.eval(tcx, param_env).try_to_machine_usize(tcx)
+ }
+
+ #[inline]
+ /// Tries to evaluate the constant if it is `Unevaluated`. If that doesn't succeed, return the
+ /// unevaluated constant.
+ pub fn eval(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> &Const<'tcx> {
+ if let Some(val) = self.val.try_eval(tcx, param_env) {
+ match val {
+ Ok(val) => Const::from_value(tcx, val, self.ty),
+ Err(ErrorReported) => tcx.const_error(self.ty),
+ }
+ } else {
+ self
}
}
+
+ #[inline]
+ /// Panics if the value cannot be evaluated or doesn't contain a valid integer of the given type.
+ pub fn eval_bits(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, ty: Ty<'tcx>) -> u128 {
+ self.try_eval_bits(tcx, param_env, ty)
+ .unwrap_or_else(|| bug!("expected bits of {:#?}, got {:#?}", ty, self))
+ }
+
+ #[inline]
+ /// Panics if the value cannot be evaluated or doesn't contain a valid `usize`.
+ pub fn eval_usize(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> u64 {
+ self.try_eval_usize(tcx, param_env)
+ .unwrap_or_else(|| bug!("expected usize, got {:#?}", self))
+ }
}
--- /dev/null
+use crate::mir::interpret::truncate;
+use rustc_target::abi::Size;
+
+#[derive(Copy, Clone)]
+/// A type for representing any integer. Only used for printing.
+// FIXME: Use this for the integer-tree representation needed for type level ints and
+// const generics?
+pub struct ConstInt {
+ /// Number of bytes of the integer. Only 1, 2, 4, 8, 16 are legal values.
+ size: u8,
+ /// Whether the value is of a signed integer type.
+ signed: bool,
+ /// Whether the value is a `usize` or `isize` type.
+ is_ptr_sized_integral: bool,
+ /// Raw memory of the integer. All bytes beyond the `size` are unused and must be zero.
+ raw: u128,
+}
+
+impl ConstInt {
+ pub fn new(raw: u128, size: Size, signed: bool, is_ptr_sized_integral: bool) -> Self {
+ assert!(raw <= truncate(u128::MAX, size));
+ Self { raw, size: size.bytes() as u8, signed, is_ptr_sized_integral }
+ }
+}
+
+impl std::fmt::Debug for ConstInt {
+ fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ let Self { size, signed, raw, is_ptr_sized_integral } = *self;
+ if signed {
+ let bit_size = size * 8;
+ let min = 1u128 << (bit_size - 1);
+ let max = min - 1;
+ if raw == min {
+ match (size, is_ptr_sized_integral) {
+ (_, true) => write!(fmt, "isize::MIN"),
+ (1, _) => write!(fmt, "i8::MIN"),
+ (2, _) => write!(fmt, "i16::MIN"),
+ (4, _) => write!(fmt, "i32::MIN"),
+ (8, _) => write!(fmt, "i64::MIN"),
+ (16, _) => write!(fmt, "i128::MIN"),
+ _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
+ }
+ } else if raw == max {
+ match (size, is_ptr_sized_integral) {
+ (_, true) => write!(fmt, "isize::MAX"),
+ (1, _) => write!(fmt, "i8::MAX"),
+ (2, _) => write!(fmt, "i16::MAX"),
+ (4, _) => write!(fmt, "i32::MAX"),
+ (8, _) => write!(fmt, "i64::MAX"),
+ (16, _) => write!(fmt, "i128::MAX"),
+ _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
+ }
+ } else {
+ match size {
+ 1 => write!(fmt, "{}", raw as i8)?,
+ 2 => write!(fmt, "{}", raw as i16)?,
+ 4 => write!(fmt, "{}", raw as i32)?,
+ 8 => write!(fmt, "{}", raw as i64)?,
+ 16 => write!(fmt, "{}", raw as i128)?,
+ _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
+ }
+ if fmt.alternate() {
+ match (size, is_ptr_sized_integral) {
+ (_, true) => write!(fmt, "_isize")?,
+ (1, _) => write!(fmt, "_i8")?,
+ (2, _) => write!(fmt, "_i16")?,
+ (4, _) => write!(fmt, "_i32")?,
+ (8, _) => write!(fmt, "_i64")?,
+ (16, _) => write!(fmt, "_i128")?,
+ _ => bug!(),
+ }
+ }
+ Ok(())
+ }
+ } else {
+ let max = truncate(u128::MAX, Size::from_bytes(size));
+ if raw == max {
+ match (size, is_ptr_sized_integral) {
+ (_, true) => write!(fmt, "usize::MAX"),
+ (1, _) => write!(fmt, "u8::MAX"),
+ (2, _) => write!(fmt, "u16::MAX"),
+ (4, _) => write!(fmt, "u32::MAX"),
+ (8, _) => write!(fmt, "u64::MAX"),
+ (16, _) => write!(fmt, "u128::MAX"),
+ _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
+ }
+ } else {
+ match size {
+ 1 => write!(fmt, "{}", raw as u8)?,
+ 2 => write!(fmt, "{}", raw as u16)?,
+ 4 => write!(fmt, "{}", raw as u32)?,
+ 8 => write!(fmt, "{}", raw as u64)?,
+ 16 => write!(fmt, "{}", raw as u128)?,
+ _ => bug!("ConstInt 0x{:x} with size = {} and signed = {}", raw, size, signed),
+ }
+ if fmt.alternate() {
+ match (size, is_ptr_sized_integral) {
+ (_, true) => write!(fmt, "_usize")?,
+ (1, _) => write!(fmt, "_u8")?,
+ (2, _) => write!(fmt, "_u16")?,
+ (4, _) => write!(fmt, "_u32")?,
+ (8, _) => write!(fmt, "_u64")?,
+ (16, _) => write!(fmt, "_u128")?,
+ _ => bug!(),
+ }
+ }
+ Ok(())
+ }
+ }
+ }
+}
--- /dev/null
+use crate::mir::interpret::ConstValue;
+use crate::mir::interpret::Scalar;
+use crate::mir::Promoted;
+use crate::ty::subst::{InternalSubsts, SubstsRef};
+use crate::ty::ParamEnv;
+use crate::ty::{self, TyCtxt, TypeFoldable};
+use rustc_errors::ErrorReported;
+use rustc_hir::def_id::DefId;
+use rustc_macros::HashStable;
+use rustc_target::abi::Size;
+
+/// Represents a constant in Rust.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
+#[derive(HashStable)]
+pub enum ConstKind<'tcx> {
+ /// A const generic parameter.
+ Param(ty::ParamConst),
+
+ /// Infer the value of the const.
+ Infer(InferConst<'tcx>),
+
+ /// Bound const variable, used only when preparing a trait query.
+ Bound(ty::DebruijnIndex, ty::BoundVar),
+
+ /// A placeholder const - universally quantified higher-ranked const.
+ Placeholder(ty::PlaceholderConst),
+
+ /// Used in the HIR by using `Unevaluated` everywhere and later normalizing to one of the other
+ /// variants when the code is monomorphic enough for that.
+ Unevaluated(ty::WithOptConstParam<DefId>, SubstsRef<'tcx>, Option<Promoted>),
+
+ /// Used to hold computed value.
+ Value(ConstValue<'tcx>),
+
+ /// A placeholder for a const which could not be computed; this is
+ /// propagated to avoid useless error messages.
+ Error(ty::sty::DelaySpanBugEmitted),
+}
+
+#[cfg(target_arch = "x86_64")]
+static_assert_size!(ConstKind<'_>, 40);
+
+impl<'tcx> ConstKind<'tcx> {
+ #[inline]
+ pub fn try_to_value(self) -> Option<ConstValue<'tcx>> {
+ if let ConstKind::Value(val) = self { Some(val) } else { None }
+ }
+
+ #[inline]
+ pub fn try_to_scalar(self) -> Option<Scalar> {
+ self.try_to_value()?.try_to_scalar()
+ }
+
+ #[inline]
+ pub fn try_to_bits(self, size: Size) -> Option<u128> {
+ self.try_to_value()?.try_to_bits(size)
+ }
+
+ #[inline]
+ pub fn try_to_bool(self) -> Option<bool> {
+ self.try_to_value()?.try_to_bool()
+ }
+
+ #[inline]
+ pub fn try_to_machine_usize(self, tcx: TyCtxt<'tcx>) -> Option<u64> {
+ self.try_to_value()?.try_to_machine_usize(tcx)
+ }
+}
+
+/// An inference variable for a const, for use in const generics.
+#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
+#[derive(HashStable)]
+pub enum InferConst<'tcx> {
+ /// Infer the value of the const.
+ Var(ty::ConstVid<'tcx>),
+ /// A fresh const variable. See `infer::freshen` for more details.
+ Fresh(u32),
+}
+
+impl<'tcx> ConstKind<'tcx> {
+ #[inline]
+ /// Tries to evaluate the constant if it is `Unevaluated`. If that doesn't succeed, return the
+ /// unevaluated constant.
+ pub fn eval(self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Self {
+ self.try_eval(tcx, param_env).and_then(Result::ok).map(ConstKind::Value).unwrap_or(self)
+ }
+
+ #[inline]
+ /// Tries to evaluate the constant if it is `Unevaluated`. If that isn't possible or necessary
+ /// return `None`.
+ pub(super) fn try_eval(
+ self,
+ tcx: TyCtxt<'tcx>,
+ param_env: ParamEnv<'tcx>,
+ ) -> Option<Result<ConstValue<'tcx>, ErrorReported>> {
+ if let ConstKind::Unevaluated(def, substs, promoted) = self {
+ use crate::mir::interpret::ErrorHandled;
+
+ let param_env_and_substs = param_env.with_reveal_all().and(substs);
+
+ // HACK(eddyb) this erases lifetimes even though `const_eval_resolve`
+ // also does later, but we want to do it before checking for
+ // inference variables.
+            let param_env_and_substs = tcx.erase_regions(&param_env_and_substs);
+
+ // HACK(eddyb) when the query key would contain inference variables,
+ // attempt using identity substs and `ParamEnv` instead, that will succeed
+ // when the expression doesn't depend on any parameters.
+ // FIXME(eddyb, skinny121) pass `InferCtxt` into here when it's available, so that
+ // we can call `infcx.const_eval_resolve` which handles inference variables.
+ let param_env_and_substs = if param_env_and_substs.needs_infer() {
+ tcx.param_env(def.did).and(InternalSubsts::identity_for_item(tcx, def.did))
+ } else {
+ param_env_and_substs
+ };
+
+ // FIXME(eddyb) maybe the `const_eval_*` methods should take
+ // `ty::ParamEnvAnd<SubstsRef>` instead of having them separate.
+ let (param_env, substs) = param_env_and_substs.into_parts();
+ // try to resolve e.g. associated constants to their definition on an impl, and then
+ // evaluate the const.
+ match tcx.const_eval_resolve(param_env, def, substs, promoted, None) {
+ // NOTE(eddyb) `val` contains no lifetimes/types/consts,
+ // and we use the original type, so nothing from `substs`
+ // (which may be identity substs, see above),
+ // can leak through `val` into the const we return.
+ Ok(val) => Some(Ok(val)),
+ Err(ErrorHandled::TooGeneric | ErrorHandled::Linted) => None,
+ Err(ErrorHandled::Reported(e)) => Some(Err(e)),
+ }
+ } else {
+ None
+ }
+ }
+}
}
/// Validate that the given HirId (respectively its `local_id` part) can be
-/// safely used as a key in the tables of a TypeckTable. For that to be
+/// safely used as a key in the maps of a TypeckResults. For that to be
/// the case, the HirId must have the same `owner` as all the other IDs in
/// this table (signified by `hir_owner`). Otherwise the HirId
/// would be in a different frame of reference and using its `local_id`
/// would result in lookup errors, or worse, in silently wrong data being
/// stored/returned.
-fn validate_hir_id_for_typeck_tables(hir_owner: LocalDefId, hir_id: hir::HirId) {
+fn validate_hir_id_for_typeck_results(hir_owner: LocalDefId, hir_id: hir::HirId) {
if hir_id.owner != hir_owner {
ty::tls::with(|tcx| {
bug!(
- "node {} with HirId::owner {:?} cannot be placed in TypeckTables with hir_owner {:?}",
+ "node {} with HirId::owner {:?} cannot be placed in TypeckResults with hir_owner {:?}",
tcx.hir().node_to_string(hir_id),
hir_id.owner,
hir_owner
impl<'a, V> LocalTableInContext<'a, V> {
pub fn contains_key(&self, id: hir::HirId) -> bool {
- validate_hir_id_for_typeck_tables(self.hir_owner, id);
+ validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.contains_key(&id.local_id)
}
pub fn get(&self, id: hir::HirId) -> Option<&V> {
- validate_hir_id_for_typeck_tables(self.hir_owner, id);
+ validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.get(&id.local_id)
}
impl<'a, V> LocalTableInContextMut<'a, V> {
pub fn get_mut(&mut self, id: hir::HirId) -> Option<&mut V> {
- validate_hir_id_for_typeck_tables(self.hir_owner, id);
+ validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.get_mut(&id.local_id)
}
pub fn entry(&mut self, id: hir::HirId) -> Entry<'_, hir::ItemLocalId, V> {
- validate_hir_id_for_typeck_tables(self.hir_owner, id);
+ validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.entry(id.local_id)
}
pub fn insert(&mut self, id: hir::HirId, val: V) -> Option<V> {
- validate_hir_id_for_typeck_tables(self.hir_owner, id);
+ validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.insert(id.local_id, val)
}
pub fn remove(&mut self, id: hir::HirId) -> Option<V> {
- validate_hir_id_for_typeck_tables(self.hir_owner, id);
+ validate_hir_id_for_typeck_results(self.hir_owner, id);
self.data.remove(&id.local_id)
}
}
}
#[derive(RustcEncodable, RustcDecodable, Debug)]
-pub struct TypeckTables<'tcx> {
+pub struct TypeckResults<'tcx> {
/// The `HirId::owner` all `ItemLocalId`s in this table are relative to.
pub hir_owner: LocalDefId,
pub generator_interior_types: Vec<GeneratorInteriorTypeCause<'tcx>>,
}
-impl<'tcx> TypeckTables<'tcx> {
- pub fn new(hir_owner: LocalDefId) -> TypeckTables<'tcx> {
- TypeckTables {
+impl<'tcx> TypeckResults<'tcx> {
+ pub fn new(hir_owner: LocalDefId) -> TypeckResults<'tcx> {
+ TypeckResults {
hir_owner,
type_dependent_defs: Default::default(),
field_indices: Default::default(),
}
pub fn type_dependent_def(&self, id: HirId) -> Option<(DefKind, DefId)> {
- validate_hir_id_for_typeck_tables(self.hir_owner, id);
+ validate_hir_id_for_typeck_results(self.hir_owner, id);
self.type_dependent_defs.get(&id.local_id).cloned().and_then(|r| r.ok())
}
}
pub fn node_type_opt(&self, id: hir::HirId) -> Option<Ty<'tcx>> {
- validate_hir_id_for_typeck_tables(self.hir_owner, id);
+ validate_hir_id_for_typeck_results(self.hir_owner, id);
self.node_types.get(&id.local_id).cloned()
}
}
pub fn node_substs(&self, id: hir::HirId) -> SubstsRef<'tcx> {
- validate_hir_id_for_typeck_tables(self.hir_owner, id);
+ validate_hir_id_for_typeck_results(self.hir_owner, id);
self.node_substs.get(&id.local_id).cloned().unwrap_or_else(|| InternalSubsts::empty())
}
pub fn node_substs_opt(&self, id: hir::HirId) -> Option<SubstsRef<'tcx>> {
- validate_hir_id_for_typeck_tables(self.hir_owner, id);
+ validate_hir_id_for_typeck_results(self.hir_owner, id);
self.node_substs.get(&id.local_id).cloned()
}
}
pub fn expr_adjustments(&self, expr: &hir::Expr<'_>) -> &[ty::adjustment::Adjustment<'tcx>] {
- validate_hir_id_for_typeck_tables(self.hir_owner, expr.hir_id);
+ validate_hir_id_for_typeck_results(self.hir_owner, expr.hir_id);
self.adjustments.get(&expr.hir_id.local_id).map_or(&[], |a| &a[..])
}
}
pub fn is_coercion_cast(&self, hir_id: hir::HirId) -> bool {
- validate_hir_id_for_typeck_tables(self.hir_owner, hir_id);
+ validate_hir_id_for_typeck_results(self.hir_owner, hir_id);
self.coercion_casts.contains(&hir_id.local_id)
}
}
}
-impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TypeckTables<'tcx> {
+impl<'a, 'tcx> HashStable<StableHashingContext<'a>> for TypeckResults<'tcx> {
fn hash_stable(&self, hcx: &mut StableHashingContext<'a>, hasher: &mut StableHasher) {
- let ty::TypeckTables {
+ let ty::TypeckResults {
hir_owner,
ref type_dependent_defs,
ref field_indices,
}
impl<'tcx> TyCtxt<'tcx> {
- pub fn typeck_tables_of_opt_const_arg(
+ pub fn typeck_opt_const_arg(
self,
def: ty::WithOptConstParam<LocalDefId>,
- ) -> &'tcx TypeckTables<'tcx> {
+ ) -> &'tcx TypeckResults<'tcx> {
if let Some(param_did) = def.const_param_did {
- self.typeck_tables_of_const_arg((def.did, param_did))
+ self.typeck_const_arg((def.did, param_did))
} else {
- self.typeck_tables_of(def.did)
+ self.typeck(def.did)
}
}
.iter_enumerated()
.all(|(i, v)| v.discr == ty::VariantDiscr::Relative(i.as_u32()));
+ let mut niche_filling_layout = None;
+
// Niche-filling enum optimization.
if !def.repr.inhibit_enum_layout_opt() && no_explicit_discriminants {
let mut dataful_variant = None;
let largest_niche =
Niche::from_scalar(dl, offset, niche_scalar.clone());
- return Ok(tcx.intern_layout(Layout {
+ niche_filling_layout = Some(Layout {
variants: Variants::Multiple {
tag: niche_scalar,
tag_encoding: TagEncoding::Niche {
largest_niche,
size,
align,
- }));
+ });
}
}
}
let largest_niche = Niche::from_scalar(dl, Size::ZERO, tag.clone());
- tcx.intern_layout(Layout {
+ let tagged_layout = Layout {
variants: Variants::Multiple {
tag,
tag_encoding: TagEncoding::Direct,
abi,
align,
size,
- })
+ };
+
+ let best_layout = match (tagged_layout, niche_filling_layout) {
+ (tagged_layout, Some(niche_filling_layout)) => {
+ // Pick the smaller layout; otherwise,
+ // pick the layout with the larger niche; otherwise,
+ // pick tagged as it has simpler codegen.
+ cmp::min_by_key(tagged_layout, niche_filling_layout, |layout| {
+ let niche_size =
+ layout.largest_niche.as_ref().map_or(0, |n| n.available(dl));
+ (layout.size, cmp::Reverse(niche_size))
+ })
+ }
+ (tagged_layout, None) => tagged_layout,
+ };
+
+ tcx.intern_layout(best_layout)
}
// Types with no meaningful known layout.
pub use self::sty::{BoundRegion, EarlyBoundRegion, FreeRegion, Region};
pub use self::sty::{CanonicalPolyFnSig, FnSig, GenSig, PolyFnSig, PolyGenSig};
pub use self::sty::{ClosureSubsts, GeneratorSubsts, TypeAndMut, UpvarSubsts};
-pub use self::sty::{Const, ConstKind, ExistentialProjection, PolyExistentialProjection};
pub use self::sty::{ConstVid, FloatVid, IntVid, RegionVid, TyVid};
-pub use self::sty::{ExistentialPredicate, InferConst, InferTy, ParamConst, ParamTy, ProjectionTy};
+pub use self::sty::{ExistentialPredicate, InferTy, ParamConst, ParamTy, ProjectionTy};
+pub use self::sty::{ExistentialProjection, PolyExistentialProjection};
pub use self::sty::{ExistentialTraitRef, PolyExistentialTraitRef};
pub use self::sty::{PolyTraitRef, TraitRef, TyKind};
pub use crate::ty::diagnostics::*;
UserType, UserTypeAnnotationIndex,
};
pub use self::context::{
- CtxtInterners, GeneratorInteriorTypeCause, GlobalCtxt, Lift, TypeckTables,
+ CtxtInterners, GeneratorInteriorTypeCause, GlobalCtxt, Lift, TypeckResults,
};
pub use self::instance::{Instance, InstanceDef};
pub use self::query::queries;
-pub use self::consts::ConstInt;
+pub use self::consts::{Const, ConstInt, ConstKind, InferConst};
pub mod adjustment;
pub mod binding;
/// in case `did` is a const argument.
///
/// This is used to prevent cycle errors during typeck
-/// as `type_of(const_arg)` depends on `typeck_tables_of(owning_body)`
+/// as `type_of(const_arg)` depends on `typeck(owning_body)`
/// which once again requires the type of its generic arguments.
///
/// Luckily we only need to deal with const arguments once we
}
impl<'tcx> TyCtxt<'tcx> {
- pub fn body_tables(self, body: hir::BodyId) -> &'tcx TypeckTables<'tcx> {
- self.typeck_tables_of(self.hir().body_owner_def_id(body))
+ pub fn typeck_body(self, body: hir::BodyId) -> &'tcx TypeckResults<'tcx> {
+ self.typeck(self.hir().body_owner_def_id(body))
}
/// Returns an iterator of the `DefId`s for all body-owners in this
is_associated_item.then(|| self.associated_item(def_id))
}
- pub fn field_index(self, hir_id: hir::HirId, tables: &TypeckTables<'_>) -> usize {
- tables.field_indices().get(hir_id).cloned().expect("no index for a field")
+ pub fn field_index(self, hir_id: hir::HirId, typeck_results: &TypeckResults<'_>) -> usize {
+ typeck_results.field_indices().get(hir_id).cloned().expect("no index for a field")
}
pub fn find_field_index(self, ident: Ident, variant: &VariantDef) -> Option<usize> {
// FIXME(eddyb) `name` should never be empty, but it
// currently is for `extern { ... }` "foreign modules".
- let name = disambiguated_data.data.as_symbol().as_str();
- if !name.is_empty() {
+ let name = disambiguated_data.data.as_symbol();
+ if name != kw::Invalid {
if !self.empty_path {
write!(self, "::")?;
}
- if Ident::from_str(&name).is_raw_guess() {
+ if Ident::with_dummy_span(name).is_raw_guess() {
write!(self, "r#")?;
}
write!(self, "{}", name)?;
/// Provides an interface to incremental compilation data cached from the
/// previous compilation session. This data will eventually include the results
-/// of a few selected queries (like `typeck_tables_of` and `mir_optimized`) and
+/// of a few selected queries (like `typeck` and `mir_optimized`) and
/// any diagnostics that have been emitted during a query.
pub struct OnDiskCache<'sess> {
// The complete cache data in serialized form.
match def_key.disambiguated_data.data {
DefPathData::CrateRoot => {
- name = self.tcx.original_crate_name(def_id.krate).as_str();
+ name = self.tcx.original_crate_name(def_id.krate);
dis = "";
end_index = 3;
}
other => {
- name = other.as_symbol().as_str();
+ name = other.as_symbol();
if def_key.disambiguated_data.disambiguator == 0 {
dis = "";
end_index = 3;
}
}
+ let name = &*name.as_str();
let components = [
StringComponent::Ref(parent_string_id),
StringComponent::Value("::"),
- StringComponent::Value(&name[..]),
+ StringComponent::Value(name),
StringComponent::Value(dis),
];
use self::TyKind::*;
use crate::infer::canonical::Canonical;
-use crate::mir::interpret::ConstValue;
-use crate::mir::interpret::{LitToConstInput, Scalar};
-use crate::mir::Promoted;
use crate::ty::subst::{GenericArg, InternalSubsts, Subst, SubstsRef};
use crate::ty::{
self, AdtDef, DefIdTree, Discr, Ty, TyCtxt, TypeFlags, TypeFoldable, WithConstness,
};
-use crate::ty::{List, ParamEnv, ParamEnvAnd, TyS};
+use crate::ty::{List, ParamEnv, TyS};
use polonius_engine::Atom;
use rustc_ast::ast;
use rustc_data_structures::captures::Captures;
-use rustc_errors::ErrorReported;
use rustc_hir as hir;
-use rustc_hir::def_id::{DefId, LocalDefId};
+use rustc_hir::def_id::DefId;
use rustc_index::vec::Idx;
use rustc_macros::HashStable;
use rustc_span::symbol::{kw, Ident, Symbol};
-use rustc_target::abi::{Size, VariantIdx};
+use rustc_target::abi::VariantIdx;
use rustc_target::spec::abi;
use std::borrow::Cow;
use std::cmp::Ordering;
ParamConst::new(def.index, def.name)
}
- pub fn to_const(self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> &'tcx Const<'tcx> {
+ pub fn to_const(self, tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> &'tcx ty::Const<'tcx> {
tcx.mk_const_param(self.index, self.name, ty)
}
}
tcx.layout_of(tcx.param_env(did).and(self)).map(|layout| layout.is_zst()).unwrap_or(false)
}
}
-
-/// Typed constant value.
-#[derive(Copy, Clone, Debug, Hash, RustcEncodable, RustcDecodable, Eq, PartialEq, Ord, PartialOrd)]
-#[derive(HashStable)]
-pub struct Const<'tcx> {
- pub ty: Ty<'tcx>,
-
- pub val: ConstKind<'tcx>,
-}
-
-#[cfg(target_arch = "x86_64")]
-static_assert_size!(Const<'_>, 48);
-
-impl<'tcx> Const<'tcx> {
- /// Literals and const generic parameters are eagerly converted to a constant, everything else
- /// becomes `Unevaluated`.
- pub fn from_anon_const(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &'tcx Self {
- Self::from_opt_const_arg_anon_const(tcx, ty::WithOptConstParam::unknown(def_id))
- }
-
- pub fn from_opt_const_arg_anon_const(
- tcx: TyCtxt<'tcx>,
- def: ty::WithOptConstParam<LocalDefId>,
- ) -> &'tcx Self {
- debug!("Const::from_anon_const(def={:?})", def);
-
- let hir_id = tcx.hir().local_def_id_to_hir_id(def.did);
-
- let body_id = match tcx.hir().get(hir_id) {
- hir::Node::AnonConst(ac) => ac.body,
- _ => span_bug!(
- tcx.def_span(def.did.to_def_id()),
- "from_anon_const can only process anonymous constants"
- ),
- };
-
- let expr = &tcx.hir().body(body_id).value;
-
- let ty = tcx.type_of(def.def_id_for_type_of());
-
- let lit_input = match expr.kind {
- hir::ExprKind::Lit(ref lit) => Some(LitToConstInput { lit: &lit.node, ty, neg: false }),
- hir::ExprKind::Unary(hir::UnOp::UnNeg, ref expr) => match expr.kind {
- hir::ExprKind::Lit(ref lit) => {
- Some(LitToConstInput { lit: &lit.node, ty, neg: true })
- }
- _ => None,
- },
- _ => None,
- };
-
- if let Some(lit_input) = lit_input {
- // If an error occurred, ignore that it's a literal and leave reporting the error up to
- // mir.
- if let Ok(c) = tcx.at(expr.span).lit_to_const(lit_input) {
- return c;
- } else {
- tcx.sess.delay_span_bug(expr.span, "Const::from_anon_const: couldn't lit_to_const");
- }
- }
-
- // Unwrap a block, so that e.g. `{ P }` is recognised as a parameter. Const arguments
- // currently have to be wrapped in curly brackets, so it's necessary to special-case.
- let expr = match &expr.kind {
- hir::ExprKind::Block(block, _) if block.stmts.is_empty() && block.expr.is_some() => {
- block.expr.as_ref().unwrap()
- }
- _ => expr,
- };
-
- use hir::{def::DefKind::ConstParam, def::Res, ExprKind, Path, QPath};
- let val = match expr.kind {
- ExprKind::Path(QPath::Resolved(_, &Path { res: Res::Def(ConstParam, def_id), .. })) => {
- // Find the name and index of the const parameter by indexing the generics of
- // the parent item and construct a `ParamConst`.
- let hir_id = tcx.hir().as_local_hir_id(def_id.expect_local());
- let item_id = tcx.hir().get_parent_node(hir_id);
- let item_def_id = tcx.hir().local_def_id(item_id);
- let generics = tcx.generics_of(item_def_id.to_def_id());
- let index =
- generics.param_def_id_to_index[&tcx.hir().local_def_id(hir_id).to_def_id()];
- let name = tcx.hir().name(hir_id);
- ty::ConstKind::Param(ty::ParamConst::new(index, name))
- }
- _ => ty::ConstKind::Unevaluated(
- def.to_global(),
- InternalSubsts::identity_for_item(tcx, def.did.to_def_id()),
- None,
- ),
- };
-
- tcx.mk_const(ty::Const { val, ty })
- }
-
- #[inline]
- /// Interns the given value as a constant.
- pub fn from_value(tcx: TyCtxt<'tcx>, val: ConstValue<'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
- tcx.mk_const(Self { val: ConstKind::Value(val), ty })
- }
-
- #[inline]
- /// Interns the given scalar as a constant.
- pub fn from_scalar(tcx: TyCtxt<'tcx>, val: Scalar, ty: Ty<'tcx>) -> &'tcx Self {
- Self::from_value(tcx, ConstValue::Scalar(val), ty)
- }
-
- #[inline]
- /// Creates a constant with the given integer value and interns it.
- pub fn from_bits(tcx: TyCtxt<'tcx>, bits: u128, ty: ParamEnvAnd<'tcx, Ty<'tcx>>) -> &'tcx Self {
- let size = tcx
- .layout_of(ty)
- .unwrap_or_else(|e| panic!("could not compute layout for {:?}: {:?}", ty, e))
- .size;
- Self::from_scalar(tcx, Scalar::from_uint(bits, size), ty.value)
- }
-
- #[inline]
- /// Creates an interned zst constant.
- pub fn zero_sized(tcx: TyCtxt<'tcx>, ty: Ty<'tcx>) -> &'tcx Self {
- Self::from_scalar(tcx, Scalar::zst(), ty)
- }
-
- #[inline]
- /// Creates an interned bool constant.
- pub fn from_bool(tcx: TyCtxt<'tcx>, v: bool) -> &'tcx Self {
- Self::from_bits(tcx, v as u128, ParamEnv::empty().and(tcx.types.bool))
- }
-
- #[inline]
- /// Creates an interned usize constant.
- pub fn from_usize(tcx: TyCtxt<'tcx>, n: u64) -> &'tcx Self {
- Self::from_bits(tcx, n as u128, ParamEnv::empty().and(tcx.types.usize))
- }
-
- #[inline]
- /// Attempts to evaluate the given constant to bits. Can fail to evaluate in the presence of
- /// generics (or erroneous code) or if the value can't be represented as bits (e.g. because it
- /// contains const generic parameters or pointers).
- pub fn try_eval_bits(
- &self,
- tcx: TyCtxt<'tcx>,
- param_env: ParamEnv<'tcx>,
- ty: Ty<'tcx>,
- ) -> Option<u128> {
- assert_eq!(self.ty, ty);
- let size = tcx.layout_of(param_env.with_reveal_all().and(ty)).ok()?.size;
- // if `ty` does not depend on generic parameters, use an empty param_env
- self.eval(tcx, param_env).val.try_to_bits(size)
- }
-
- #[inline]
- /// Tries to evaluate the constant if it is `Unevaluated`. If that doesn't succeed, return the
- /// unevaluated constant.
- pub fn eval(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> &Const<'tcx> {
- if let ConstKind::Unevaluated(def, substs, promoted) = self.val {
- use crate::mir::interpret::ErrorHandled;
-
- let param_env_and_substs = param_env.with_reveal_all().and(substs);
-
- // HACK(eddyb) this erases lifetimes even though `const_eval_resolve`
- // also does later, but we want to do it before checking for
- // inference variables.
- let param_env_and_substs = tcx.erase_regions(&param_env_and_substs);
-
- // HACK(eddyb) when the query key would contain inference variables,
- // attempt using identity substs and `ParamEnv` instead, that will succeed
- // when the expression doesn't depend on any parameters.
- // FIXME(eddyb, skinny121) pass `InferCtxt` into here when it's available, so that
- // we can call `infcx.const_eval_resolve` which handles inference variables.
- let param_env_and_substs = if param_env_and_substs.needs_infer() {
- tcx.param_env(def.did).and(InternalSubsts::identity_for_item(tcx, def.did))
- } else {
- param_env_and_substs
- };
-
- // FIXME(eddyb) maybe the `const_eval_*` methods should take
- // `ty::ParamEnvAnd<SubstsRef>` instead of having them separate.
- let (param_env, substs) = param_env_and_substs.into_parts();
- // try to resolve e.g. associated constants to their definition on an impl, and then
- // evaluate the const.
- match tcx.const_eval_resolve(param_env, def, substs, promoted, None) {
- // NOTE(eddyb) `val` contains no lifetimes/types/consts,
- // and we use the original type, so nothing from `substs`
- // (which may be identity substs, see above),
- // can leak through `val` into the const we return.
- Ok(val) => Const::from_value(tcx, val, self.ty),
- Err(ErrorHandled::TooGeneric | ErrorHandled::Linted) => self,
- Err(ErrorHandled::Reported(ErrorReported)) => tcx.const_error(self.ty),
- }
- } else {
- self
- }
- }
-
- #[inline]
- pub fn try_eval_bool(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Option<bool> {
- self.try_eval_bits(tcx, param_env, tcx.types.bool).and_then(|v| match v {
- 0 => Some(false),
- 1 => Some(true),
- _ => None,
- })
- }
-
- #[inline]
- pub fn try_eval_usize(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> Option<u64> {
- self.try_eval_bits(tcx, param_env, tcx.types.usize).map(|v| v as u64)
- }
-
- #[inline]
- /// Panics if the value cannot be evaluated or doesn't contain a valid integer of the given type.
- pub fn eval_bits(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>, ty: Ty<'tcx>) -> u128 {
- self.try_eval_bits(tcx, param_env, ty)
- .unwrap_or_else(|| bug!("expected bits of {:#?}, got {:#?}", ty, self))
- }
-
- #[inline]
- /// Panics if the value cannot be evaluated or doesn't contain a valid `usize`.
- pub fn eval_usize(&self, tcx: TyCtxt<'tcx>, param_env: ParamEnv<'tcx>) -> u64 {
- self.eval_bits(tcx, param_env, tcx.types.usize) as u64
- }
-}
-
-/// Represents a constant in Rust.
-#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
-#[derive(HashStable)]
-pub enum ConstKind<'tcx> {
- /// A const generic parameter.
- Param(ParamConst),
-
- /// Infer the value of the const.
- Infer(InferConst<'tcx>),
-
- /// Bound const variable, used only when preparing a trait query.
- Bound(DebruijnIndex, BoundVar),
-
- /// A placeholder const - universally quantified higher-ranked const.
- Placeholder(ty::PlaceholderConst),
-
- /// Used in the HIR by using `Unevaluated` everywhere and later normalizing to one of the other
- /// variants when the code is monomorphic enough for that.
- Unevaluated(ty::WithOptConstParam<DefId>, SubstsRef<'tcx>, Option<Promoted>),
-
- /// Used to hold computed value.
- Value(ConstValue<'tcx>),
-
- /// A placeholder for a const which could not be computed; this is
- /// propagated to avoid useless error messages.
- Error(DelaySpanBugEmitted),
-}
-
-#[cfg(target_arch = "x86_64")]
-static_assert_size!(ConstKind<'_>, 40);
-
-impl<'tcx> ConstKind<'tcx> {
- #[inline]
- pub fn try_to_scalar(&self) -> Option<Scalar> {
- if let ConstKind::Value(val) = self { val.try_to_scalar() } else { None }
- }
-
- #[inline]
- pub fn try_to_bits(&self, size: Size) -> Option<u128> {
- if let ConstKind::Value(val) = self { val.try_to_bits(size) } else { None }
- }
-}
-
-/// An inference variable for a const, for use in const generics.
-#[derive(Copy, Clone, Debug, Eq, PartialEq, PartialOrd, Ord, RustcEncodable, RustcDecodable, Hash)]
-#[derive(HashStable)]
-pub enum InferConst<'tcx> {
- /// Infer the value of the const.
- Var(ConstVid<'tcx>),
- /// A fresh const variable. See `infer::freshen` for more details.
- Fresh(u32),
-}
/// This is a significant `DefId` because, when we do
/// type-checking, we type-check this fn item and all of its
/// (transitive) closures together. Therefore, when we fetch the
- /// `typeck_tables_of` the closure, for example, we really wind up
- /// fetching the `typeck_tables_of` the enclosing fn item.
+ /// `typeck` the closure, for example, we really wind up
+ /// fetching the `typeck` the enclosing fn item.
pub fn closure_base_def_id(self, def_id: DefId) -> DefId {
let mut def_id = def_id;
while self.is_closure(def_id) {
.ty;
let needs_note = match ty.kind {
ty::Closure(id, _) => {
- let tables = self.infcx.tcx.typeck_tables_of(id.expect_local());
+ let tables = self.infcx.tcx.typeck(id.expect_local());
let hir_id = self.infcx.tcx.hir().as_local_hir_id(id.expect_local());
tables.closure_kind_origins().get(hir_id).is_none()
.opt_name(fn_hir_id)
.map(|name| format!("function `{}`", name))
.unwrap_or_else(|| {
- match &self
- .infcx
- .tcx
- .typeck_tables_of(self.mir_def_id)
- .node_type(fn_hir_id)
- .kind
+ match &self.infcx.tcx.typeck(self.mir_def_id).node_type(fn_hir_id).kind
{
ty::Closure(..) => "enclosing closure",
ty::Generator(..) => "enclosing generator",
let hir_id = self.infcx.tcx.hir().as_local_hir_id(did);
if let Some((span, name)) =
- self.infcx.tcx.typeck_tables_of(did).closure_kind_origins().get(hir_id)
+ self.infcx.tcx.typeck(did).closure_kind_origins().get(hir_id)
{
diag.span_note(
*span,
let hir_id = self.infcx.tcx.hir().as_local_hir_id(did);
if let Some((span, name)) =
- self.infcx.tcx.typeck_tables_of(did).closure_kind_origins().get(hir_id)
+ self.infcx.tcx.typeck(did).closure_kind_origins().get(hir_id)
{
diag.span_note(
*span,
.map(|(pos, _)| pos)
.next();
let def_id = hir.local_def_id(item_id);
- let tables = self.infcx.tcx.typeck_tables_of(def_id);
+ let tables = self.infcx.tcx.typeck(def_id);
if let Some(ty::FnDef(def_id, _)) =
tables.node_type_opt(func.hir_id).as_ref().map(|ty| &ty.kind)
{
}
// Gather the upvars of a closure, if any.
- let tables = tcx.typeck_tables_of_opt_const_arg(def);
+ let tables = tcx.typeck_opt_const_arg(def);
if let Some(ErrorReported) = tables.tainted_by_errors {
infcx.set_tainted_by_errors();
}
if !self.tcx().is_closure(self.mir_def_id.to_def_id()) {
user_provided_sig = None;
} else {
- let typeck_tables = self.tcx().typeck_tables_of(self.mir_def_id);
+ let typeck_results = self.tcx().typeck(self.mir_def_id);
user_provided_sig =
- match typeck_tables.user_provided_sigs.get(&self.mir_def_id.to_def_id()) {
+ match typeck_results.user_provided_sigs.get(&self.mir_def_id.to_def_id()) {
None => None,
Some(user_provided_poly_sig) => {
// Instantiate the canonicalized variables from
let tcx = infcx.tcx;
let param_env = self.param_env;
let body = self.body;
- let concrete_opaque_types = &tcx.typeck_tables_of(anon_owner_def_id).concrete_opaque_types;
+ let concrete_opaque_types = &tcx.typeck(anon_owner_def_id).concrete_opaque_types;
let mut opaque_type_values = Vec::new();
debug!("eq_opaque_type_and_type: mir_def_id={:?}", self.mir_def_id);
let defining_ty = if self.mir_def.did.to_def_id() == closure_base_def_id {
tcx.type_of(closure_base_def_id)
} else {
- let tables = tcx.typeck_tables_of(self.mir_def.did);
+ let tables = tcx.typeck(self.mir_def.did);
tables.node_type(self.mir_hir_id)
};
let def = cid.instance.def.with_opt_param();
if let Some(def) = def.as_local() {
- if tcx.has_typeck_tables(def.did) {
- if let Some(error_reported) = tcx.typeck_tables_of_opt_const_arg(def).tainted_by_errors
- {
+ if tcx.has_typeck_results(def.did) {
+ if let Some(error_reported) = tcx.typeck_opt_const_arg(def).tainted_by_errors {
return Err(ErrorHandled::Reported(error_reported));
}
}
///
/// When this flag is set, we need to reset to an entry set before doing a seek.
state_needs_reset: bool,
+
+ #[cfg(debug_assertions)]
+ reachable_blocks: BitSet<BasicBlock>,
}
impl<'mir, 'tcx, A, R> ResultsCursor<'mir, 'tcx, A, R>
state_needs_reset: true,
state: BitSet::new_empty(bits_per_block),
pos: CursorPosition::block_entry(mir::START_BLOCK),
+
+ #[cfg(debug_assertions)]
+ reachable_blocks: mir::traversal::reachable_as_bitset(body),
}
}
///
/// For backward dataflow analyses, this is the dataflow state after the terminator.
pub(super) fn seek_to_block_entry(&mut self, block: BasicBlock) {
+ #[cfg(debug_assertions)]
+ assert!(self.reachable_blocks.contains(block));
+
self.state.overwrite(&self.results.borrow().entry_set_for_block(block));
self.pos = CursorPosition::block_entry(block);
self.state_needs_reset = false;
visit_results(body, blocks, self, vis)
}
+ pub fn visit_reachable_with(
+ &self,
+ body: &'mir mir::Body<'tcx>,
+ vis: &mut impl ResultsVisitor<'mir, 'tcx, FlowState = BitSet<A::Idx>>,
+ ) {
+ let blocks = mir::traversal::reachable(body);
+ visit_results(body, blocks.map(|(bb, _)| bb), self, vis)
+ }
+
pub fn visit_in_rpo_with(
&self,
body: &'mir mir::Body<'tcx>,
}
}
- // Add blocks that are not reachable from START_BLOCK to the work queue. These blocks will
- // be processed after the ones added above.
- //
- // FIXME(ecstaticmorse): Is this actually necessary? In principle, we shouldn't need to
- // know the dataflow state in unreachable basic blocks.
- for bb in body.basic_blocks().indices() {
- dirty_queue.insert(bb);
- }
-
let mut state = BitSet::new_empty(bits_per_block);
while let Some(bb) = dirty_queue.pop() {
let bb_data = &body[bb];
{
let mut state = results.new_flow_state(body);
+ #[cfg(debug_assertions)]
+ let reachable_blocks = mir::traversal::reachable_as_bitset(body);
+
for block in blocks {
+ #[cfg(debug_assertions)]
+ assert!(reachable_blocks.contains(block));
+
let block_data = &body[block];
V::Direction::visit_results_in_block(&mut state, block, block_data, results, vis);
}
// do not continue if typeck errors occurred (can only occur in local crate)
let def = instance.with_opt_param();
if let Some(def) = def.as_local() {
- if self.tcx.has_typeck_tables(def.did) {
- if let Some(error_reported) =
- self.tcx.typeck_tables_of_opt_const_arg(def).tainted_by_errors
- {
+ if self.tcx.has_typeck_results(def.did) {
+ if let Some(error_reported) = self.tcx.typeck_opt_const_arg(def).tainted_by_errors {
throw_inval!(TypeckError(error_reported))
}
}
let (dest, ret) = match ret {
None => match intrinsic_name {
sym::transmute => throw_ub_format!("transmuting to uninhabited type"),
+ sym::unreachable => throw_ub!(Unreachable),
sym::abort => M::abort(self)?,
// Unsupported diverging intrinsic.
_ => return Ok(false),
// FIXME: This should be an assert instead of an error, but if we transmute within an
// array length computation, `typeck` may not have yet been run and errored out. In fact
// most likely we *are* running `typeck` right now. Investigate whether we can bail out
- // on `typeck_tables().has_errors` at all const eval entry points.
+ // on `typeck_results().has_errors` at all const eval entry points.
debug!("Size mismatch when transmuting!\nsrc: {:#?}\ndest: {:#?}", src, dest);
self.tcx.sess.delay_span_bug(
self.cur_span(),
ty::Closure(def_id, _) | ty::Generator(def_id, _, _) => {
let mut name = None;
if let Some(def_id) = def_id.as_local() {
- let tables = self.ecx.tcx.typeck_tables_of(def_id);
+ let tables = self.ecx.tcx.typeck(def_id);
if let Some(upvars) = tables.closure_captures.get(&def_id.to_def_id()) {
// Sometimes the index is beyond the number of upvars (seen
// for a generator).
#![feature(box_patterns)]
#![feature(box_syntax)]
#![feature(const_fn)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
-#![cfg_attr(bootstrap, feature(const_loop))]
#![feature(const_panic)]
#![feature(crate_visibility_modifier)]
#![feature(decl_macro)]
// Next we try to make as many symbols "internal" as possible, so LLVM has
// more freedom to optimize.
- if !tcx.sess.opts.cg.link_dead_code {
+ if tcx.sess.opts.cg.link_dead_code != Some(true) {
let _prof_timer = tcx.prof.generic_activity("cgu_partitioning_internalize_symbols");
internalize_symbols(tcx, &mut post_inlining, inlining_map);
}
}
}
None => {
- if tcx.sess.opts.cg.link_dead_code {
+ if tcx.sess.opts.cg.link_dead_code == Some(true) {
MonoItemCollectionMode::Eager
} else {
MonoItemCollectionMode::Lazy
use rustc_middle::ty::{self, TyCtxt};
use rustc_session::lint::builtin::{SAFE_PACKED_BORROWS, UNSAFE_OP_IN_UNSAFE_FN, UNUSED_UNSAFE};
use rustc_session::lint::Level;
-use rustc_span::symbol::{sym, Symbol};
+use rustc_span::symbol::sym;
use std::ops::Bound;
let sig = func_ty.fn_sig(self.tcx);
if let hir::Unsafety::Unsafe = sig.unsafety() {
self.require_unsafe(
- "call to unsafe function",
- "consult the function's documentation for information on how to avoid \
- undefined behavior",
UnsafetyViolationKind::GeneralAndConstFn,
+ UnsafetyViolationDetails::CallToUnsafeFunction,
)
}
}
TerminatorKind::InlineAsm { .. } => self.require_unsafe(
- "use of inline assembly",
- "inline assembly is entirely unchecked and can cause undefined behavior",
UnsafetyViolationKind::General,
+ UnsafetyViolationDetails::UseOfInlineAssembly,
),
}
self.super_terminator(terminator, location);
}
StatementKind::LlvmInlineAsm { .. } => self.require_unsafe(
- "use of inline assembly",
- "inline assembly is entirely unchecked and can cause undefined behavior",
UnsafetyViolationKind::General,
+ UnsafetyViolationDetails::UseOfInlineAssembly,
),
}
self.super_statement(statement, location);
match self.tcx.layout_scalar_valid_range(def.did) {
(Bound::Unbounded, Bound::Unbounded) => {}
_ => self.require_unsafe(
- "initializing type with `rustc_layout_scalar_valid_range` attr",
- "initializing a layout restricted type's field with a value \
- outside the valid range is undefined behavior",
UnsafetyViolationKind::GeneralAndConstFn,
+ UnsafetyViolationDetails::InitializingTypeWith,
),
}
}
match (cast_in, cast_out) {
(CastTy::Ptr(_) | CastTy::FnPtr, CastTy::Int(_)) => {
self.require_unsafe(
- "cast of pointer to int",
- "casting pointers to integers in constants",
UnsafetyViolationKind::General,
+ UnsafetyViolationDetails::CastOfPointerToInt,
);
}
_ => {}
if context.is_borrow() {
if util::is_disaligned(self.tcx, self.body, self.param_env, *place) {
self.require_unsafe(
- "borrow of packed field",
- "fields of packed structs might be misaligned: dereferencing a \
- misaligned pointer or even just creating a misaligned reference \
- is undefined behavior",
UnsafetyViolationKind::BorrowPacked,
+ UnsafetyViolationDetails::BorrowOfPackedField,
);
}
}
if context.is_borrow() {
if util::is_disaligned(self.tcx, self.body, self.param_env, *place) {
self.require_unsafe(
- "borrow of packed field",
- "fields of packed structs might be misaligned: dereferencing a \
- misaligned pointer or even just creating a misaligned reference \
- is undefined behavior",
UnsafetyViolationKind::BorrowPacked,
+ UnsafetyViolationDetails::BorrowOfPackedField,
);
}
}
if let Some(box LocalInfo::StaticRef { def_id, .. }) = decl.local_info {
if self.tcx.is_mutable_static(def_id) {
self.require_unsafe(
- "use of mutable static",
- "mutable statics can be mutated by multiple threads: aliasing \
- violations or data races will cause undefined behavior",
UnsafetyViolationKind::General,
+ UnsafetyViolationDetails::UseOfMutableStatic,
);
return;
} else if self.tcx.is_foreign_item(def_id) {
self.require_unsafe(
- "use of extern static",
- "extern statics are not controlled by the Rust type system: \
- invalid data, aliasing violations or data races will cause \
- undefined behavior",
UnsafetyViolationKind::General,
+ UnsafetyViolationDetails::UseOfExternStatic,
);
return;
}
let base_ty = Place::ty_from(place.local, proj_base, self.body, self.tcx).ty;
match base_ty.kind {
ty::RawPtr(..) => self.require_unsafe(
- "dereference of raw pointer",
- "raw pointers may be NULL, dangling or unaligned; they can violate \
- aliasing rules and cause data races: all of these are undefined \
- behavior",
UnsafetyViolationKind::General,
+ UnsafetyViolationDetails::DerefOfRawPointer,
),
ty::Adt(adt, _) => {
if adt.is_union() {
self.param_env,
) {
self.require_unsafe(
- "assignment to non-`Copy` union field",
- "the previous content of the field will be dropped, which \
- causes undefined behavior if the field was not properly \
- initialized",
UnsafetyViolationKind::GeneralAndConstFn,
+ UnsafetyViolationDetails::AssignToNonCopyUnionField,
)
} else {
// write to non-move union, safe
}
} else {
self.require_unsafe(
- "access to union field",
- "the field may not be properly initialized: using \
- uninitialized data will cause undefined behavior",
UnsafetyViolationKind::GeneralAndConstFn,
+ UnsafetyViolationDetails::AccessToUnionField,
)
}
}
}
impl<'a, 'tcx> UnsafetyChecker<'a, 'tcx> {
- fn require_unsafe(
- &mut self,
- description: &'static str,
- details: &'static str,
- kind: UnsafetyViolationKind,
- ) {
+ fn require_unsafe(&mut self, kind: UnsafetyViolationKind, details: UnsafetyViolationDetails) {
let source_info = self.source_info;
let lint_root = self.body.source_scopes[self.source_info.scope]
.local_data
.assert_crate_local()
.lint_root;
self.register_violations(
- &[UnsafetyViolation {
- source_info,
- lint_root,
- description: Symbol::intern(description),
- details: Symbol::intern(details),
- kind,
- }],
+ &[UnsafetyViolation { source_info, lint_root, kind, details }],
&[],
);
}
if self.tcx.layout_scalar_valid_range(def.did)
!= (Bound::Unbounded, Bound::Unbounded)
{
- let (description, details) = if is_mut_use {
- (
- "mutation of layout constrained field",
- "mutating layout constrained fields cannot statically be \
- checked for valid values",
- )
+ let details = if is_mut_use {
+ UnsafetyViolationDetails::MutationOfLayoutConstrainedField
// Check `is_freeze` as late as possible to avoid cycle errors
// with opaque types.
.ty
.is_freeze(self.tcx.at(self.source_info.span), self.param_env)
{
- (
- "borrow of layout constrained field with interior \
- mutability",
- "references to fields of layout constrained fields \
- lose the constraints. Coupled with interior mutability, \
- the field can be changed to invalid values",
- )
+ UnsafetyViolationDetails::BorrowOfLayoutConstrainedField
} else {
continue;
};
- self.require_unsafe(
- description,
- details,
- UnsafetyViolationKind::GeneralAndConstFn,
- );
+ self.require_unsafe(UnsafetyViolationKind::GeneralAndConstFn, details);
}
}
}
// Is `callee_features` a subset of `calling_features`?
if !callee_features.iter().all(|feature| self_features.contains(feature)) {
self.require_unsafe(
- "call to function with `#[target_feature]`",
- "can only be called if the required target features are available",
UnsafetyViolationKind::GeneralAndConstFn,
+ UnsafetyViolationDetails::CallToFunctionWith,
)
}
}
let UnsafetyCheckResult { violations, unsafe_blocks } = tcx.unsafety_check_result(def_id);
- for &UnsafetyViolation { source_info, lint_root, description, details, kind } in
- violations.iter()
- {
+ for &UnsafetyViolation { source_info, lint_root, kind, details } in violations.iter() {
+ let (description, note) = details.description_and_note();
+
// Report an error.
let unsafe_fn_msg =
if unsafe_op_in_unsafe_fn_allowed(tcx, lint_root) { " function or" } else { "" };
description,
unsafe_fn_msg,
)
- .span_label(source_info.span, &*description.as_str())
- .note(&details.as_str())
+ .span_label(source_info.span, description)
+ .note(note)
.emit();
}
UnsafetyViolationKind::BorrowPacked => {
"{} is unsafe and requires unsafe{} block (error E0133)",
description, unsafe_fn_msg,
))
- .note(&details.as_str())
+ .note(note)
.emit()
},
)
"{} is unsafe and requires unsafe block (error E0133)",
description,
))
- .span_label(source_info.span, &*description.as_str())
- .note(&details.as_str())
+ .span_label(source_info.span, description)
+ .note(note)
.emit();
},
),
"{} is unsafe and requires unsafe block (error E0133)",
description,
))
- .span_label(source_info.span, &*description.as_str())
- .note(&details.as_str())
+ .span_label(source_info.span, description)
+ .note(note)
.emit();
})
}
lint: &'static lint::Lint,
source_info: SourceInfo,
message: &'static str,
- panic: AssertKind<ConstInt>,
+ panic: AssertKind<impl std::fmt::Debug>,
) -> Option<()> {
let lint_root = self.lint_root(source_info)?;
self.tcx.struct_span_lint_hir(lint, lint_root, source_info.span, |lint| {
let expected = ScalarMaybeUninit::from(Scalar::from_bool(*expected));
let value_const = self.ecx.read_scalar(value).unwrap();
if expected != value_const {
+ enum DbgVal<T> {
+ Val(T),
+ Underscore,
+ }
+ impl<T: std::fmt::Debug> std::fmt::Debug for DbgVal<T> {
+ fn fmt(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Self::Val(val) => val.fmt(fmt),
+ Self::Underscore => fmt.write_str("_"),
+ }
+ }
+ }
let mut eval_to_int = |op| {
- let op = self
- .eval_operand(op, source_info)
- .expect("if we got here, it must be const");
- self.ecx.read_immediate(op).unwrap().to_const_int()
+ // This can be `None` if the lhs wasn't const propagated and we just
+ // triggered the assert on the value of the rhs.
+ match self.eval_operand(op, source_info) {
+ Some(op) => {
+ DbgVal::Val(self.ecx.read_immediate(op).unwrap().to_const_int())
+ }
+ None => DbgVal::Underscore,
+ }
};
let msg = match msg {
AssertKind::DivisionByZero(op) => {
local_conflicts: BitMatrix::from_row_n(&ineligible_locals, body.local_decls.len()),
};
- // Visit only reachable basic blocks. The exact order is not important.
- let reachable_blocks = traversal::preorder(body).map(|(bb, _)| bb);
- requires_storage.visit_with(body, reachable_blocks, &mut visitor);
+ requires_storage.visit_reachable_with(body, &mut visitor);
let local_conflicts = visitor.local_conflicts;
// represents a single function. Validate and/or correct if inlining (which should be disabled
// if -Zinstrument-coverage is enabled) and/or monomorphization invalidates these assumptions.
let count_code_region_fn = tcx.require_lang_item(lang_items::CountCodeRegionFnLangItem, None);
+ let coverage_counter_add_fn =
+ tcx.require_lang_item(lang_items::CoverageCounterAddFnLangItem, None);
+ let coverage_counter_subtract_fn =
+ tcx.require_lang_item(lang_items::CoverageCounterSubtractFnLangItem, None);
// The `num_counters` argument to `llvm.instrprof.increment` is the number of injected
// counters, with each counter having an index from `0..num_counters-1`. MIR optimization
// may split and duplicate some BasicBlock sequences. Simply counting the calls may not
// not work; but computing the num_counters by adding `1` to the highest index (for a given
// instrumented function) is valid.
+ //
+ // `num_expressions` is the number of counter expressions added to the MIR body. Both
+ // `num_counters` and `num_expressions` are used to initialize new vectors, during backend
+ // code generation, to look up counters and expressions by their simple u32 indexes.
let mut num_counters: u32 = 0;
- for terminator in traversal::preorder(mir_body)
- .map(|(_, data)| (data, count_code_region_fn))
- .filter_map(terminators_that_call_given_fn)
+ let mut num_expressions: u32 = 0;
+ for terminator in
+ traversal::preorder(mir_body).map(|(_, data)| data).filter_map(call_terminators)
{
- if let TerminatorKind::Call { args, .. } = &terminator.kind {
- let index_arg = args.get(count_code_region_args::COUNTER_INDEX).expect("arg found");
- let index =
- mir::Operand::scalar_from_const(index_arg).to_u32().expect("index arg is u32");
- num_counters = std::cmp::max(num_counters, index + 1);
- }
- }
- let hash = if num_counters > 0 { hash_mir_source(tcx, mir_def_id) } else { 0 };
- CoverageInfo { num_counters, hash }
-}
-
-fn terminators_that_call_given_fn(
- (data, fn_def_id): (&'tcx BasicBlockData<'tcx>, DefId),
-) -> Option<&'tcx Terminator<'tcx>> {
- if let Some(terminator) = &data.terminator {
- if let TerminatorKind::Call { func: Operand::Constant(func), .. } = &terminator.kind {
- if let FnDef(called_fn_def_id, _) = func.literal.ty.kind {
- if called_fn_def_id == fn_def_id {
- return Some(&terminator);
+ if let TerminatorKind::Call { func: Operand::Constant(func), args, .. } = &terminator.kind {
+ match func.literal.ty.kind {
+ FnDef(id, _) if id == count_code_region_fn => {
+ let index_arg =
+ args.get(count_code_region_args::COUNTER_INDEX).expect("arg found");
+ let counter_index = mir::Operand::scalar_from_const(index_arg)
+ .to_u32()
+ .expect("index arg is u32");
+ num_counters = std::cmp::max(num_counters, counter_index + 1);
+ }
+ FnDef(id, _)
+ if id == coverage_counter_add_fn || id == coverage_counter_subtract_fn =>
+ {
+ let index_arg = args
+ .get(coverage_counter_expression_args::COUNTER_EXPRESSION_INDEX)
+ .expect("arg found");
+ let translated_index = mir::Operand::scalar_from_const(index_arg)
+ .to_u32()
+ .expect("index arg is u32");
+ // Counter expressions start with "translated indexes", descending from
+ // `u32::MAX`, so the range of expression indexes is disjoint from the range of
+ // counter indexes. This way, both counters and expressions can be operands in
+ // other expressions.
+ let expression_index = u32::MAX - translated_index;
+ num_expressions = std::cmp::max(num_expressions, expression_index + 1);
}
+ _ => {}
}
}
}
- None
+ CoverageInfo { num_counters, num_expressions }
}
-struct Instrumentor<'tcx> {
- tcx: TyCtxt<'tcx>,
- num_counters: u32,
+fn call_terminators(data: &'tcx BasicBlockData<'tcx>) -> Option<&'tcx Terminator<'tcx>> {
+ let terminator = data.terminator();
+ match terminator.kind {
+ TerminatorKind::Call { .. } => Some(terminator),
+ _ => None,
+ }
}
impl<'tcx> MirPass<'tcx> for InstrumentCoverage {
// If the InstrumentCoverage pass is called on promoted MIRs, skip them.
// See: https://github.com/rust-lang/rust/pull/73011#discussion_r438317601
if src.promoted.is_none() {
- debug!(
- "instrumenting {:?}, span: {}",
- src.def_id(),
- tcx.sess.source_map().span_to_string(mir_body.span)
- );
- Instrumentor::new(tcx).inject_counters(mir_body);
+ Instrumentor::new(tcx, src, mir_body).inject_counters();
}
}
}
}
-impl<'tcx> Instrumentor<'tcx> {
- fn new(tcx: TyCtxt<'tcx>) -> Self {
- Self { tcx, num_counters: 0 }
+/// Distinguishes the expression operators.
+enum Op {
+ Add,
+ Subtract,
+}
+
+struct Instrumentor<'a, 'tcx> {
+ tcx: TyCtxt<'tcx>,
+ mir_def_id: DefId,
+ mir_body: &'a mut mir::Body<'tcx>,
+ hir_body: &'tcx rustc_hir::Body<'tcx>,
+ function_source_hash: Option<u64>,
+ num_counters: u32,
+ num_expressions: u32,
+}
+
+impl<'a, 'tcx> Instrumentor<'a, 'tcx> {
+ fn new(tcx: TyCtxt<'tcx>, src: MirSource<'tcx>, mir_body: &'a mut mir::Body<'tcx>) -> Self {
+ let mir_def_id = src.def_id();
+ let hir_body = hir_body(tcx, mir_def_id);
+ Self {
+ tcx,
+ mir_def_id,
+ mir_body,
+ hir_body,
+ function_source_hash: None,
+ num_counters: 0,
+ num_expressions: 0,
+ }
}
+ /// Counter IDs start from zero and go up.
fn next_counter(&mut self) -> u32 {
+ assert!(self.num_counters < u32::MAX - self.num_expressions);
let next = self.num_counters;
self.num_counters += 1;
next
}
- fn inject_counters(&mut self, mir_body: &mut mir::Body<'tcx>) {
+ /// Expression IDs start from u32::MAX and go down because a CounterExpression can reference
+ /// (add or subtract counts) of both Counter regions and CounterExpression regions. The indexes
+ /// of each type of region must be contiguous, but also must be unique across both sets.
+ /// The expression IDs are eventually translated into region indexes (starting after the last
+ /// counter index, for the given function), during backend code generation, by the helper method
+ /// `rustc_codegen_ssa::coverageinfo::map::FunctionCoverage::translate_expressions()`.
+ fn next_expression(&mut self) -> u32 {
+ assert!(self.num_counters < u32::MAX - self.num_expressions);
+ let next = u32::MAX - self.num_expressions;
+ self.num_expressions += 1;
+ next
+ }
+
+ fn function_source_hash(&mut self) -> u64 {
+ match self.function_source_hash {
+ Some(hash) => hash,
+ None => {
+ let hash = hash_mir_source(self.tcx, self.hir_body);
+ self.function_source_hash.replace(hash);
+ hash
+ }
+ }
+ }
+
+ fn inject_counters(&mut self) {
+ let body_span = self.hir_body.value.span;
+ debug!(
+ "instrumenting {:?}, span: {}",
+ self.mir_def_id,
+ self.tcx.sess.source_map().span_to_string(body_span)
+ );
+
// FIXME(richkadel): As a first step, counters are only injected at the top of each
// function. The complete solution will inject counters at each conditional code branch.
- let code_region = mir_body.span;
let next_block = START_BLOCK;
- self.inject_counter(mir_body, code_region, next_block);
+ self.inject_counter(body_span, next_block);
+
+ // FIXME(richkadel): The next step to implement source based coverage analysis will be
+ // instrumenting branches within functions, and some regions will be counted by "counter
+ // expression". The function to inject counter expression is implemented. Replace this
+ // "fake use" with real use.
+ let fake_use = false;
+ if fake_use {
+ let add = false;
+ if add {
+ self.inject_counter_expression(body_span, next_block, 1, Op::Add, 2);
+ } else {
+ self.inject_counter_expression(body_span, next_block, 1, Op::Subtract, 2);
+ }
+ }
}
- fn inject_counter(
- &mut self,
- mir_body: &mut mir::Body<'tcx>,
- code_region: Span,
- next_block: BasicBlock,
- ) {
+ fn inject_counter(&mut self, code_region: Span, next_block: BasicBlock) -> u32 {
+ let counter_id = self.next_counter();
+ let function_source_hash = self.function_source_hash();
let injection_point = code_region.shrink_to_lo();
let count_code_region_fn = function_handle(
injection_point,
);
- let index = self.next_counter();
-
let mut args = Vec::new();
use count_code_region_args::*;
+ debug_assert_eq!(FUNCTION_SOURCE_HASH, args.len());
+ args.push(self.const_u64(function_source_hash, injection_point));
+
debug_assert_eq!(COUNTER_INDEX, args.len());
- args.push(self.const_u32(index, injection_point));
+ args.push(self.const_u32(counter_id, injection_point));
debug_assert_eq!(START_BYTE_POS, args.len());
args.push(self.const_u32(code_region.lo().to_u32(), injection_point));
debug_assert_eq!(END_BYTE_POS, args.len());
args.push(self.const_u32(code_region.hi().to_u32(), injection_point));
- let mut patch = MirPatch::new(mir_body);
+ self.inject_call(count_code_region_fn, args, injection_point, next_block);
- let temp = patch.new_temp(self.tcx.mk_unit(), code_region);
- let new_block = patch.new_block(placeholder_block(code_region));
+ counter_id
+ }
+
+ fn inject_counter_expression(
+ &mut self,
+ code_region: Span,
+ next_block: BasicBlock,
+ lhs: u32,
+ op: Op,
+ rhs: u32,
+ ) -> u32 {
+ let expression_id = self.next_expression();
+ let injection_point = code_region.shrink_to_lo();
+
+ let count_code_region_fn = function_handle(
+ self.tcx,
+ self.tcx.require_lang_item(
+ match op {
+ Op::Add => lang_items::CoverageCounterAddFnLangItem,
+ Op::Subtract => lang_items::CoverageCounterSubtractFnLangItem,
+ },
+ None,
+ ),
+ injection_point,
+ );
+
+ let mut args = Vec::new();
+
+ use coverage_counter_expression_args::*;
+ debug_assert_eq!(COUNTER_EXPRESSION_INDEX, args.len());
+ args.push(self.const_u32(expression_id, injection_point));
+
+ debug_assert_eq!(LEFT_INDEX, args.len());
+ args.push(self.const_u32(lhs, injection_point));
+
+ debug_assert_eq!(RIGHT_INDEX, args.len());
+ args.push(self.const_u32(rhs, injection_point));
+
+ debug_assert_eq!(START_BYTE_POS, args.len());
+ args.push(self.const_u32(code_region.lo().to_u32(), injection_point));
+
+ debug_assert_eq!(END_BYTE_POS, args.len());
+ args.push(self.const_u32(code_region.hi().to_u32(), injection_point));
+
+ self.inject_call(count_code_region_fn, args, injection_point, next_block);
+
+ expression_id
+ }
+
+ fn inject_call(
+ &mut self,
+ func: Operand<'tcx>,
+ args: Vec<Operand<'tcx>>,
+ fn_span: Span,
+ next_block: BasicBlock,
+ ) {
+ let mut patch = MirPatch::new(self.mir_body);
+
+ let temp = patch.new_temp(self.tcx.mk_unit(), fn_span);
+ let new_block = patch.new_block(placeholder_block(fn_span));
patch.patch_terminator(
new_block,
TerminatorKind::Call {
- func: count_code_region_fn,
+ func,
args,
// new_block will be swapped with the next_block, after applying patch
destination: Some((Place::from(temp), new_block)),
cleanup: None,
from_hir_call: false,
- fn_span: injection_point,
+ fn_span,
},
);
patch.add_statement(new_block.start_location(), StatementKind::StorageLive(temp));
patch.add_statement(next_block.start_location(), StatementKind::StorageDead(temp));
- patch.apply(mir_body);
+ patch.apply(self.mir_body);
// To insert the `new_block` in front of the first block in the counted branch (the
// `next_block`), just swap the indexes, leaving the rest of the graph unchanged.
- mir_body.basic_blocks_mut().swap(next_block, new_block);
+ self.mir_body.basic_blocks_mut().swap(next_block, new_block);
}
fn const_u32(&self, value: u32, span: Span) -> Operand<'tcx> {
Operand::const_from_scalar(self.tcx, self.tcx.types.u32, Scalar::from_u32(value), span)
}
+
+ fn const_u64(&self, value: u64, span: Span) -> Operand<'tcx> {
+ Operand::const_from_scalar(self.tcx, self.tcx.types.u64, Scalar::from_u64(value), span)
+ }
}
fn function_handle<'tcx>(tcx: TyCtxt<'tcx>, fn_def_id: DefId, span: Span) -> Operand<'tcx> {
}
}
-fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> u64 {
+fn hir_body<'tcx>(tcx: TyCtxt<'tcx>, def_id: DefId) -> &'tcx rustc_hir::Body<'tcx> {
let hir_node = tcx.hir().get_if_local(def_id).expect("DefId is local");
let fn_body_id = hir::map::associated_body(hir_node).expect("HIR node is a function with body");
- let hir_body = tcx.hir().body(fn_body_id);
+ tcx.hir().body(fn_body_id)
+}
+
+fn hash_mir_source<'tcx>(tcx: TyCtxt<'tcx>, hir_body: &'tcx rustc_hir::Body<'tcx>) -> u64 {
let mut hcx = tcx.create_no_span_stable_hashing_context();
hash(&mut hcx, &hir_body.value).to_smaller_hash()
}
let ptr_ty = ptr.ty;
// Create an *internal* temp for the pointer, so that unsafety
// checking won't complain about the raw pointer assignment.
- let ptr_temp = this.local_decls.push(LocalDecl::with_source_info(
- ptr_ty,
- source_info,
- ).internal());
+ let ptr_temp = this
+ .local_decls
+ .push(LocalDecl::with_source_info(ptr_ty, source_info).internal());
let ptr_temp = Place::from(ptr_temp);
let block = unpack!(this.into(ptr_temp, block, ptr));
this.into(this.hir.tcx().mk_place_deref(ptr_temp), block, val)
Some((destination, success))
},
from_hir_call,
- fn_span
+ fn_span,
},
);
success.unit()
// These cases don't actually need a destination
ExprKind::Assign { .. }
| ExprKind::AssignOp { .. }
- | ExprKind::Continue { .. }
- | ExprKind::Break { .. }
- | ExprKind::LlvmInlineAsm { .. }
- | ExprKind::Return { .. } => {
+ | ExprKind::LlvmInlineAsm { .. } => {
unpack!(block = this.stmt_expr(block, expr, None));
this.cfg.push_assign_unit(block, source_info, destination, this.hir.tcx());
block.unit()
}
+ ExprKind::Continue { .. } | ExprKind::Break { .. } | ExprKind::Return { .. } => {
+ unpack!(block = this.stmt_expr(block, expr, None));
+ // No assign, as these have type `!`.
+ block.unit()
+ }
+
// Avoid creating a temporary
ExprKind::VarRef { .. }
| ExprKind::SelfRef
use super::lints;
-crate fn mir_built<'tcx>(tcx: TyCtxt<'tcx>, def: ty::WithOptConstParam<LocalDefId>) -> &'tcx ty::steal::Steal<Body<'tcx>> {
+crate fn mir_built<'tcx>(
+ tcx: TyCtxt<'tcx>,
+ def: ty::WithOptConstParam<LocalDefId>,
+) -> &'tcx ty::steal::Steal<Body<'tcx>> {
if def.const_param_did.is_none() {
if let const_param_did @ Some(_) = tcx.opt_const_param_of(def.did) {
return tcx.mir_built(ty::WithOptConstParam { const_param_did, ..def });
tcx.infer_ctxt().enter(|infcx| {
let cx = Cx::new(&infcx, def, id);
- let body = if let Some(ErrorReported) = cx.tables().tainted_by_errors {
+ let body = if let Some(ErrorReported) = cx.typeck_results().tainted_by_errors {
build::construct_error(cx, body_id)
} else if cx.body_owner_kind.is_fn_or_closure() {
// fetch the fully liberated fn signature (that is, all bound
// types/lifetimes replaced)
- let fn_sig = cx.tables().liberated_fn_sigs()[id];
+ let fn_sig = cx.typeck_results().liberated_fn_sigs()[id];
let fn_def_id = tcx.hir().local_def_id(id);
let safety = match fn_sig.unsafety {
vec![ArgInfo(liberated_closure_env_ty(tcx, id, body_id), None, None, None)]
}
ty::Generator(..) => {
- let gen_ty = tcx.body_tables(body_id).node_type(id);
+ let gen_ty = tcx.typeck_body(body_id).node_type(id);
// The resume argument may be missing, in that case we need to provide it here.
// It will always be `()` in this case.
let arguments = implicit_argument.into_iter().chain(explicit_arguments);
let (yield_ty, return_ty) = if body.generator_kind.is_some() {
- let gen_ty = tcx.body_tables(body_id).node_type(id);
+ let gen_ty = tcx.typeck_body(body_id).node_type(id);
let gen_sig = match gen_ty.kind {
ty::Generator(_, gen_substs, ..) => gen_substs.as_generator().sig(),
_ => span_bug!(tcx.hir().span(id), "generator w/o generator type: {:?}", ty),
// place to be the type of the constant because NLL typeck will
// equate them.
- let return_ty = cx.tables().node_type(id);
+ let return_ty = cx.typeck_results().node_type(id);
build::construct_const(cx, body_id, return_ty, return_ty_span)
};
closure_expr_id: hir::HirId,
body_id: hir::BodyId,
) -> Ty<'_> {
- let closure_ty = tcx.body_tables(body_id).node_type(closure_expr_id);
+ let closure_ty = tcx.typeck_body(body_id).node_type(closure_expr_id);
let (closure_def_id, closure_substs) = match closure_ty.kind {
ty::Closure(closure_def_id, closure_substs) => (closure_def_id, closure_substs),
let tcx = self.hir.tcx();
let tcx_hir = tcx.hir();
- let hir_tables = self.hir.tables();
+ let hir_typeck_results = self.hir.typeck_results();
// In analyze_closure() in upvar.rs we gathered a list of upvars used by a
- // indexed closure and we stored in a map called closure_captures in TypeckTables
+ // indexed closure and we stored in a map called closure_captures in TypeckResults
// with the closure's DefId. Here, we run through that vec of UpvarIds for
// the given closure and use the necessary information to create upvar
// debuginfo and to fill `self.upvar_mutbls`.
- if let Some(upvars) = hir_tables.closure_captures.get(&fn_def_id) {
+ if let Some(upvars) = hir_typeck_results.closure_captures.get(&fn_def_id) {
let closure_env_arg = Local::new(1);
let mut closure_env_projs = vec![];
let mut closure_ty = self.local_decls[closure_env_arg].ty;
self.upvar_mutbls = upvars_with_tys
.enumerate()
.map(|(i, ((&var_id, &upvar_id), ty))| {
- let capture = hir_tables.upvar_capture(upvar_id);
+ let capture = hir_typeck_results.upvar_capture(upvar_id);
let mut mutability = Mutability::Not;
let mut name = kw::Invalid;
if let Some(Node::Binding(pat)) = tcx_hir.find(var_id) {
if let hir::PatKind::Binding(_, _, ident, _) = pat.kind {
name = ident.name;
- match hir_tables.extract_binding_mode(tcx.sess, pat.hir_id, pat.span) {
+ match hir_typeck_results
+ .extract_binding_mode(tcx.sess, pat.hir_id, pat.span)
+ {
Some(ty::BindByValue(hir::Mutability::Mut)) => {
mutability = Mutability::Mut;
}
let mut pattern = cx.pattern_from_hir(&local.pat);
if let Some(ty) = &local.ty {
- if let Some(&user_ty) = cx.tables.user_provided_types().get(ty.hir_id) {
+ if let Some(&user_ty) = cx.typeck_results.user_provided_types().get(ty.hir_id) {
debug!("mirror_stmts: user_ty={:?}", user_ty);
pattern = Pat {
ty: pattern.ty,
cx: &mut Cx<'a, 'tcx>,
block: &'tcx hir::Block<'tcx>,
) -> ExprRef<'tcx> {
- let block_ty = cx.tables().node_type(block.hir_id);
+ let block_ty = cx.typeck_results().node_type(block.hir_id);
let temp_lifetime = cx.region_scope_tree.temporary_scope(block.hir_id.local_id);
let expr = Expr {
ty: block_ty,
let mut expr = make_mirror_unadjusted(cx, self);
// Now apply adjustments, if any.
- for adjustment in cx.tables().expr_adjustments(self) {
+ for adjustment in cx.typeck_results().expr_adjustments(self) {
debug!("make_mirror: expr={:?} applying adjustment={:?}", expr, adjustment);
expr = apply_adjustment(cx, self, expr, adjustment);
}
cx: &mut Cx<'a, 'tcx>,
expr: &'tcx hir::Expr<'tcx>,
) -> Expr<'tcx> {
- let expr_ty = cx.tables().expr_ty(expr);
+ let expr_ty = cx.typeck_results().expr_ty(expr);
let temp_lifetime = cx.region_scope_tree.temporary_scope(expr.hir_id.local_id);
let kind = match expr.kind {
}
hir::ExprKind::Call(ref fun, ref args) => {
- if cx.tables().is_method_call(expr) {
+ if cx.typeck_results().is_method_call(expr) {
// The callee is something implementing Fn, FnMut, or FnOnce.
// Find the actual method implementation being called and
// build the appropriate UFCS call expression with the
let method = method_callee(cx, expr, fun.span, None);
- let arg_tys = args.iter().map(|e| cx.tables().expr_ty_adjusted(e));
+ let arg_tys = args.iter().map(|e| cx.typeck_results().expr_ty_adjusted(e));
let tupled_args = Expr {
ty: cx.tcx.mk_tup(arg_tys),
temp_lifetime,
None
};
if let Some((adt_def, index)) = adt_data {
- let substs = cx.tables().node_substs(fun.hir_id);
- let user_provided_types = cx.tables().user_provided_types();
+ let substs = cx.typeck_results().node_substs(fun.hir_id);
+ let user_provided_types = cx.typeck_results().user_provided_types();
let user_ty = user_provided_types.get(fun.hir_id).copied().map(|mut u_ty| {
if let UserType::TypeOf(ref mut did, _) = &mut u_ty.value {
*did = adt_def.did;
}
} else {
ExprKind::Call {
- ty: cx.tables().node_type(fun.hir_id),
+ ty: cx.typeck_results().node_type(fun.hir_id),
fun: fun.to_ref(),
args: args.to_ref(),
from_hir_call: true,
}
hir::ExprKind::AssignOp(op, ref lhs, ref rhs) => {
- if cx.tables().is_method_call(expr) {
+ if cx.typeck_results().is_method_call(expr) {
overloaded_operator(cx, expr, vec![lhs.to_ref(), rhs.to_ref()])
} else {
ExprKind::AssignOp { op: bin_op(op.node), lhs: lhs.to_ref(), rhs: rhs.to_ref() }
},
hir::ExprKind::Binary(op, ref lhs, ref rhs) => {
- if cx.tables().is_method_call(expr) {
+ if cx.typeck_results().is_method_call(expr) {
overloaded_operator(cx, expr, vec![lhs.to_ref(), rhs.to_ref()])
} else {
// FIXME overflow
}
hir::ExprKind::Index(ref lhs, ref index) => {
- if cx.tables().is_method_call(expr) {
+ if cx.typeck_results().is_method_call(expr) {
overloaded_place(cx, expr, expr_ty, None, vec![lhs.to_ref(), index.to_ref()])
} else {
ExprKind::Index { lhs: lhs.to_ref(), index: index.to_ref() }
}
hir::ExprKind::Unary(hir::UnOp::UnDeref, ref arg) => {
- if cx.tables().is_method_call(expr) {
+ if cx.typeck_results().is_method_call(expr) {
overloaded_place(cx, expr, expr_ty, None, vec![arg.to_ref()])
} else {
ExprKind::Deref { arg: arg.to_ref() }
}
hir::ExprKind::Unary(hir::UnOp::UnNot, ref arg) => {
- if cx.tables().is_method_call(expr) {
+ if cx.typeck_results().is_method_call(expr) {
overloaded_operator(cx, expr, vec![arg.to_ref()])
} else {
ExprKind::Unary { op: UnOp::Not, arg: arg.to_ref() }
}
hir::ExprKind::Unary(hir::UnOp::UnNeg, ref arg) => {
- if cx.tables().is_method_call(expr) {
+ if cx.typeck_results().is_method_call(expr) {
overloaded_operator(cx, expr, vec![arg.to_ref()])
} else {
if let hir::ExprKind::Lit(ref lit) = arg.kind {
hir::ExprKind::Struct(ref qpath, ref fields, ref base) => match expr_ty.kind {
ty::Adt(adt, substs) => match adt.adt_kind() {
AdtKind::Struct | AdtKind::Union => {
- let user_provided_types = cx.tables().user_provided_types();
+ let user_provided_types = cx.typeck_results().user_provided_types();
let user_ty = user_provided_types.get(expr.hir_id).copied();
debug!("make_mirror_unadjusted: (struct/union) user_ty={:?}", user_ty);
ExprKind::Adt {
fields: field_refs(cx, fields),
base: base.as_ref().map(|base| FruInfo {
base: base.to_ref(),
- field_types: cx.tables().fru_field_types()[expr.hir_id].clone(),
+ field_types: cx.typeck_results().fru_field_types()[expr.hir_id].clone(),
}),
}
}
AdtKind::Enum => {
- let res = cx.tables().qpath_res(qpath, expr.hir_id);
+ let res = cx.typeck_results().qpath_res(qpath, expr.hir_id);
match res {
Res::Def(DefKind::Variant, variant_id) => {
assert!(base.is_none());
let index = adt.variant_index_with_id(variant_id);
- let user_provided_types = cx.tables().user_provided_types();
+ let user_provided_types = cx.typeck_results().user_provided_types();
let user_ty = user_provided_types.get(expr.hir_id).copied();
debug!("make_mirror_unadjusted: (variant) user_ty={:?}", user_ty);
ExprKind::Adt {
},
hir::ExprKind::Closure(..) => {
- let closure_ty = cx.tables().expr_ty(expr);
+ let closure_ty = cx.typeck_results().expr_ty(expr);
let (def_id, substs, movability) = match closure_ty.kind {
ty::Closure(def_id, substs) => (def_id, UpvarSubsts::Closure(substs), None),
ty::Generator(def_id, substs, movability) => {
}
hir::ExprKind::Path(ref qpath) => {
- let res = cx.tables().qpath_res(qpath, expr.hir_id);
+ let res = cx.typeck_results().qpath_res(qpath, expr.hir_id);
convert_path_expr(cx, expr, res)
}
};
let temp_lifetime =
cx.region_scope_tree.temporary_scope(expr.hir_id.local_id);
- let res = cx.tables().qpath_res(qpath, expr.hir_id);
+ let res = cx.typeck_results().qpath_res(qpath, expr.hir_id);
let ty;
match res {
Res::Def(DefKind::Fn, _) | Res::Def(DefKind::AssocFn, _) => {
- ty = cx.tables().node_type(expr.hir_id);
+ ty = cx.typeck_results().node_type(expr.hir_id);
let user_ty = user_substs_applied_to_res(cx, expr.hir_id, res);
InlineAsmOperand::SymFn {
expr: Expr {
}
hir::ExprKind::Field(ref source, ..) => ExprKind::Field {
lhs: source.to_ref(),
- name: Field::new(cx.tcx.field_index(expr.hir_id, cx.tables)),
+ name: Field::new(cx.tcx.field_index(expr.hir_id, cx.typeck_results)),
},
hir::ExprKind::Cast(ref source, ref cast_ty) => {
// Check for a user-given type annotation on this `cast`
- let user_provided_types = cx.tables.user_provided_types();
+ let user_provided_types = cx.typeck_results.user_provided_types();
let user_ty = user_provided_types.get(cast_ty.hir_id);
debug!(
// Check to see if this cast is a "coercion cast", where the cast is actually done
// using a coercion (or is a no-op).
- let cast = if cx.tables().is_coercion_cast(source.hir_id) {
+ let cast = if cx.typeck_results().is_coercion_cast(source.hir_id) {
// Convert the lexpr to a vexpr.
ExprKind::Use { source: source.to_ref() }
- } else if cx.tables().expr_ty(source).is_region_ptr() {
+ } else if cx.typeck_results().expr_ty(source).is_region_ptr() {
// Special cased so that we can type check that the element
// type of the source matches the pointed to type of the
// destination.
// The correct solution would be to add symbolic computations to miri,
// so we wouldn't have to compute and store the actual value
let var = if let hir::ExprKind::Path(ref qpath) = source.kind {
- let res = cx.tables().qpath_res(qpath, source.hir_id);
- cx.tables().node_type(source.hir_id).ty_adt_def().and_then(
- |adt_def| match res {
+ let res = cx.typeck_results().qpath_res(qpath, source.hir_id);
+ cx.typeck_results().node_type(source.hir_id).ty_adt_def().and_then(|adt_def| {
+ match res {
Res::Def(
DefKind::Ctor(CtorOf::Variant, CtorKind::Const),
variant_ctor_id,
Some((d, o, ty))
}
_ => None,
- },
- )
+ }
+ })
} else {
None
};
}
}
hir::ExprKind::Type(ref source, ref ty) => {
- let user_provided_types = cx.tables.user_provided_types();
+ let user_provided_types = cx.typeck_results.user_provided_types();
let user_ty = user_provided_types.get(ty.hir_id).copied();
debug!("make_mirror_unadjusted: (type) user_ty={:?}", user_ty);
if source.is_syntactic_place_expr() {
| Res::Def(DefKind::Ctor(_, CtorKind::Fn), _)
| Res::Def(DefKind::Const, _)
| Res::Def(DefKind::AssocConst, _) => {
- cx.tables().user_provided_types().get(hir_id).copied()
+ cx.typeck_results().user_provided_types().get(hir_id).copied()
}
// A unit struct/variant which is used as a value (e.g.,
Some((def_id, substs)) => (def_id, substs, None),
None => {
let (kind, def_id) = cx
- .tables()
+ .typeck_results()
.type_dependent_def(expr.hir_id)
.unwrap_or_else(|| span_bug!(expr.span, "no type-dependent def for method callee"));
let user_ty = user_substs_applied_to_res(cx, expr.hir_id, Res::Def(kind, def_id));
debug!("method_callee: user_ty={:?}", user_ty);
- (def_id, cx.tables().node_substs(expr.hir_id), user_ty)
+ (def_id, cx.typeck_results().node_substs(expr.hir_id), user_ty)
}
};
let ty = cx.tcx().mk_fn_def(def_id, substs);
expr: &'tcx hir::Expr<'tcx>,
res: Res,
) -> ExprKind<'tcx> {
- let substs = cx.tables().node_substs(expr.hir_id);
+ let substs = cx.typeck_results().node_substs(expr.hir_id);
match res {
// A regular function, constructor function or a constant.
Res::Def(DefKind::Fn, _)
let user_ty = user_substs_applied_to_res(cx, expr.hir_id, res);
debug!("convert_path_expr: user_ty={:?}", user_ty);
ExprKind::Literal {
- literal: ty::Const::zero_sized(cx.tcx, cx.tables().node_type(expr.hir_id)),
+ literal: ty::Const::zero_sized(cx.tcx, cx.typeck_results().node_type(expr.hir_id)),
user_ty,
}
}
let name = cx.tcx.hir().name(hir_id);
let val = ty::ConstKind::Param(ty::ParamConst::new(index, name));
ExprKind::Literal {
- literal: cx.tcx.mk_const(ty::Const { val, ty: cx.tables().node_type(expr.hir_id) }),
+ literal: cx
+ .tcx
+ .mk_const(ty::Const { val, ty: cx.typeck_results().node_type(expr.hir_id) }),
user_ty: None,
}
}
substs,
None,
),
- ty: cx.tables().node_type(expr.hir_id),
+ ty: cx.typeck_results().node_type(expr.hir_id),
}),
user_ty,
}
}
Res::Def(DefKind::Ctor(_, CtorKind::Const), def_id) => {
- let user_provided_types = cx.tables.user_provided_types();
+ let user_provided_types = cx.typeck_results.user_provided_types();
let user_provided_type = user_provided_types.get(expr.hir_id).copied();
debug!("convert_path_expr: user_provided_type={:?}", user_provided_type);
- let ty = cx.tables().node_type(expr.hir_id);
+ let ty = cx.typeck_results().node_type(expr.hir_id);
match ty.kind {
// A unit struct/variant which is used as a value.
// We return a completely different ExprKind here to account for this special case.
var_hir_id: hir::HirId,
) -> ExprKind<'tcx> {
let upvar_index = cx
- .tables()
+ .typeck_results()
.closure_captures
.get(&cx.body_owner)
.and_then(|upvars| upvars.get_full(&var_hir_id).map(|(i, _, _)| i));
var_path: ty::UpvarPath { hir_id: var_hir_id },
closure_expr_id: closure_def_id.expect_local(),
};
- let var_ty = cx.tables().node_type(var_hir_id);
+ let var_ty = cx.typeck_results().node_type(var_hir_id);
// FIXME free regions in closures are not right
let closure_ty = cx
- .tables()
+ .typeck_results()
.node_type(cx.tcx.hir().local_def_id_to_hir_id(upvar_id.closure_expr_id));
// FIXME we're just hard-coding the idea that the
// ...but the upvar might be an `&T` or `&mut T` capture, at which
// point we need an implicit deref
- match cx.tables().upvar_capture(upvar_id) {
+ match cx.typeck_results().upvar_capture(upvar_id) {
ty::UpvarCapture::ByValue => field_kind,
ty::UpvarCapture::ByRef(borrow) => ExprKind::Deref {
arg: Expr {
// line up (this is because `*x` and `x[y]` represent places):
let recv_ty = match args[0] {
- ExprRef::Hair(e) => cx.tables().expr_ty_adjusted(e),
+ ExprRef::Hair(e) => cx.typeck_results().expr_ty_adjusted(e),
ExprRef::Mirror(ref e) => e.ty,
};
var_path: ty::UpvarPath { hir_id: var_hir_id },
closure_expr_id: cx.tcx.hir().local_def_id(closure_expr.hir_id),
};
- let upvar_capture = cx.tables().upvar_capture(upvar_id);
+ let upvar_capture = cx.typeck_results().upvar_capture(upvar_id);
let temp_lifetime = cx.region_scope_tree.temporary_scope(closure_expr.hir_id.local_id);
- let var_ty = cx.tables().node_type(var_hir_id);
+ let var_ty = cx.typeck_results().node_type(var_hir_id);
let captured_var = Expr {
temp_lifetime,
ty: var_ty,
fields
.iter()
.map(|field| FieldExprRef {
- name: Field::new(cx.tcx.field_index(field.hir_id, cx.tables)),
+ name: Field::new(cx.tcx.field_index(field.hir_id, cx.typeck_results)),
expr: field.expr.to_ref(),
})
.collect()
crate identity_substs: &'tcx InternalSubsts<'tcx>,
crate region_scope_tree: &'tcx region::ScopeTree,
- crate tables: &'a ty::TypeckTables<'tcx>,
+ crate typeck_results: &'a ty::TypeckResults<'tcx>,
/// This is `Constness::Const` if we are compiling a `static`,
/// `const`, or the body of a `const fn`.
src_id: hir::HirId,
) -> Cx<'a, 'tcx> {
let tcx = infcx.tcx;
- let tables = tcx.typeck_tables_of_opt_const_arg(def);
+ let typeck_results = tcx.typeck_opt_const_arg(def);
let body_owner_kind = tcx.hir().body_owner_kind(src_id);
let constness = match body_owner_kind {
param_env: tcx.param_env(def.did),
identity_substs: InternalSubsts::identity_for_item(tcx, def.did.to_def_id()),
region_scope_tree: tcx.region_scope_tree(def.did),
- tables,
+ typeck_results,
constness,
body_owner: def.did.to_def_id(),
body_owner_kind,
Node::Pat(p) | Node::Binding(p) => p,
node => bug!("pattern became {:?}", node),
};
- Pat::from_hir(self.tcx, self.param_env, self.tables(), p)
+ Pat::from_hir(self.tcx, self.param_env, self.typeck_results(), p)
}
crate fn trait_method(
self.tcx
}
- crate fn tables(&self) -> &'a ty::TypeckTables<'tcx> {
- self.tables
+ crate fn typeck_results(&self) -> &'a ty::TypeckResults<'tcx> {
+ self.typeck_results
}
crate fn check_overflow(&self) -> bool {
self.tcx()
}
- fn tables(&self) -> &ty::TypeckTables<'tcx> {
- self.tables()
+ fn typeck_results(&self) -> &ty::TypeckResults<'tcx> {
+ self.typeck_results()
}
}
let mut visitor = MatchVisitor {
tcx,
- tables: tcx.body_tables(body_id),
+ typeck_results: tcx.typeck_body(body_id),
param_env: tcx.param_env(def_id),
pattern_arena: TypedArena::default(),
};
struct MatchVisitor<'a, 'tcx> {
tcx: TyCtxt<'tcx>,
- tables: &'a ty::TypeckTables<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
param_env: ty::ParamEnv<'tcx>,
pattern_arena: TypedArena<super::Pat<'tcx>>,
}
pat: &'tcx hir::Pat<'tcx>,
have_errors: &mut bool,
) -> (&'p super::Pat<'tcx>, Ty<'tcx>) {
- let mut patcx = PatCtxt::new(self.tcx, self.param_env, self.tables);
+ let mut patcx = PatCtxt::new(self.tcx, self.param_env, self.typeck_results);
patcx.include_lint_checks();
let pattern = patcx.lower_pattern(pat);
let pattern_ty = pattern.ty;
// Fifth, check if the match is exhaustive.
// Note: An empty match isn't the same as an empty matrix for diagnostics purposes,
// since an empty matrix can occur when there are arms, if those arms all have guards.
- let scrut_ty = self.tables.expr_ty_adjusted(scrut);
+ let scrut_ty = self.typeck_results.expr_ty_adjusted(scrut);
let is_empty_match = inlined_arms.is_empty();
check_exhaustive(&mut cx, scrut_ty, scrut.span, &matrix, scrut.hir_id, is_empty_match);
}
pat.walk_always(|p| {
if let hir::PatKind::Binding(_, _, ident, None) = p.kind {
if let Some(ty::BindByValue(hir::Mutability::Not)) =
- cx.tables.extract_binding_mode(cx.tcx.sess, p.hir_id, p.span)
+ cx.typeck_results.extract_binding_mode(cx.tcx.sess, p.hir_id, p.span)
{
- let pat_ty = cx.tables.pat_ty(p).peel_refs();
+ let pat_ty = cx.typeck_results.pat_ty(p).peel_refs();
if let ty::Adt(edef, _) = pat_ty.kind {
if edef.is_enum()
&& edef.variants.iter().any(|variant| {
/// Check if a by-value binding is by-move. That is, check if the binding's type is not `Copy`.
fn is_binding_by_move(cx: &MatchVisitor<'_, '_>, hir_id: HirId, span: Span) -> bool {
- !cx.tables.node_type(hir_id).is_copy_modulo_regions(cx.tcx.at(span), cx.param_env)
+ !cx.typeck_results.node_type(hir_id).is_copy_modulo_regions(cx.tcx.at(span), cx.param_env)
}
/// Check the legality of by-move bindings.
fn check_legality_of_move_bindings(cx: &mut MatchVisitor<'_, '_>, has_guard: bool, pat: &Pat<'_>) {
let sess = cx.tcx.sess;
- let tables = cx.tables;
+ let typeck_results = cx.typeck_results;
// Find all by-ref spans.
let mut by_ref_spans = Vec::new();
pat.each_binding(|_, hir_id, span, _| {
- if let Some(ty::BindByReference(_)) = tables.extract_binding_mode(sess, hir_id, span) {
+ if let Some(ty::BindByReference(_)) =
+ typeck_results.extract_binding_mode(sess, hir_id, span)
+ {
by_ref_spans.push(span);
}
});
};
pat.walk_always(|p| {
if let hir::PatKind::Binding(.., sub) = &p.kind {
- if let Some(ty::BindByValue(_)) = tables.extract_binding_mode(sess, p.hir_id, p.span) {
+ if let Some(ty::BindByValue(_)) =
+ typeck_results.extract_binding_mode(sess, p.hir_id, p.span)
+ {
if is_binding_by_move(cx, p.hir_id, p.span) {
check_move(p, sub.as_deref());
}
};
let binding_span = pat.span.with_hi(name.span.hi());
- let tables = cx.tables;
+ let typeck_results = cx.typeck_results;
let sess = cx.tcx.sess;
// Get the binding move, extract the mutability if by-ref.
- let mut_outer = match tables.extract_binding_mode(sess, pat.hir_id, pat.span) {
+ let mut_outer = match typeck_results.extract_binding_mode(sess, pat.hir_id, pat.span) {
Some(ty::BindByValue(_)) if is_binding_by_move(cx, pat.hir_id, pat.span) => {
// We have `x @ pat` where `x` is by-move. Reject all borrows in `pat`.
let mut conflicts_ref = Vec::new();
sub.each_binding(|_, hir_id, span, _| {
- match tables.extract_binding_mode(sess, hir_id, span) {
+ match typeck_results.extract_binding_mode(sess, hir_id, span) {
Some(ty::BindByValue(_)) | None => {}
Some(ty::BindByReference(_)) => conflicts_ref.push(span),
}
let occurs_because = format!(
"move occurs because `{}` has type `{}` which does not implement the `Copy` trait",
name,
- tables.node_type(pat.hir_id),
+ typeck_results.node_type(pat.hir_id),
);
sess.struct_span_err(pat.span, "borrow of moved value")
.span_label(binding_span, format!("value moved into `{}` here", name))
let mut conflicts_mut_mut = Vec::new();
let mut conflicts_mut_ref = Vec::new();
sub.each_binding(|_, hir_id, span, name| {
- match tables.extract_binding_mode(sess, hir_id, span) {
+ match typeck_results.extract_binding_mode(sess, hir_id, span) {
Some(ty::BindByReference(mut_inner)) => match (mut_outer, mut_inner) {
(Mutability::Not, Mutability::Not) => {} // Both sides are `ref`.
(Mutability::Mut, Mutability::Mut) => conflicts_mut_mut.push((span, name)), // 2x `ref mut`.
crate struct PatCtxt<'a, 'tcx> {
crate tcx: TyCtxt<'tcx>,
crate param_env: ty::ParamEnv<'tcx>,
- crate tables: &'a ty::TypeckTables<'tcx>,
+ crate typeck_results: &'a ty::TypeckResults<'tcx>,
crate errors: Vec<PatternError>,
include_lint_checks: bool,
}
crate fn from_hir(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
- tables: &'a ty::TypeckTables<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
pat: &'tcx hir::Pat<'tcx>,
) -> Self {
- let mut pcx = PatCtxt::new(tcx, param_env, tables);
+ let mut pcx = PatCtxt::new(tcx, param_env, typeck_results);
let result = pcx.lower_pattern(pat);
if !pcx.errors.is_empty() {
let msg = format!("encountered errors lowering pattern: {:?}", pcx.errors);
crate fn new(
tcx: TyCtxt<'tcx>,
param_env: ty::ParamEnv<'tcx>,
- tables: &'a ty::TypeckTables<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
) -> Self {
- PatCtxt { tcx, param_env, tables, errors: vec![], include_lint_checks: false }
+ PatCtxt { tcx, param_env, typeck_results, errors: vec![], include_lint_checks: false }
}
crate fn include_lint_checks(&mut self) -> &mut Self {
// adjustments in *reverse order* (last-in-first-out, so that the last `Deref` inserted
// gets the least-dereferenced type).
let unadjusted_pat = self.lower_pattern_unadjusted(pat);
- self.tables.pat_adjustments().get(pat.hir_id).unwrap_or(&vec![]).iter().rev().fold(
+ self.typeck_results.pat_adjustments().get(pat.hir_id).unwrap_or(&vec![]).iter().rev().fold(
unadjusted_pat,
|pat, ref_ty| {
debug!("{:?}: wrapping pattern with type {:?}", pat, ref_ty);
}
fn lower_pattern_unadjusted(&mut self, pat: &'tcx hir::Pat<'tcx>) -> Pat<'tcx> {
- let mut ty = self.tables.node_type(pat.hir_id);
+ let mut ty = self.typeck_results.node_type(pat.hir_id);
if let ty::Error(_) = ty.kind {
// Avoid ICEs (e.g., #50577 and #50585).
}
hir::PatKind::Binding(_, id, ident, ref sub) => {
- let bm =
- *self.tables.pat_binding_modes().get(pat.hir_id).expect("missing binding mode");
+ let bm = *self
+ .typeck_results
+ .pat_binding_modes()
+ .get(pat.hir_id)
+ .expect("missing binding mode");
let (mutability, mode) = match bm {
ty::BindByValue(mutbl) => (mutbl, BindingMode::ByValue),
ty::BindByReference(hir::Mutability::Mut) => (
}
hir::PatKind::TupleStruct(ref qpath, ref pats, ddpos) => {
- let res = self.tables.qpath_res(qpath, pat.hir_id);
+ let res = self.typeck_results.qpath_res(qpath, pat.hir_id);
let adt_def = match ty.kind {
ty::Adt(adt_def, _) => adt_def,
_ => span_bug!(pat.span, "tuple struct pattern not applied to an ADT {:?}", ty),
}
hir::PatKind::Struct(ref qpath, ref fields, _) => {
- let res = self.tables.qpath_res(qpath, pat.hir_id);
+ let res = self.typeck_results.qpath_res(qpath, pat.hir_id);
let subpatterns = fields
.iter()
.map(|field| FieldPat {
- field: Field::new(self.tcx.field_index(field.hir_id, self.tables)),
+ field: Field::new(self.tcx.field_index(field.hir_id, self.typeck_results)),
pattern: self.lower_pattern(&field.pat),
})
.collect();
/// it to `const_to_pat`. Any other path (like enum variants without fields)
/// is converted to the corresponding pattern via `lower_variant_or_leaf`.
fn lower_path(&mut self, qpath: &hir::QPath<'_>, id: hir::HirId, span: Span) -> Pat<'tcx> {
- let ty = self.tables.node_type(id);
- let res = self.tables.qpath_res(qpath, id);
+ let ty = self.typeck_results.node_type(id);
+ let res = self.typeck_results.qpath_res(qpath, id);
let pat_from_kind = |kind| Pat { span, ty, kind: Box::new(kind) };
// Use `Reveal::All` here because patterns are always monomorphic even if their function
// isn't.
let param_env_reveal_all = self.param_env.with_reveal_all();
- let substs = self.tables.node_substs(id);
+ let substs = self.typeck_results.node_substs(id);
let instance = match ty::Instance::resolve(self.tcx, param_env_reveal_all, def_id, substs) {
Ok(Some(i)) => i,
Ok(None) => {
match self.tcx.const_eval_instance(param_env_reveal_all, instance, Some(span)) {
Ok(value) => {
- let const_ = ty::Const::from_value(self.tcx, value, self.tables.node_type(id));
+ let const_ =
+ ty::Const::from_value(self.tcx, value, self.typeck_results.node_type(id));
let pattern = self.const_to_pat(&const_, id, span, mir_structural_match_violation);
return pattern;
}
- let user_provided_types = self.tables().user_provided_types();
+ let user_provided_types = self.typeck_results().user_provided_types();
if let Some(u_ty) = user_provided_types.get(id) {
let user_ty = PatTyProj::from_user_type(*u_ty);
Pat {
_ => span_bug!(expr.span, "not a literal: {:?}", expr),
};
- let lit_input = LitToConstInput { lit: &lit.node, ty: self.tables.expr_ty(expr), neg };
+ let lit_input =
+ LitToConstInput { lit: &lit.node, ty: self.typeck_results.expr_ty(expr), neg };
match self.tcx.at(expr.span).lit_to_const(lit_input) {
Ok(val) => *self.const_to_pat(val, expr.hir_id, lit.span, false).kind,
Err(LitToConstError::UnparseableFloat) => {
self.tcx
}
- fn tables(&self) -> &ty::TypeckTables<'tcx> {
- self.tables
+ fn typeck_results(&self) -> &ty::TypeckResults<'tcx> {
+ self.typeck_results
}
}
crate trait UserAnnotatedTyHelpers<'tcx> {
fn tcx(&self) -> TyCtxt<'tcx>;
- fn tables(&self) -> &ty::TypeckTables<'tcx>;
+ fn typeck_results(&self) -> &ty::TypeckResults<'tcx>;
/// Looks up the type associated with this hir-id and applies the
/// user-given substitutions; the hir-id must map to a suitable
&self,
hir_id: hir::HirId,
) -> Option<CanonicalUserType<'tcx>> {
- let user_provided_types = self.tables().user_provided_types();
+ let user_provided_types = self.typeck_results().user_provided_types();
let mut user_ty = *user_provided_types.get(hir_id)?;
debug!("user_subts_applied_to_ty_of_hir_id: user_ty={:?}", user_ty);
- let ty = self.tables().node_type(hir_id);
+ let ty = self.typeck_results().node_type(hir_id);
match ty.kind {
ty::Adt(adt_def, ..) => {
if let UserType::TypeOf(ref mut did, _) = &mut user_ty.value {
#![feature(box_patterns)]
#![feature(box_syntax)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
#![feature(const_fn)]
#![feature(const_panic)]
#![feature(crate_visibility_modifier)]
match self.parse_ty() {
Ok(ty) => (None, Some(ty)),
Err(mut err) => {
- // Rewind to before attempting to parse the type and continue parsing.
- let parser_snapshot_after_type =
- mem::replace(self, parser_snapshot_before_type);
if let Ok(snip) = self.span_to_snippet(pat.span) {
err.span_label(pat.span, format!("while parsing the type for `{}`", snip));
}
- (Some((parser_snapshot_after_type, colon_sp, err)), None)
+ let err = if self.check(&token::Eq) {
+ err.emit();
+ None
+ } else {
+ // Rewind to before attempting to parse the type and continue parsing.
+ let parser_snapshot_after_type =
+ mem::replace(self, parser_snapshot_before_type);
+ Some((parser_snapshot_after_type, colon_sp, err))
+ };
+ (err, None)
}
}
} else {
self.check_target_feature(attr, span, target)
} else if attr.check_name(sym::track_caller) {
self.check_track_caller(&attr.span, attrs, span, target)
+ } else if attr.check_name(sym::doc) {
+ self.check_doc_alias(attr)
} else {
true
};
}
}
+ fn check_doc_alias(&self, attr: &Attribute) -> bool {
+ if let Some(mi) = attr.meta() {
+ if let Some(list) = mi.meta_item_list() {
+ for meta in list {
+ if meta.check_name(sym::alias) {
+ if !meta.is_value_str()
+ || meta
+ .value_str()
+ .map(|s| s.to_string())
+ .unwrap_or_else(String::new)
+ .is_empty()
+ {
+ self.tcx
+ .sess
+ .struct_span_err(
+ meta.span(),
+ "doc alias attribute expects a string: #[doc(alias = \"0\")]",
+ )
+ .emit();
+ return false;
+ }
+ }
+ }
+ }
+ }
+ true
+ }
+
/// Checks if the `#[repr]` attributes on `item` are valid.
fn check_repr(
&self,
struct MarkSymbolVisitor<'tcx> {
worklist: Vec<hir::HirId>,
tcx: TyCtxt<'tcx>,
- maybe_typeck_tables: Option<&'tcx ty::TypeckTables<'tcx>>,
+ maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>,
live_symbols: FxHashSet<hir::HirId>,
repr_has_repr_c: bool,
in_pat: bool,
}
impl<'tcx> MarkSymbolVisitor<'tcx> {
- /// Gets the type-checking side-tables for the current body.
+ /// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
- fn tables(&self) -> &'tcx ty::TypeckTables<'tcx> {
- self.maybe_typeck_tables.expect("`MarkSymbolVisitor::tables` called outside of body")
+ fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
+ self.maybe_typeck_results
+ .expect("`MarkSymbolVisitor::typeck_results` called outside of body")
}
fn check_def_id(&mut self, def_id: DefId) {
}
fn lookup_and_handle_method(&mut self, id: hir::HirId) {
- if let Some(def_id) = self.tables().type_dependent_def_id(id) {
+ if let Some(def_id) = self.typeck_results().type_dependent_def_id(id) {
self.check_def_id(def_id);
} else {
bug!("no type-dependent def for method");
}
fn handle_field_access(&mut self, lhs: &hir::Expr<'_>, hir_id: hir::HirId) {
- match self.tables().expr_ty_adjusted(lhs).kind {
+ match self.typeck_results().expr_ty_adjusted(lhs).kind {
ty::Adt(def, _) => {
- let index = self.tcx.field_index(hir_id, self.tables());
+ let index = self.tcx.field_index(hir_id, self.typeck_results());
self.insert_def_id(def.non_enum_variant().fields[index].did);
}
ty::Tuple(..) => {}
res: Res,
pats: &[hir::FieldPat<'_>],
) {
- let variant = match self.tables().node_type(lhs.hir_id).kind {
+ let variant = match self.typeck_results().node_type(lhs.hir_id).kind {
ty::Adt(adt, _) => adt.variant_of_res(res),
_ => span_bug!(lhs.span, "non-ADT in struct pattern"),
};
if let PatKind::Wild = pat.pat.kind {
continue;
}
- let index = self.tcx.field_index(pat.hir_id, self.tables());
+ let index = self.tcx.field_index(pat.hir_id, self.typeck_results());
self.insert_def_id(variant.fields[index].did);
}
}
fn mark_as_used_if_union(&mut self, adt: &ty::AdtDef, fields: &[hir::Field<'_>]) {
if adt.is_union() && adt.non_enum_variant().fields.len() > 1 && adt.did.is_local() {
for field in fields {
- let index = self.tcx.field_index(field.hir_id, self.tables());
+ let index = self.tcx.field_index(field.hir_id, self.typeck_results());
self.insert_def_id(adt.non_enum_variant().fields[index].did);
}
}
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
- let old_maybe_typeck_tables = self.maybe_typeck_tables.replace(self.tcx.body_tables(body));
+ let old_maybe_typeck_results =
+ self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
let body = self.tcx.hir().body(body);
self.visit_body(body);
- self.maybe_typeck_tables = old_maybe_typeck_tables;
+ self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_variant_data(
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
match expr.kind {
hir::ExprKind::Path(ref qpath @ hir::QPath::TypeRelative(..)) => {
- let res = self.tables().qpath_res(qpath, expr.hir_id);
+ let res = self.typeck_results().qpath_res(qpath, expr.hir_id);
self.handle_res(res);
}
hir::ExprKind::MethodCall(..) => {
self.handle_field_access(&lhs, expr.hir_id);
}
hir::ExprKind::Struct(ref qpath, ref fields, _) => {
- let res = self.tables().qpath_res(qpath, expr.hir_id);
+ let res = self.typeck_results().qpath_res(qpath, expr.hir_id);
self.handle_res(res);
- if let ty::Adt(ref adt, _) = self.tables().expr_ty(expr).kind {
+ if let ty::Adt(ref adt, _) = self.typeck_results().expr_ty(expr).kind {
self.mark_as_used_if_union(adt, fields);
}
}
fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
match pat.kind {
PatKind::Struct(ref path, ref fields, _) => {
- let res = self.tables().qpath_res(path, pat.hir_id);
+ let res = self.typeck_results().qpath_res(path, pat.hir_id);
self.handle_field_pattern_match(pat, res, fields);
}
PatKind::Path(ref qpath) => {
- let res = self.tables().qpath_res(qpath, pat.hir_id);
+ let res = self.typeck_results().qpath_res(qpath, pat.hir_id);
self.handle_res(res);
}
_ => (),
let mut symbol_visitor = MarkSymbolVisitor {
worklist,
tcx,
- maybe_typeck_tables: None,
+ maybe_typeck_results: None,
live_symbols: Default::default(),
repr_has_repr_c: false,
in_pat: false,
struct ExprVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
- tables: &'tcx ty::TypeckTables<'tcx>,
+ typeck_results: &'tcx ty::TypeckResults<'tcx>,
param_env: ty::ParamEnv<'tcx>,
}
tied_input: Option<(&hir::Expr<'tcx>, Option<InlineAsmType>)>,
) -> Option<InlineAsmType> {
// Check the type against the allowed types for inline asm.
- let ty = self.tables.expr_ty_adjusted(expr);
+ let ty = self.typeck_results.expr_ty_adjusted(expr);
let asm_ty_isize = match self.tcx.sess.target.ptr_width {
16 => InlineAsmType::I16,
32 => InlineAsmType::I32,
let mut err = self.tcx.sess.struct_span_err(vec![in_expr.span, expr.span], msg);
err.span_label(
in_expr.span,
- &format!("type `{}`", self.tables.expr_ty_adjusted(in_expr)),
+ &format!("type `{}`", self.typeck_results.expr_ty_adjusted(in_expr)),
);
err.span_label(expr.span, &format!("type `{}`", ty));
err.note(
}
}
hir::InlineAsmOperand::Const { ref expr } => {
- let ty = self.tables.expr_ty_adjusted(expr);
+ let ty = self.typeck_results.expr_ty_adjusted(expr);
match ty.kind {
ty::Int(_) | ty::Uint(_) | ty::Float(_) => {}
_ => {
let owner_def_id = self.tcx.hir().body_owner_def_id(body_id);
let body = self.tcx.hir().body(body_id);
let param_env = self.tcx.param_env(owner_def_id.to_def_id());
- let tables = self.tcx.typeck_tables_of(owner_def_id);
- ExprVisitor { tcx: self.tcx, param_env, tables }.visit_body(body);
+ let typeck_results = self.tcx.typeck(owner_def_id);
+ ExprVisitor { tcx: self.tcx, param_env, typeck_results }.visit_body(body);
self.visit_body(body);
}
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
match expr.kind {
hir::ExprKind::Path(ref qpath) => {
- let res = self.tables.qpath_res(qpath, expr.hir_id);
+ let res = self.typeck_results.qpath_res(qpath, expr.hir_id);
if let Res::Def(DefKind::Fn, did) = res {
if self.def_id_is_transmute(did) {
- let typ = self.tables.node_type(expr.hir_id);
+ let typ = self.typeck_results.node_type(expr.hir_id);
let sig = typ.fn_sig(self.tcx);
let from = sig.inputs().skip_binder()[0];
let to = sig.output().skip_binder();
#![feature(in_band_lifetimes)]
#![feature(nll)]
#![feature(or_patterns)]
-#![cfg_attr(bootstrap, feature(track_caller))]
#![recursion_limit = "256"]
#[macro_use]
struct Liveness<'a, 'tcx> {
ir: &'a mut IrMaps<'tcx>,
- tables: &'a ty::TypeckTables<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
param_env: ty::ParamEnv<'tcx>,
s: Specials,
successors: Vec<LiveNode>,
exit_ln: ir.add_live_node(ExitNode),
};
- let tables = ir.tcx.typeck_tables_of(def_id);
+ let typeck_results = ir.tcx.typeck(def_id);
let param_env = ir.tcx.param_env(def_id);
let num_live_nodes = ir.num_live_nodes;
Liveness {
ir,
- tables,
+ typeck_results,
param_env,
s: specials,
successors: vec![invalid_node(); num_live_nodes],
var_path: ty::UpvarPath { hir_id: var_hir_id },
closure_expr_id: self.ir.body_owner,
};
- match self.tables.upvar_capture(upvar_id) {
+ match self.typeck_results.upvar_capture(upvar_id) {
ty::UpvarCapture::ByRef(_) => {
let var = self.variable(var_hir_id, upvar.span);
self.acc(self.s.exit_ln, var, ACC_READ | ACC_USE);
FnKind::Closure(..) => {}
}
- let ty = self.tables.node_type(id);
+ let ty = self.typeck_results.node_type(id);
match ty.kind {
ty::Closure(_def_id, substs) => match substs.as_closure().kind() {
ty::ClosureKind::Fn => {}
hir::ExprKind::AssignOp(_, ref l, ref r) => {
// an overloaded assign op is like a method call
- if self.tables.is_method_call(expr) {
+ if self.typeck_results.is_method_call(expr) {
let succ = self.propagate_through_expr(&l, succ);
self.propagate_through_expr(&r, succ)
} else {
let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
let succ = if self.ir.tcx.is_ty_uninhabited_from(
m,
- self.tables.expr_ty(expr),
+ self.typeck_results.expr_ty(expr),
self.param_env,
) {
self.s.exit_ln
let m = self.ir.tcx.parent_module(expr.hir_id).to_def_id();
let succ = if self.ir.tcx.is_ty_uninhabited_from(
m,
- self.tables.expr_ty(expr),
+ self.typeck_results.expr_ty(expr),
self.param_env,
) {
self.s.exit_ln
}
hir::ExprKind::AssignOp(_, ref l, _) => {
- if !this.tables.is_method_call(expr) {
+ if !this.typeck_results.is_method_call(expr) {
this.check_place(&l);
}
}
var_path: ty::UpvarPath { hir_id: var_hir_id },
closure_expr_id: self.ir.body_owner,
};
- match self.tables.upvar_capture(upvar_id) {
+ match self.typeck_results.upvar_capture(upvar_id) {
ty::UpvarCapture::ByValue => {}
ty::UpvarCapture::ByRef(..) => continue,
};
struct ReachableContext<'tcx> {
// The type context.
tcx: TyCtxt<'tcx>,
- maybe_typeck_tables: Option<&'tcx ty::TypeckTables<'tcx>>,
+ maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>,
// The set of items which must be exported in the linkage sense.
reachable_symbols: HirIdSet,
// A worklist of item IDs. Each item ID in this worklist will be inlined
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
- let old_maybe_typeck_tables = self.maybe_typeck_tables.replace(self.tcx.body_tables(body));
+ let old_maybe_typeck_results =
+ self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
let body = self.tcx.hir().body(body);
self.visit_body(body);
- self.maybe_typeck_tables = old_maybe_typeck_tables;
+ self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
let res = match expr.kind {
- hir::ExprKind::Path(ref qpath) => Some(self.tables().qpath_res(qpath, expr.hir_id)),
+ hir::ExprKind::Path(ref qpath) => {
+ Some(self.typeck_results().qpath_res(qpath, expr.hir_id))
+ }
hir::ExprKind::MethodCall(..) => self
- .tables()
+ .typeck_results()
.type_dependent_def(expr.hir_id)
.map(|(kind, def_id)| Res::Def(kind, def_id)),
_ => None,
}
impl<'tcx> ReachableContext<'tcx> {
- /// Gets the type-checking side-tables for the current body.
+ /// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
- fn tables(&self) -> &'tcx ty::TypeckTables<'tcx> {
- self.maybe_typeck_tables.expect("`ReachableContext::tables` called outside of body")
+ fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
+ self.maybe_typeck_results
+ .expect("`ReachableContext::typeck_results` called outside of body")
}
// Returns true if the given def ID represents a local item that is
});
let mut reachable_context = ReachableContext {
tcx,
- maybe_typeck_tables: None,
+ maybe_typeck_results: None,
reachable_symbols: Default::default(),
worklist: Vec::new(),
any_library,
match item.kind {
hir::ItemKind::ExternCrate(_) => {
// compiler-generated `extern crate` items have a dummy span.
- if item.span.is_dummy() {
+ // `std` is still checked for the `restricted-std` feature.
+ if item.span.is_dummy() && item.ident.as_str() != "std" {
return;
}
use rustc_data_structures::fx::FxHashSet;
use rustc_errors::struct_span_err;
use rustc_hir as hir;
+use rustc_hir::fake_lang_items::FAKE_ITEMS_REFS;
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_hir::lang_items;
-use rustc_hir::lang_items::ITEM_REFS;
use rustc_hir::weak_lang_items::WEAK_ITEMS_REFS;
use rustc_middle::middle::lang_items::required;
use rustc_middle::ty::TyCtxt;
use rustc_session::config::CrateType;
-use rustc_span::symbol::sym;
use rustc_span::symbol::Symbol;
use rustc_span::Span;
if self.items.require(item).is_err() {
self.items.missing.push(item);
}
- } else if name == sym::count_code_region {
- // `core::intrinsics::code_count_region()` is (currently) the only `extern` lang item
- // that is never actually linked. It is not a `weak_lang_item` that can be registered
- // when used, and should be registered here instead.
- if let Some((item_index, _)) = ITEM_REFS.get(&name).cloned() {
- if self.items.items[item_index].is_none() {
- let item_def_id = self.tcx.hir().local_def_id(hir_id).to_def_id();
- self.items.items[item_index] = Some(item_def_id);
- }
+ } else if let Some(&item) = FAKE_ITEMS_REFS.get(&name) {
+ // Ensure "fake lang items" are registered. These are `extern` lang items that are
+ // injected into the MIR automatically (such as source code coverage counters), but are
+ // never actually linked; therefore, unlike "weak lang items", they cannot by registered
+ // when used, because they never appear to be used.
+ if self.items.items[item as usize].is_none() {
+ let item_def_id = self.tcx.hir().local_def_id(hir_id).to_def_id();
+ self.items.items[item as usize] = Some(item_def_id);
}
} else {
struct_span_err!(self.tcx.sess, span, E0264, "unknown external lang item: `{}`", name)
metadata_loader: &dyn MetadataLoader,
ident: Ident,
) {
- let registrar = locator::find_plugin_registrar(sess, metadata_loader, ident.span, ident.name);
-
- if let Some((lib, disambiguator)) = registrar {
- let symbol = sess.generate_plugin_registrar_symbol(disambiguator);
- let fun = dylink_registrar(sess, ident.span, lib, symbol);
- plugins.push(fun);
- }
+ let (lib, disambiguator) =
+ locator::find_plugin_registrar(sess, metadata_loader, ident.span, ident.name);
+ let symbol = sess.generate_plugin_registrar_symbol(disambiguator);
+ let fun = dylink_registrar(sess, ident.span, lib, symbol);
+ plugins.push(fun);
}
// Dynamically link a registrar function into the compiler process.
#![feature(in_band_lifetimes)]
#![feature(nll)]
#![feature(or_patterns)]
-#![cfg_attr(bootstrap, feature(track_caller))]
#![recursion_limit = "256"]
use rustc_attr as attr;
struct NamePrivacyVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
- maybe_typeck_tables: Option<&'tcx ty::TypeckTables<'tcx>>,
+ maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>,
current_item: Option<hir::HirId>,
}
impl<'tcx> NamePrivacyVisitor<'tcx> {
- /// Gets the type-checking side-tables for the current body.
+ /// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
- fn tables(&self) -> &'tcx ty::TypeckTables<'tcx> {
- self.maybe_typeck_tables.expect("`NamePrivacyVisitor::tables` called outside of body")
+ fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
+ self.maybe_typeck_results
+ .expect("`NamePrivacyVisitor::typeck_results` called outside of body")
}
// Checks that a field in a struct constructor (expression or pattern) is accessible.
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
- let old_maybe_typeck_tables = self.maybe_typeck_tables.replace(self.tcx.body_tables(body));
+ let old_maybe_typeck_results =
+ self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
let body = self.tcx.hir().body(body);
self.visit_body(body);
- self.maybe_typeck_tables = old_maybe_typeck_tables;
+ self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'tcx>) {
if let hir::ExprKind::Struct(ref qpath, fields, ref base) = expr.kind {
- let res = self.tables().qpath_res(qpath, expr.hir_id);
- let adt = self.tables().expr_ty(expr).ty_adt_def().unwrap();
+ let res = self.typeck_results().qpath_res(qpath, expr.hir_id);
+ let adt = self.typeck_results().expr_ty(expr).ty_adt_def().unwrap();
let variant = adt.variant_of_res(res);
if let Some(ref base) = *base {
// If the expression uses FRU we need to make sure all the unmentioned fields
// are checked for privacy (RFC 736). Rather than computing the set of
// unmentioned fields, just check them all.
for (vf_index, variant_field) in variant.fields.iter().enumerate() {
- let field = fields
- .iter()
- .find(|f| self.tcx.field_index(f.hir_id, self.tables()) == vf_index);
+ let field = fields.iter().find(|f| {
+ self.tcx.field_index(f.hir_id, self.typeck_results()) == vf_index
+ });
let (use_ctxt, span) = match field {
Some(field) => (field.ident.span, field.span),
None => (base.span, base.span),
} else {
for field in fields {
let use_ctxt = field.ident.span;
- let index = self.tcx.field_index(field.hir_id, self.tables());
+ let index = self.tcx.field_index(field.hir_id, self.typeck_results());
self.check_field(use_ctxt, field.span, adt, &variant.fields[index], false);
}
}
fn visit_pat(&mut self, pat: &'tcx hir::Pat<'tcx>) {
if let PatKind::Struct(ref qpath, fields, _) = pat.kind {
- let res = self.tables().qpath_res(qpath, pat.hir_id);
- let adt = self.tables().pat_ty(pat).ty_adt_def().unwrap();
+ let res = self.typeck_results().qpath_res(qpath, pat.hir_id);
+ let adt = self.typeck_results().pat_ty(pat).ty_adt_def().unwrap();
let variant = adt.variant_of_res(res);
for field in fields {
let use_ctxt = field.ident.span;
- let index = self.tcx.field_index(field.hir_id, self.tables());
+ let index = self.tcx.field_index(field.hir_id, self.typeck_results());
self.check_field(use_ctxt, field.span, adt, &variant.fields[index], false);
}
}
struct TypePrivacyVisitor<'tcx> {
tcx: TyCtxt<'tcx>,
- maybe_typeck_tables: Option<&'tcx ty::TypeckTables<'tcx>>,
+ maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>,
current_item: LocalDefId,
span: Span,
}
impl<'tcx> TypePrivacyVisitor<'tcx> {
- /// Gets the type-checking side-tables for the current body.
+ /// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
- fn tables(&self) -> &'tcx ty::TypeckTables<'tcx> {
- self.maybe_typeck_tables.expect("`TypePrivacyVisitor::tables` called outside of body")
+ fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
+ self.maybe_typeck_results
+ .expect("`TypePrivacyVisitor::typeck_results` called outside of body")
}
fn item_is_accessible(&self, did: DefId) -> bool {
// Take node-id of an expression or pattern and check its type for privacy.
fn check_expr_pat_type(&mut self, id: hir::HirId, span: Span) -> bool {
self.span = span;
- let tables = self.tables();
- if self.visit(tables.node_type(id)) || self.visit(tables.node_substs(id)) {
+ let typeck_results = self.typeck_results();
+ if self.visit(typeck_results.node_type(id)) || self.visit(typeck_results.node_substs(id)) {
return true;
}
- if let Some(adjustments) = tables.adjustments().get(id) {
+ if let Some(adjustments) = typeck_results.adjustments().get(id) {
for adjustment in adjustments {
if self.visit(adjustment.target) {
return true;
}
fn visit_nested_body(&mut self, body: hir::BodyId) {
- let old_maybe_typeck_tables = self.maybe_typeck_tables.replace(self.tcx.body_tables(body));
+ let old_maybe_typeck_results =
+ self.maybe_typeck_results.replace(self.tcx.typeck_body(body));
let body = self.tcx.hir().body(body);
self.visit_body(body);
- self.maybe_typeck_tables = old_maybe_typeck_tables;
+ self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx>) {
self.span = hir_ty.span;
- if let Some(tables) = self.maybe_typeck_tables {
+ if let Some(typeck_results) = self.maybe_typeck_results {
// Types in bodies.
- if self.visit(tables.node_type(hir_ty.hir_id)) {
+ if self.visit(typeck_results.node_type(hir_ty.hir_id)) {
return;
}
} else {
fn visit_trait_ref(&mut self, trait_ref: &'tcx hir::TraitRef<'tcx>) {
self.span = trait_ref.path.span;
- if self.maybe_typeck_tables.is_none() {
+ if self.maybe_typeck_results.is_none() {
// Avoid calling `hir_trait_to_predicates` in bodies, it will ICE.
// The traits' privacy in bodies is already checked as a part of trait object types.
let bounds = rustc_typeck::hir_trait_to_predicates(
hir::ExprKind::MethodCall(_, span, _, _) => {
// Method calls have to be checked specially.
self.span = span;
- if let Some(def_id) = self.tables().type_dependent_def_id(expr.hir_id) {
+ if let Some(def_id) = self.typeck_results().type_dependent_def_id(expr.hir_id) {
if self.visit(self.tcx.type_of(def_id)) {
return;
}
Res::Def(kind, def_id) => Some((kind, def_id)),
_ => None,
},
- hir::QPath::TypeRelative(..) => {
- self.maybe_typeck_tables.and_then(|tables| tables.type_dependent_def(id))
- }
+ hir::QPath::TypeRelative(..) => self
+ .maybe_typeck_results
+ .and_then(|typeck_results| typeck_results.type_dependent_def(id)),
};
let def = def.filter(|(kind, _)| match kind {
DefKind::AssocFn | DefKind::AssocConst | DefKind::AssocTy | DefKind::Static => true,
fn visit_item(&mut self, item: &'tcx hir::Item<'tcx>) {
let orig_current_item =
mem::replace(&mut self.current_item, self.tcx.hir().local_def_id(item.hir_id));
- let old_maybe_typeck_tables = self.maybe_typeck_tables.take();
+ let old_maybe_typeck_results = self.maybe_typeck_results.take();
intravisit::walk_item(self, item);
- self.maybe_typeck_tables = old_maybe_typeck_tables;
+ self.maybe_typeck_results = old_maybe_typeck_results;
self.current_item = orig_current_item;
}
}
fn check_mod_privacy(tcx: TyCtxt<'_>, module_def_id: LocalDefId) {
// Check privacy of names not checked in previous compilation stages.
- let mut visitor = NamePrivacyVisitor { tcx, maybe_typeck_tables: None, current_item: None };
+ let mut visitor = NamePrivacyVisitor { tcx, maybe_typeck_results: None, current_item: None };
let (module, span, hir_id) = tcx.hir().get_module(module_def_id);
intravisit::walk_mod(&mut visitor, module, hir_id);
// Check privacy of explicitly written types and traits as well as
// inferred types of expressions and patterns.
let mut visitor =
- TypePrivacyVisitor { tcx, maybe_typeck_tables: None, current_item: module_def_id, span };
+ TypePrivacyVisitor { tcx, maybe_typeck_results: None, current_item: module_def_id, span };
intravisit::walk_mod(&mut visitor, module, hir_id);
}
#![feature(bool_to_option)]
#![feature(const_fn)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
#![feature(const_panic)]
#![feature(core_intrinsics)]
#![feature(hash_raw_entry)]
(self.cstore().crate_name_untracked(def_id.krate), None)
} else {
let def_key = self.cstore().def_key(def_id);
- (
- // This unwrap is safe: crates must always have a name
- def_key.disambiguated_data.data.get_opt_name().unwrap(),
- // This unwrap is safe since we know this isn't the root
- Some(self.get_module(DefId { index: def_key.parent.unwrap(), ..def_id })),
- )
+ let name = def_key
+ .disambiguated_data
+ .data
+ .get_opt_name()
+ .expect("given a DefId that wasn't a module");
+ // This unwrap is safe since we know this isn't the root
+ let parent = Some(self.get_module(DefId {
+ index: def_key.parent.expect("failed to get parent for module"),
+ ..def_id
+ }));
+ (name, parent)
};
// Allocate and return a new module with the information we found
);
err
}
+ ResolutionError::ParamInTyOfConstArg(name) => {
+ let mut err = struct_span_err!(
+ self.session,
+ span,
+ E0770,
+ "the type of const parameters must not depend on other generic parameters"
+ );
+ err.span_label(
+ span,
+ format!("the type must not depend on the parameter `{}`", name),
+ );
+ err
+ }
ResolutionError::SelfInTyParamDefault => {
let mut err = struct_span_err!(
self.session,
// otherwise cause duplicate suggestions.
continue;
}
- if let Some(crate_id) =
- self.crate_loader.maybe_process_path_extern(ident.name, ident.span)
- {
+ if let Some(crate_id) = self.crate_loader.maybe_process_path_extern(ident.name) {
let crate_root =
self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
suggestions.extend(self.lookup_import_candidates_from_module(
/// from the default of a type parameter because they're not declared
/// before said type parameter. Also see the `visit_generics` override.
ForwardTyParamBanRibKind,
+
+ /// We are inside of the type of a const parameter. Can't refer to any
+ /// parameters.
+ ConstParamTyRibKind,
}
impl RibKind<'_> {
| FnItemRibKind
| ConstantItemRibKind
| ModuleRibKind(_)
- | MacroDefinition(_) => false,
+ | MacroDefinition(_)
+ | ConstParamTyRibKind => false,
AssocItemRibKind | ItemRibKind(_) | ForwardTyParamBanRibKind => true,
}
}
/// Fields used to add information to diagnostic errors.
diagnostic_metadata: DiagnosticMetadata<'ast>,
+
+ /// State used to know whether to ignore resolution errors for function bodies.
+ ///
+ /// In particular, rustdoc uses this to avoid giving errors for `cfg()` items.
+ /// This is `false` at module scope, so errors there are always reported.
+ /// It is set to `true` when entering a nested function or trait body, and restored on exit.
+ in_func_body: bool,
}
/// Walks the whole crate in DFS order, visiting each item, resolving names as it goes.
impl<'a, 'ast> Visitor<'ast> for LateResolutionVisitor<'a, '_, 'ast> {
fn visit_item(&mut self, item: &'ast Item) {
let prev = replace(&mut self.diagnostic_metadata.current_item, Some(item));
+ // Always report errors in items we just entered.
+ let old_ignore = replace(&mut self.in_func_body, false);
self.resolve_item(item);
+ self.in_func_body = old_ignore;
self.diagnostic_metadata.current_item = prev;
}
fn visit_arm(&mut self, arm: &'ast Arm) {
visit::walk_fn_ret_ty(this, &declaration.output);
+ // Ignore errors in function bodies if this is rustdoc
+ // Be sure not to set this until the function signature has been resolved.
+ let previous_state = replace(&mut this.in_func_body, true);
// Resolve the function body, potentially inside the body of an async closure
match fn_kind {
FnKind::Fn(.., body) => walk_list!(this, visit_block, body),
};
debug!("(resolving function) leaving function");
+ this.in_func_body = previous_state;
})
});
self.diagnostic_metadata.current_function = previous_value;
for bound in ¶m.bounds {
self.visit_param_bound(bound);
}
+ self.ribs[TypeNS].push(Rib::new(ConstParamTyRibKind));
+ self.ribs[ValueNS].push(Rib::new(ConstParamTyRibKind));
self.visit_ty(ty);
+ self.ribs[TypeNS].pop().unwrap();
+ self.ribs[ValueNS].pop().unwrap();
}
}
}
label_ribs: Vec::new(),
current_trait_ref: None,
diagnostic_metadata: DiagnosticMetadata::default(),
+ // errors at module scope should always be reported
+ in_func_body: false,
}
}
return if self.is_label_valid_from_rib(i) {
Some(*id)
} else {
- self.r.report_error(
+ self.report_error(
original_span,
ResolutionError::UnreachableLabel {
name: label.name,
suggestion = suggestion.or_else(|| self.suggestion_for_label_in_rib(i, label));
}
- self.r.report_error(
+ self.report_error(
original_span,
ResolutionError::UndeclaredLabel { name: label.name, suggestion },
);
| ItemRibKind(..)
| ConstantItemRibKind
| ModuleRibKind(..)
- | ForwardTyParamBanRibKind => {
+ | ForwardTyParamBanRibKind
+ | ConstParamTyRibKind => {
return false;
}
}
};
let report_error = |this: &Self, ns| {
let what = if ns == TypeNS { "type parameters" } else { "local variables" };
- this.r.session.span_err(ident.span, &format!("imports cannot refer to {}", what));
+ if this.should_report_errs() {
+ this.r
+ .session
+ .span_err(ident.span, &format!("imports cannot refer to {}", what));
+ }
};
for &ns in nss {
if seen_bindings.contains_key(&ident) {
let span = seen_bindings.get(&ident).unwrap();
let err = ResolutionError::NameAlreadyUsedInParameterList(ident.name, *span);
- self.r.report_error(param.ident.span, err);
+ self.report_error(param.ident.span, err);
}
seen_bindings.entry(ident).or_insert(param.ident.span);
.is_err()
{
let path = &self.current_trait_ref.as_ref().unwrap().1.path;
- self.r.report_error(span, err(ident.name, &path_names_to_string(path)));
+ self.report_error(span, err(ident.name, &path_names_to_string(path)));
}
}
}
}
fn resolve_local(&mut self, local: &'ast Local) {
+ debug!("resolving local ({:?})", local);
// Resolve the type.
walk_list!(self, visit_ty, &local.ty);
if inconsistent_vars.contains_key(name) {
v.could_be_path = false;
}
- self.r.report_error(
+ self.report_error(
*v.origin.iter().next().unwrap(),
ResolutionError::VariableNotBoundInPattern(v),
);
let mut inconsistent_vars = inconsistent_vars.iter().collect::<Vec<_>>();
inconsistent_vars.sort();
for (name, v) in inconsistent_vars {
- self.r.report_error(v.0, ResolutionError::VariableBoundWithDifferentMode(*name, v.1));
+ self.report_error(v.0, ResolutionError::VariableBoundWithDifferentMode(*name, v.1));
}
// 5) Finally bubble up all the binding maps.
// `Variant(a, a)`:
_ => IdentifierBoundMoreThanOnceInSamePattern,
};
- self.r.report_error(ident.span, error(ident.name));
+ self.report_error(ident.span, error(ident.name));
}
// Record as bound if it's valid:
// to something unusable as a pattern (e.g., constructor function),
// but we still conservatively report an error, see
// issues/33118#issuecomment-233962221 for one reason why.
- self.r.report_error(
+ self.report_error(
ident.span,
ResolutionError::BindingShadowsSomethingUnacceptable(
pat_src.descr(),
source: PathSource<'ast>,
crate_lint: CrateLint,
) -> PartialRes {
+ log::debug!("smart_resolve_path_fragment(id={:?},qself={:?},path={:?}", id, qself, path);
let ns = source.namespace();
let is_expected = &|res| source.is_expected(res);
let report_errors = |this: &mut Self, res: Option<Res>| {
- let (err, candidates) = this.smart_resolve_report_errors(path, span, source, res);
-
- let def_id = this.parent_scope.module.normal_ancestor_id;
- let instead = res.is_some();
- let suggestion =
- if res.is_none() { this.report_missing_type_error(path) } else { None };
-
- this.r.use_injections.push(UseError { err, candidates, def_id, instead, suggestion });
+ if this.should_report_errs() {
+ let (err, candidates) = this.smart_resolve_report_errors(path, span, source, res);
+
+ let def_id = this.parent_scope.module.normal_ancestor_id;
+ let instead = res.is_some();
+ let suggestion =
+ if res.is_none() { this.report_missing_type_error(path) } else { None };
+
+ this.r.use_injections.push(UseError {
+ err,
+ candidates,
+ def_id,
+ instead,
+ suggestion,
+ });
+ }
PartialRes::new(Res::Err)
};
let def_id = this.parent_scope.module.normal_ancestor_id;
- this.r.use_injections.push(UseError {
- err,
- candidates,
- def_id,
- instead: false,
- suggestion: None,
- });
+ if this.should_report_errs() {
+ this.r.use_injections.push(UseError {
+ err,
+ candidates,
+ def_id,
+ instead: false,
+ suggestion: None,
+ });
+ } else {
+ err.cancel();
+ }
// We don't return `Some(parent_err)` here, because the error will
// be already printed as part of the `use` injections
Err(err) => {
if let Some(err) = report_errors_for_call(self, err) {
- self.r.report_error(err.span, err.node);
+ self.report_error(err.span, err.node);
}
PartialRes::new(Res::Err)
if let Some(LexicalScopeBinding::Res(res)) = binding { res != Res::Err } else { false }
}
+ /// A wrapper around [`Resolver::report_error`].
+ ///
+ /// This doesn't emit errors for function bodies if this is rustdoc.
+ fn report_error(&self, span: Span, resolution_error: ResolutionError<'_>) {
+ if self.should_report_errs() {
+ self.r.report_error(span, resolution_error);
+ }
+ }
+
+ #[inline]
+ /// If we're actually rustdoc then avoid giving a name resolution error for `cfg()` items.
+ fn should_report_errs(&self) -> bool {
+ !(self.r.session.opts.actually_rustdoc && self.in_func_body)
+ }
+
// Resolve in alternative namespaces if resolution in the primary namespace fails.
fn resolve_qpath_anywhere(
&mut self,
if !module.no_implicit_prelude {
let extern_prelude = self.r.extern_prelude.clone();
names.extend(extern_prelude.iter().flat_map(|(ident, _)| {
- self.r
- .crate_loader
- .maybe_process_path_extern(ident.name, ident.span)
- .and_then(|crate_id| {
+ self.r.crate_loader.maybe_process_path_extern(ident.name).and_then(
+ |crate_id| {
let crate_mod = Res::Def(
DefKind::Mod,
DefId { krate: crate_id, index: CRATE_DEF_INDEX },
} else {
None
}
- })
+ },
+ )
}));
if let Some(prelude) = self.r.prelude {
err.emit();
}
+ // FIXME(const_generics): This patches over an ICE caused by non-'static lifetimes in const
+ // generics. We are disallowing this until we can decide on how we want to handle non-'static
+ // lifetimes in const generics. See issue #74052 for discussion.
+ crate fn emit_non_static_lt_in_const_generic_error(&self, lifetime_ref: &hir::Lifetime) {
+ let mut err = struct_span_err!(
+ self.tcx.sess,
+ lifetime_ref.span,
+ E0771,
+ "use of non-static lifetime `{}` in const generic",
+ lifetime_ref
+ );
+ err.note(
+ "for more information, see issue #74052 \
+ <https://github.com/rust-lang/rust/issues/74052>",
+ );
+ err.emit();
+ }
+
crate fn is_trait_ref_fn_scope(&mut self, trait_ref: &'tcx hir::PolyTraitRef<'tcx>) -> bool {
if let def::Res::Def(_, did) = trait_ref.trait_ref.path.res {
if [
/// Used to disallow the use of in-band lifetimes in `fn` or `Fn` syntax.
is_in_fn_syntax: bool,
+ is_in_const_generic: bool,
+
/// List of labels in the function/method currently under analysis.
labels_in_fn: Vec<Ident>,
scope: ROOT_SCOPE,
trait_ref_hack: false,
is_in_fn_syntax: false,
+ is_in_const_generic: false,
labels_in_fn: vec![],
xcrate_object_lifetime_defaults: Default::default(),
lifetime_uses: &mut Default::default(),
self.insert_lifetime(lifetime_ref, Region::Static);
return;
}
+ if self.is_in_const_generic && lifetime_ref.name != LifetimeName::Error {
+ self.emit_non_static_lt_in_const_generic_error(lifetime_ref);
+ return;
+ }
self.resolve_lifetime_ref(lifetime_ref);
}
}
}
GenericParamKind::Const { ref ty, .. } => {
+ let was_in_const_generic = self.is_in_const_generic;
+ self.is_in_const_generic = true;
walk_list!(self, visit_param_bound, param.bounds);
self.visit_ty(&ty);
+ self.is_in_const_generic = was_in_const_generic;
}
}
}
scope: &wrap_scope,
trait_ref_hack: self.trait_ref_hack,
is_in_fn_syntax: self.is_in_fn_syntax,
+ is_in_const_generic: self.is_in_const_generic,
labels_in_fn,
xcrate_object_lifetime_defaults,
lifetime_uses,
BindingShadowsSomethingUnacceptable(&'static str, Symbol, &'a NameBinding<'a>),
/// Error E0128: type parameters with a default cannot use forward-declared identifiers.
ForwardDeclaredTyParam, // FIXME(const_generics:defaults)
+ /// ERROR E0770: the type of const parameters must not depend on other generic parameters.
+ ParamInTyOfConstArg(Symbol),
/// Error E0735: type parameters with a default cannot use `Self`
SelfInTyParamDefault,
/// Error E0767: use of unreachable label
}
return Res::Err;
}
+ ConstParamTyRibKind => {
+ if record_used {
+ self.report_error(span, ParamInTyOfConstArg(rib_ident.name));
+ }
+ return Res::Err;
+ }
}
}
if let Some(res_err) = res_err {
// This was an attempt to use a type parameter outside its scope.
ItemRibKind(has_generic_params) => has_generic_params,
FnItemRibKind => HasGenericParams::Yes,
+ ConstParamTyRibKind => {
+ if record_used {
+ self.report_error(
+ span,
+ ResolutionError::ParamInTyOfConstArg(rib_ident.name),
+ );
+ }
+ return Res::Err;
+ }
};
if record_used {
}
for rib in ribs {
let has_generic_params = match rib.kind {
+ NormalRibKind
+ | ClosureOrAsyncRibKind
+ | AssocItemRibKind
+ | ModuleRibKind(..)
+ | MacroDefinition(..)
+ | ForwardTyParamBanRibKind
+ | ConstantItemRibKind => continue,
ItemRibKind(has_generic_params) => has_generic_params,
FnItemRibKind => HasGenericParams::Yes,
- _ => continue,
+ ConstParamTyRibKind => {
+ if record_used {
+ self.report_error(
+ span,
+ ResolutionError::ParamInTyOfConstArg(rib_ident.name),
+ );
+ }
+ return Res::Err;
+ }
};
// This was an attempt to use a const parameter outside its scope.
let crate_id = if !speculative {
self.crate_loader.process_path_extern(ident.name, ident.span)
} else {
- self.crate_loader.maybe_process_path_extern(ident.name, ident.span)?
+ self.crate_loader.maybe_process_path_extern(ident.name)?
};
let crate_root = self.get_module(DefId { krate: crate_id, index: CRATE_DEF_INDEX });
Some(
span: Span,
path_str: &str,
ns: Namespace,
- module_id: LocalDefId,
+ module_id: DefId,
) -> Result<(ast::Path, Res), ()> {
let path = if path_str.starts_with("::") {
ast::Path {
.collect(),
}
};
- let module = self.module_map.get(&module_id).copied().unwrap_or(self.graph_root);
+ let module = self.get_module(module_id);
let parent_scope = &ParentScope::module(module);
let res = self.resolve_ast_path(&path, ns, parent_scope).map_err(|_| ())?;
Ok((path, res))
self.dumper.analysis()
}
- fn nest_tables<F>(&mut self, item_def_id: LocalDefId, f: F)
+ fn nest_typeck_results<F>(&mut self, item_def_id: LocalDefId, f: F)
where
F: FnOnce(&mut Self),
{
- let tables = if self.tcx.has_typeck_tables(item_def_id) {
- Some(self.tcx.typeck_tables_of(item_def_id))
+ let typeck_results = if self.tcx.has_typeck_results(item_def_id) {
+ Some(self.tcx.typeck(item_def_id))
} else {
None
};
- let old_maybe_typeck_tables = self.save_ctxt.maybe_typeck_tables;
- self.save_ctxt.maybe_typeck_tables = tables;
+ let old_maybe_typeck_results = self.save_ctxt.maybe_typeck_results;
+ self.save_ctxt.maybe_typeck_results = typeck_results;
f(self);
- self.save_ctxt.maybe_typeck_tables = old_maybe_typeck_tables;
+ self.save_ctxt.maybe_typeck_results = old_maybe_typeck_results;
}
fn span_from_span(&self, span: Span) -> SpanData {
collector.visit_pat(&arg.pat);
for (hir_id, ident, ..) in collector.collected_idents {
- let typ = match self.save_ctxt.tables().node_type_opt(hir_id) {
+ let typ = match self.save_ctxt.typeck_results().node_type_opt(hir_id) {
Some(s) => s.to_string(),
None => continue,
};
debug!("process_method: {}:{}", hir_id, ident);
let map = &self.tcx.hir();
- self.nest_tables(map.local_def_id(hir_id), |v| {
+ self.nest_typeck_results(map.local_def_id(hir_id), |v| {
if let Some(mut method_data) = v.save_ctxt.get_method_data(hir_id, ident, span) {
if let Some(body) = body {
v.process_formals(map.body(body).params, &method_data.qualname);
body: hir::BodyId,
) {
let map = &self.tcx.hir();
- self.nest_tables(map.local_def_id(item.hir_id), |v| {
+ self.nest_typeck_results(map.local_def_id(item.hir_id), |v| {
let body = map.body(body);
if let Some(fn_data) = v.save_ctxt.get_item_data(item) {
down_cast_data!(fn_data, DefData, item.span);
typ: &'tcx hir::Ty<'tcx>,
expr: &'tcx hir::Expr<'tcx>,
) {
- self.nest_tables(self.tcx.hir().local_def_id(item.hir_id), |v| {
+ self.nest_typeck_results(self.tcx.hir().local_def_id(item.hir_id), |v| {
if let Some(var_data) = v.save_ctxt.get_item_data(item) {
down_cast_data!(var_data, DefData, item.span);
v.dumper.dump_def(&access_from!(v.save_ctxt, item, item.hir_id), var_data);
}
// walk type and init value
- self.nest_tables(self.tcx.hir().local_def_id(hir_id), |v| {
+ self.nest_typeck_results(self.tcx.hir().local_def_id(hir_id), |v| {
v.visit_ty(typ);
if let Some(expr) = expr {
v.visit_expr(expr);
);
}
- self.nest_tables(self.tcx.hir().local_def_id(item.hir_id), |v| {
+ self.nest_typeck_results(self.tcx.hir().local_def_id(item.hir_id), |v| {
for field in def.fields() {
v.process_struct_field_def(field, item.hir_id);
v.visit_ty(&field.ty);
}
let map = &self.tcx.hir();
- self.nest_tables(map.local_def_id(item.hir_id), |v| {
+ self.nest_typeck_results(map.local_def_id(item.hir_id), |v| {
v.visit_ty(&typ);
if let &Some(ref trait_ref) = trait_ref {
v.process_path(trait_ref.hir_ref_id, &hir::QPath::Resolved(None, &trait_ref.path));
match p.kind {
hir::PatKind::Struct(ref _path, fields, _) => {
// FIXME do something with _path?
- let adt = match self.save_ctxt.tables().node_type_opt(p.hir_id) {
+ let adt = match self.save_ctxt.typeck_results().node_type_opt(p.hir_id) {
Some(ty) if ty.ty_adt_def().is_some() => ty.ty_adt_def().unwrap(),
_ => {
intravisit::walk_pat(self, p);
Res::Local(hir_id) => {
let typ = self
.save_ctxt
- .tables()
+ .typeck_results()
.node_type_opt(hir_id)
.map(|t| t.to_string())
.unwrap_or_default();
hir::TyKind::Array(ref ty, ref anon_const) => {
self.visit_ty(ty);
let map = self.tcx.hir();
- self.nest_tables(self.tcx.hir().local_def_id(anon_const.hir_id), |v| {
+ self.nest_typeck_results(self.tcx.hir().local_def_id(anon_const.hir_id), |v| {
v.visit_expr(&map.body(anon_const.body).value)
});
}
hir::TyKind::OpaqueDef(item_id, _) => {
let item = self.tcx.hir().item(item_id.id);
- self.nest_tables(self.tcx.hir().local_def_id(item_id.id), |v| v.visit_item(item));
+ self.nest_typeck_results(self.tcx.hir().local_def_id(item_id.id), |v| {
+ v.visit_item(item)
+ });
}
_ => intravisit::walk_ty(self, t),
}
match ex.kind {
hir::ExprKind::Struct(ref path, ref fields, ref base) => {
let hir_expr = self.save_ctxt.tcx.hir().expect_expr(ex.hir_id);
- let adt = match self.save_ctxt.tables().expr_ty_opt(&hir_expr) {
+ let adt = match self.save_ctxt.typeck_results().expr_ty_opt(&hir_expr) {
Some(ty) if ty.ty_adt_def().is_some() => ty.ty_adt_def().unwrap(),
_ => {
intravisit::walk_expr(self, ex);
// walk the body
let map = self.tcx.hir();
- self.nest_tables(self.tcx.hir().local_def_id(ex.hir_id), |v| {
+ self.nest_typeck_results(self.tcx.hir().local_def_id(ex.hir_id), |v| {
let body = map.body(body);
v.process_formals(body.params, &id);
v.visit_expr(&body.value)
hir::ExprKind::Repeat(ref expr, ref anon_const) => {
self.visit_expr(expr);
let map = self.tcx.hir();
- self.nest_tables(self.tcx.hir().local_def_id(anon_const.hir_id), |v| {
+ self.nest_typeck_results(self.tcx.hir().local_def_id(anon_const.hir_id), |v| {
v.visit_expr(&map.body(anon_const.body).value)
});
}
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(nll)]
#![feature(or_patterns)]
-#![cfg_attr(bootstrap, feature(track_caller))]
#![recursion_limit = "256"]
mod dump_visitor;
pub struct SaveContext<'tcx> {
tcx: TyCtxt<'tcx>,
- maybe_typeck_tables: Option<&'tcx ty::TypeckTables<'tcx>>,
+ maybe_typeck_results: Option<&'tcx ty::TypeckResults<'tcx>>,
access_levels: &'tcx AccessLevels,
span_utils: SpanUtils<'tcx>,
config: Config,
}
impl<'tcx> SaveContext<'tcx> {
- /// Gets the type-checking side-tables for the current body.
+ /// Gets the type-checking results for the current body.
/// As this will ICE if called outside bodies, only call when working with
/// `Expr` or `Pat` nodes (they are guaranteed to be found only in bodies).
#[track_caller]
- fn tables(&self) -> &'tcx ty::TypeckTables<'tcx> {
- self.maybe_typeck_tables.expect("`SaveContext::tables` called outside of body")
+ fn typeck_results(&self) -> &'tcx ty::TypeckResults<'tcx> {
+ self.maybe_typeck_results.expect("`SaveContext::typeck_results` called outside of body")
}
fn span_from_span(&self, span: Span) -> SpanData {
None => {
debug!("could not find container for method {} at {:?}", hir_id, span);
// This is not necessarily a bug, if there was a compilation error,
- // the tables we need might not exist.
+ // the typeck results we need might not exist.
return None;
}
},
}
pub fn get_expr_data(&self, expr: &hir::Expr<'_>) -> Option<Data> {
- let ty = self.tables().expr_ty_adjusted_opt(expr)?;
+ let ty = self.typeck_results().expr_ty_adjusted_opt(expr)?;
if matches!(ty.kind, ty::Error(_)) {
return None;
}
match expr.kind {
hir::ExprKind::Field(ref sub_ex, ident) => {
- match self.tables().expr_ty_adjusted(&sub_ex).kind {
+ match self.typeck_results().expr_ty_adjusted(&sub_ex).kind {
ty::Adt(def, _) if !def.is_enum() => {
let variant = &def.non_enum_variant();
filter!(self.span_utils, ident.span);
}
}
hir::ExprKind::MethodCall(ref seg, ..) => {
- let method_id = match self.tables().type_dependent_def_id(expr.hir_id) {
+ let method_id = match self.typeck_results().type_dependent_def_id(expr.hir_id) {
Some(id) => id,
None => {
debug!("could not resolve method id for {:?}", expr);
},
Node::Expr(&hir::Expr { kind: hir::ExprKind::Struct(ref qpath, ..), .. }) => {
- self.tables().qpath_res(qpath, hir_id)
+ self.typeck_results().qpath_res(qpath, hir_id)
}
Node::Expr(&hir::Expr { kind: hir::ExprKind::Path(ref qpath), .. })
| Node::Ty(&hir::Ty { kind: hir::TyKind::Path(ref qpath), .. }) => match qpath {
hir::QPath::Resolved(_, path) => path.res,
hir::QPath::TypeRelative(..) => self
- .maybe_typeck_tables
- .map_or(Res::Err, |tables| tables.qpath_res(qpath, hir_id)),
+ .maybe_typeck_results
+ .map_or(Res::Err, |typeck_results| typeck_results.qpath_res(qpath, hir_id)),
},
Node::Binding(&hir::Pat {
let save_ctxt = SaveContext {
tcx,
- maybe_typeck_tables: None,
+ maybe_typeck_results: None,
access_levels: &access_levels,
span_utils: SpanUtils::new(&tcx.sess),
config: find_config(config),
);
}
+ if debugging_opts.instrument_coverage {
+ if cg.profile_generate.enabled() || cg.profile_use.is_some() {
+ early_error(
+ error_format,
+ "option `-Z instrument-coverage` is not compatible with either `-C profile-use` \
+ or `-C profile-generate`",
+ );
+ }
+
+ // `-Z instrument-coverage` implies:
+ // * `-Z symbol-mangling-version=v0` - to ensure consistent and reversible name mangling.
+ // Note, LLVM coverage tools can analyze coverage over multiple runs, including some
+ // changes to source code; so mangled names must be consistent across compilations.
+ // * `-C link-dead-code` - so unexecuted code is still counted as zero, rather than be
+ // optimized out. Note that instrumenting dead code can be explicitly disabled with:
+ // `-Z instrument-coverage -C link-dead-code=no`.
+ debugging_opts.symbol_mangling_version = SymbolManglingVersion::V0;
+ if cg.link_dead_code == None {
+ // FIXME(richkadel): Investigate if the `instrument-coverage` implementation can
+ // inject ["zero counters"](https://llvm.org/docs/CoverageMappingFormat.html#counter)
+ // in the coverage map when "dead code" is removed, rather than forcing `link-dead-code`.
+ cg.link_dead_code = Some(true);
+ }
+ }
+
if !cg.embed_bitcode {
match cg.lto {
LtoCli::No | LtoCli::Unspecified => {}
"a single extra argument to append to the linker invocation (can be used several times)"),
link_args: Vec<String> = (Vec::new(), parse_list, [UNTRACKED],
"extra arguments to append to the linker invocation (space separated)"),
- link_dead_code: bool = (false, parse_bool, [UNTRACKED],
+ link_dead_code: Option<bool> = (None, parse_opt_bool, [UNTRACKED],
"keep dead code at link time (useful for code coverage) (default: no)"),
linker: Option<PathBuf> = (None, parse_opt_pathbuf, [UNTRACKED],
"system linker to link outputs with"),
(such as entering an empty infinite loop) by inserting llvm.sideeffect \
(default: no)"),
instrument_coverage: bool = (false, parse_bool, [TRACKED],
- "instrument the generated code with LLVM code region counters to (in the \
- future) generate coverage reports; disables/overrides some optimization \
- options (note, the compiler build config must include `profiler = true`) \
- (default: no)"),
+ "instrument the generated code to support LLVM source-based code coverage \
+ reports (note, the compiler build config must include `profiler = true`, \
+ and is mutually exclusive with `-C profile-generate`/`-C profile-use`); \
+ implies `-C link-dead-code` (unless explicitly disabled)` and
+ `-Z symbol-mangling-version=v0`; and disables/overrides some optimization \
+ options (default: no)"),
instrument_mcount: bool = (false, parse_bool, [TRACKED],
"insert function instrument code for mcount-based tracing (default: no)"),
keep_hygiene_data: bool = (false, parse_bool, [UNTRACKED],
);
}
+ // FIXME(richkadel): See `src/test/run-make-fulldeps/instrument-coverage/Makefile`. After
+ // compiling with `-Zinstrument-coverage`, the resulting binary generates a segfault during
+ // the program's exit process (likely while attempting to generate the coverage stats in
+ // the "*.profraw" file). An investigation to resolve the problem on Windows is ongoing,
+ // but until this is resolved, the option is disabled on Windows, and the test is skipped
+ // when targeting `MSVC`.
+ if sess.opts.debugging_opts.instrument_coverage && sess.target.target.options.is_like_msvc {
+ sess.warn(
+ "Rust source-based code coverage instrumentation (with `-Z instrument-coverage`) \
+ is not yet supported on Windows when targeting MSVC. The resulting binaries will \
+ still be instrumented for experimentation purposes, but may not execute correctly.",
+ );
+ }
+
const ASAN_SUPPORTED_TARGETS: &[&str] = &[
"aarch64-fuchsia",
"aarch64-unknown-linux-gnu",
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(crate_visibility_modifier)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
#![feature(const_fn)]
#![feature(const_panic)]
#![feature(negative_impls)]
#[cfg(test)]
mod tests;
+// The proc macro code for this is in `src/librustc_macros/src/symbols.rs`.
symbols! {
// After modifying this list adjust `is_special`, `is_used_keyword`/`is_unused_keyword`,
// this should be rarely necessary though if the keywords are kept in alphabetic order.
// As well as the symbols listed, there are symbols for the the strings
// "0", "1", ..., "9", which are accessible via `sym::integer`.
//
- // Keep this list in sorted order, as defined by the Unix `sort` utility.
+ // The proc macro will abort if symbols are not in alphabetical order (as
+ // defined by `impl Ord for str`) or if any symbols are duplicated. Vim
+ // users can sort the list by selecting it and executing the command
+ // `:'<,'>!LC_ALL=C sort`.
+ //
+ // There is currently no checking that all symbols are used; that would be
+ // nice to have.
Symbols {
+ Alignment,
+ Arc,
+ Argument,
+ ArgumentV1,
+ Arguments,
+ C,
+ Center,
+ Clone,
+ Copy,
+ Count,
+ Debug,
+ Decodable,
+ Decoder,
+ Default,
+ Encodable,
+ Encoder,
+ Eq,
+ Equal,
+ Err,
+ Error,
+ FormatSpec,
+ Formatter,
+ From,
+ Future,
+ FxHashMap,
+ FxHashSet,
+ GlobalAlloc,
+ Hash,
+ HashMap,
+ HashSet,
+ Hasher,
+ Implied,
+ Input,
+ IntoIterator,
+ Is,
+ ItemContext,
+ Iterator,
+ Layout,
+ Left,
+ LintPass,
+ None,
+ Ok,
+ Option,
+ Ord,
+ Ordering,
+ Output,
+ Param,
+ PartialEq,
+ PartialOrd,
+ Pending,
+ Pin,
+ Poll,
+ ProcMacro,
+ ProcMacroHack,
+ ProceduralMasqueradeDummyType,
+ Range,
+ RangeFrom,
+ RangeFull,
+ RangeInclusive,
+ RangeTo,
+ RangeToInclusive,
+ Rc,
+ Ready,
+ Result,
+ Return,
+ Right,
+ RustcDecodable,
+ RustcEncodable,
+ Send,
+ Some,
+ StructuralEq,
+ StructuralPartialEq,
+ Sync,
+ Target,
+ Try,
+ Ty,
+ TyCtxt,
+ TyKind,
+ Unknown,
+ Vec,
+ Yield,
+ _DECLS,
+ _Self,
+ __D,
+ __H,
+ __S,
+ __next,
+ __try_var,
+ _d,
+ _e,
+ _task_context,
aarch64_target_feature,
abi,
abi_amdgpu_kernel,
aborts,
add,
add_assign,
- address,
add_with_overflow,
+ address,
advanced_slice_patterns,
adx_target_feature,
alias,
alignstack,
all,
alloc,
- allocator,
- allocator_internals,
alloc_error_handler,
alloc_layout,
alloc_zeroed,
+ allocator,
+ allocator_internals,
allow,
- allowed,
allow_fail,
allow_internal_unsafe,
allow_internal_unstable,
allow_internal_unstable_backcompat_hack,
+ allowed,
always,
and,
+ and_then,
any,
arbitrary_enum_discriminant,
arbitrary_self_types,
- Arc,
- Arguments,
- ArgumentV1,
arith_offset,
arm_target_feature,
array,
+ as_str,
asm,
assert,
assert_inhabited,
associated_type_bounds,
associated_type_defaults,
associated_types,
- as_str,
assume,
assume_init,
async_await,
async_closure,
atomics,
+ att_syntax,
attr,
- attributes,
attr_literals,
- att_syntax,
+ attributes,
augmented_assignments,
automatically_derived,
avx512_target_feature,
await_macro,
+ bang,
begin_panic,
bench,
bin,
box_syntax,
braced_empty_structs,
breakpoint,
+ bridge,
bswap,
- C,
+ c_variadic,
call,
- caller_location,
call_mut,
call_once,
+ caller_location,
cdylib,
ceilf32,
ceilf64,
cfg_target_vendor,
cfg_version,
char,
+ client,
clippy,
clone,
- Clone,
clone_closures,
clone_from,
closure_to_fn_coercion,
context,
convert,
copy,
- Copy,
copy_closures,
copy_nonoverlapping,
copysignf32,
custom_derive,
custom_inner_attributes,
custom_test_frameworks,
- c_variadic,
+ d,
dead_code,
dealloc,
debug,
- Debug,
debug_assertions,
+ debug_struct,
debug_trait,
- declare_lint_pass,
+ debug_trait_builder,
+ debug_tuple,
decl_macro,
- Decodable,
+ declare_lint_pass,
decode,
- Default,
default_lib_allocator,
default_type_parameter_fallback,
default_type_params,
diagnostic,
direct,
discriminant_kind,
+ discriminant_type,
discriminant_value,
dispatch_from_dyn,
div,
doc_cfg,
doc_keyword,
doc_masked,
+ doc_spotlight,
doctest,
document_private_items,
- dotdoteq_in_patterns,
dotdot_in_tuple_patterns,
+ dotdoteq_in_patterns,
double_braced_closure: "{{closure}}",
double_braced_constant: "{{constant}}",
double_braced_constructor: "{{constructor}}",
double_braced_misc: "{{misc}}",
double_braced_opaque: "{{opaque}}",
drop,
- dropck_eyepatch,
- dropck_parametricity,
drop_in_place,
drop_types_in_const,
+ dropck_eyepatch,
+ dropck_parametricity,
dylib,
dyn_trait,
eh_catch_typeinfo,
eh_personality,
+ emit_enum,
+ emit_enum_variant,
+ emit_enum_variant_arg,
+ emit_struct,
+ emit_struct_field,
enable,
enclosing_scope,
- Encodable,
encode,
env,
eq,
- Eq,
- Equal,
err,
- Err,
exact_div,
except,
exchange_malloc,
export_name,
expr,
extern_absolute_paths,
- external_doc,
extern_crate_item_prelude,
extern_crate_self,
extern_in_paths,
extern_prelude,
extern_types,
+ external_doc,
+ f,
f16c_target_feature,
f32,
f32_runtime,
field,
field_init_shorthand,
file,
+ fill,
+ finish,
+ flags,
float_to_int_unchecked,
floorf32,
floorf64,
fn_once_output,
forbid,
forget,
+ format,
format_args,
format_args_capture,
format_args_nl,
freeze,
frem_fast,
from,
- From,
from_desugaring,
from_error,
from_generator,
fsub_fast,
fundamental,
future,
- Future,
future_trait,
- FxHashMap,
- FxHashSet,
ge,
+ gen_future,
+ gen_kill,
generator,
- generators,
generator_state,
+ generators,
generic_associated_types,
generic_param_attrs,
- gen_future,
- gen_kill,
get_context,
- GlobalAlloc,
global_allocator,
global_asm,
globs,
gt,
half_open_range_patterns,
hash,
- Hash,
- HashMap,
- HashSet,
hexagon_target_feature,
hidden,
homogeneous_aggregate,
html_no_source,
html_playground_url,
html_root_url,
+ i,
i128,
i128_type,
i16,
inlateout,
inline,
inout,
- Input,
intel,
into_iter,
- IntoIterator,
into_result,
intrinsics,
irrefutable_let_patterns,
issue_5723_bootstrap,
issue_tracker_base_url,
item,
- item_context: "ItemContext",
item_like_imports,
iter,
- Iterator,
keyword,
kind,
label,
lang,
lang_items,
lateout,
- Layout,
lazy_normalization_consts,
le,
let_chains,
likely,
line,
link,
- linkage,
link_args,
link_cfg,
link_llvm_intrinsics,
link_name,
link_ordinal,
link_section,
- LintPass,
+ linkage,
lint_reasons,
literal,
llvm_asm,
log10f64,
log2f32,
log2f64,
+ log_syntax,
logf32,
logf64,
- log_syntax,
loop_break_value,
lt,
macro_at_most_once_rep,
macro_lifetime_matcher,
macro_literal_matcher,
macro_reexport,
- macros_in_extern,
macro_use,
macro_vis_matcher,
+ macros_in_extern,
main,
managed_boxes,
manually_drop,
+ map,
marker,
marker_trait_attr,
masked,
match_default_bindings,
maxnumf32,
maxnumf64,
+ may_dangle,
maybe_uninit,
maybe_uninit_uninit,
maybe_uninit_zeroed,
- may_dangle,
- member_constraints,
- memory,
mem_uninitialized,
mem_zeroed,
+ member_constraints,
+ memory,
message,
meta,
min_align_of,
min_align_of_val,
min_const_fn,
min_const_unsafe_fn,
+ min_specialization,
minnumf32,
minnumf64,
- min_specialization,
mips_target_feature,
miri_start_panic,
mmx_target_feature,
never_type,
never_type_fallback,
new,
- __next,
next,
nll,
no,
no_link,
no_main,
no_mangle,
+ no_niche,
+ no_sanitize,
+ no_stack_check,
+ no_start,
+ no_std,
nomem,
non_ascii_idents,
- None,
non_exhaustive,
- no_niche,
non_modrs_mods,
+ none_error,
nontemporal_store,
nontrapping_dash_fptoint: "nontrapping-fptoint",
noreturn,
- no_sanitize,
nostack,
- no_stack_check,
- no_start,
- no_std,
not,
note,
object_safe_for_dispatch,
offset,
- Ok,
omit_gdb_pretty_printer_section,
on,
on_unimplemented,
oom,
opaque,
ops,
+ opt_out_copy,
optimize,
optimize_attribute,
optin_builtin_traits,
option,
- Option,
option_env,
- options,
option_type,
- opt_out_copy,
+ options,
or,
- Ord,
- Ordering,
or_patterns,
+ other,
out,
- Output,
overlapping_marker_traits,
owned_box,
packed,
param_attrs,
parent_trait,
partial_cmp,
- PartialEq,
partial_ord,
- PartialOrd,
passes,
pat,
path,
pattern_parentheses,
- Pending,
phantom_data,
pin,
- Pin,
pinned,
platform_intrinsics,
plugin,
plugins,
pointer,
poll,
- Poll,
+ position,
post_dash_lto: "post-lto",
powerpc_target_feature,
powf32,
powf64,
powif32,
powif64,
- precise_pointer_size_matching,
pre_dash_lto: "pre-lto",
+ precise_pointer_size_matching,
+ precision,
pref_align_of,
prefetch_read_data,
prefetch_read_instruction,
preserves_flags,
primitive,
proc_dash_macro: "proc-macro",
- ProceduralMasqueradeDummyType,
proc_macro,
proc_macro_attribute,
proc_macro_def_site,
proc_macro_derive,
proc_macro_expr,
proc_macro_gen,
- ProcMacroHack,
proc_macro_hygiene,
proc_macro_internals,
proc_macro_mod,
quad_precision_float,
question_mark,
quote,
- Range,
- RangeFrom,
- RangeFull,
- RangeInclusive,
- RangeTo,
- RangeToInclusive,
raw_dylib,
raw_identifiers,
raw_ref_op,
- Rc,
+ re_rebalance_coherence,
+ read_enum,
+ read_enum_variant,
+ read_enum_variant_arg,
+ read_struct,
+ read_struct_field,
readonly,
- Ready,
realloc,
reason,
receiver,
repr_packed,
repr_simd,
repr_transparent,
- re_rebalance_coherence,
result,
- Result,
result_type,
- Return,
rhs,
rintf32,
rintf64,
rust_2015_preview,
rust_2018_preview,
rust_begin_unwind,
+ rust_eh_personality,
+ rust_eh_register_frames,
+ rust_eh_unregister_frames,
+ rust_oom,
rustc,
rustc_allocator,
rustc_allocator_nounwind,
rustc_const_stable,
rustc_const_unstable,
rustc_conversion_suggestion,
- RustcDecodable,
rustc_def_path,
rustc_deprecated,
rustc_diagnostic_item,
rustc_dump_env_program_clauses,
rustc_dump_program_clauses,
rustc_dump_user_substs,
- RustcEncodable,
rustc_error,
rustc_expected_cgu_reuse,
rustc_if_this_changed,
rustc_promotable,
rustc_regions,
rustc_reservation_impl,
+ rustc_serialize,
rustc_specialization_trait,
rustc_stable,
rustc_std_internal_symbol,
rustc_then_this_would_need,
rustc_unsafe_specialization_marker,
rustc_variance,
- rust_eh_personality,
rustfmt,
- rust_oom,
rvalue_static_promotion,
sanitize,
sanitizer_runtime,
saturating_add,
saturating_sub,
- _Self,
self_in_typedefs,
self_struct_ctor,
semitransparent,
- Send,
send_trait,
shl,
shl_assign,
sinf32,
sinf64,
size,
- sized,
size_of,
size_of_val,
+ sized,
slice,
slice_alloc,
slice_patterns,
slice_u8_alloc,
slicing_syntax,
soft,
- Some,
specialization,
speed,
+ spotlight,
sqrtf32,
sqrtf64,
sse4a_target_feature,
stable,
staged_api,
start,
+ state,
static_in_const,
- staticlib,
static_nobundle,
static_recursion,
+ staticlib,
std,
std_inject,
stmt,
stringify,
struct_field_attributes,
struct_inherit,
+ struct_variant,
structural_match,
structural_peq,
structural_teq,
- struct_variant,
sty,
sub,
sub_assign,
suggestion,
sym,
sync,
- Sync,
sync_trait,
- Target,
target_arch,
target_endian,
target_env,
target_thread_local,
target_vendor,
task,
- _task_context,
tbm_target_feature,
termination,
termination_trait,
trivial_bounds,
truncf32,
truncf64,
- Try,
try_blocks,
try_trait,
tt,
tuple_indexing,
two_phase,
ty,
- Ty,
- TyCtxt,
- TyKind,
type_alias_enum_variants,
type_alias_impl_trait,
type_ascription,
unwind,
unwind_attributes,
unwrap_or,
- used,
use_extern_macros,
use_nested_groups,
+ used,
usize,
v1,
va_arg,
va_copy,
va_end,
- val,
va_list,
+ va_start,
+ val,
var,
variant_count,
- va_start,
vec,
- Vec,
vec_type,
version,
vis,
wasm_import_module,
wasm_target_feature,
while_let,
+ width,
windows,
windows_subsystem,
wrapping_add,
wrapping_mul,
wrapping_sub,
write_bytes,
- Yield,
}
}
#![doc(html_root_url = "https://doc.rust-lang.org/nightly/")]
#![feature(bool_to_option)]
-#![cfg_attr(bootstrap, feature(const_if_match))]
#![feature(const_fn)]
#![feature(const_panic)]
#![feature(nll)]
("powerpc64-wrs-vxworks", powerpc64_wrs_vxworks),
("mipsel-sony-psp", mipsel_sony_psp),
+ ("thumbv4t-none-eabi", thumbv4t_none_eabi),
}
/// Everything `rustc` knows about how to compile for a specific target.
/// for this target unconditionally.
pub no_builtins: bool,
- /// The codegen backend to use for this target, typically "llvm"
- pub codegen_backend: String,
-
/// The default visibility for symbols in this target should be "hidden"
/// rather than "default"
pub default_hidden_visibility: bool,
requires_lto: false,
singlethread: false,
no_builtins: false,
- codegen_backend: "llvm".to_string(),
default_hidden_visibility: false,
emit_debug_gdb_scripts: true,
requires_uwtable: false,
key!(requires_lto, bool);
key!(singlethread, bool);
key!(no_builtins, bool);
- key!(codegen_backend);
key!(default_hidden_visibility, bool);
key!(emit_debug_gdb_scripts, bool);
key!(requires_uwtable, bool);
target_option_val!(requires_lto);
target_option_val!(singlethread);
target_option_val!(no_builtins);
- target_option_val!(codegen_backend);
target_option_val!(default_hidden_visibility);
target_option_val!(emit_debug_gdb_scripts);
target_option_val!(requires_uwtable);
--- /dev/null
+//! Targets the ARMv4T, with code as `t32` code by default.
+//!
+//! Primarily of use for the GBA, but usable with other devices too.
+//!
+//! Please ping @Lokathor if changes are needed.
+//!
+//! This target profile assumes that you have the ARM binutils in your path (specifically the linker, `arm-none-eabi-ld`). They can be obtained for free for all major OSes from the ARM developer's website, and they may also be available in your system's package manager. Unfortunately, the standard linker that Rust uses (`lld`) only supports as far back as `ARMv5TE`, so we must use the GNU `ld` linker.
+//!
+//! **Important:** This target profile **does not** specify a linker script. You just get the default link script when you build a binary for this target. The default link script is very likely wrong, so you should use `-Clink-arg=-Tmy_script.ld` to override that with a correct linker script.
+
+use crate::spec::{LinkerFlavor, Target, TargetOptions, TargetResult};
+
+/// Builds the `Target` description for `thumbv4t-none-eabi`: bare-metal ARMv4T,
+/// generating Thumb (`t32`) code by default. See the module docs above for the
+/// linker caveats (GNU `ld` required; no linker script is provided).
+pub fn target() -> TargetResult {
+ Ok(Target {
+ llvm_target: "thumbv4t-none-eabi".to_string(),
+ target_endian: "little".to_string(),
+ target_pointer_width: "32".to_string(),
+ target_c_int_width: "32".to_string(),
+ // `none` OS / empty env+vendor: freestanding target, no std support.
+ target_os: "none".to_string(),
+ target_env: "".to_string(),
+ target_vendor: "".to_string(),
+ arch: "arm".to_string(),
+ /* Data layout args are '-' separated:
+ * little endian
+ * stack is 64-bit aligned (EABI)
+ * pointers are 32-bit
+ * i64 must be 64-bit aligned (EABI)
+ * mangle names with ELF style
+ * native integers are 32-bit
+ * All other elements are default
+ */
+ data_layout: "e-S64-p:32:32-i64:64-m:e-n32".to_string(),
+ // GNU `ld` flavor: per the module docs, `lld` only supports ARMv5TE and
+ // newer, so the default Rust linker cannot be used for this target.
+ linker_flavor: LinkerFlavor::Ld,
+ options: TargetOptions {
+ linker: Some("arm-none-eabi-ld".to_string()),
+ linker_is_gnu: true,
+
+ // extra args passed to the external assembler (assuming `arm-none-eabi-as`):
+ // * activate t32/a32 interworking
+ // * use arch ARMv4T
+ // * use little-endian
+ asm_args: vec![
+ "-mthumb-interwork".to_string(),
+ "-march=armv4t".to_string(),
+ "-mlittle-endian".to_string(),
+ ],
+
+ // minimum extra features, these cannot be disabled via -C
+ features: "+soft-float,+strict-align".to_string(),
+
+ // Freestanding entry point: `main` takes no argc/argv.
+ main_needs_argc_argv: false,
+
+ // No thread-local storage (just use a static Cell)
+ has_elf_tls: false,
+
+ // don't have atomic compare-and-swap
+ atomic_cas: false,
+
+ // Everything not set above is inherited from the shared Thumb defaults.
+ ..super::thumb_base::opts()
+ },
+ })
+}
TypeFoldable, WithConstness,
};
use rustc_session::DiagnosticMessageId;
+use rustc_span::symbol::{kw, sym};
use rustc_span::{ExpnKind, MultiSpan, Span, DUMMY_SP};
use std::fmt;
.span_to_snippet(span)
.map(|s| &s == "?")
.unwrap_or(false);
- let is_from = format!("{}", trait_ref.print_only_trait_path())
- .starts_with("std::convert::From<");
+ let is_from = self.tcx.get_diagnostic_item(sym::from_trait)
+ == Some(trait_ref.def_id());
let is_unsize =
{ Some(trait_ref.def_id()) == self.tcx.lang_items().unsize_trait() };
let (message, note) = if is_try && is_from {
))
);
- let should_convert_option_to_result =
- format!("{}", trait_ref.print_only_trait_path())
- .starts_with("std::convert::From<std::option::NoneError");
- let should_convert_result_to_option = format!("{}", trait_ref)
- .starts_with("<std::option::NoneError as std::convert::From<");
if is_try && is_from {
+ let none_error = self
+ .tcx
+ .get_diagnostic_item(sym::none_error)
+ .map(|def_id| tcx.type_of(def_id));
+ let should_convert_option_to_result =
+ Some(trait_ref.skip_binder().substs.type_at(1)) == none_error;
+ let should_convert_result_to_option =
+ Some(trait_ref.self_ty().skip_binder()) == none_error;
if should_convert_option_to_result {
err.span_suggestion_verbose(
span.shrink_to_lo(),
// Additional context information explaining why the closure only implements
// a particular trait.
- if let Some(tables) = self.in_progress_tables {
- let tables = tables.borrow();
- match (found_kind, tables.closure_kind_origins().get(hir_id)) {
+ if let Some(typeck_results) = self.in_progress_typeck_results {
+ let typeck_results = typeck_results.borrow();
+ match (found_kind, typeck_results.closure_kind_origins().get(hir_id)) {
(ty::ClosureKind::FnOnce, Some((span, name))) => {
err.span_label(
*span,
(self.tcx.sess.source_map().span_to_snippet(span), &obligation.cause.code)
{
let generics = self.tcx.generics_of(*def_id);
- if generics.params.iter().any(|p| p.name.as_str() != "Self")
+ if generics.params.iter().any(|p| p.name != kw::SelfUpper)
&& !snippet.ends_with('>')
{
// FIXME: To avoid spurious suggestions in functions where type arguments
let mut flags = vec![];
flags.push((
- sym::item_context,
+ sym::ItemContext,
self.describe_enclosure(obligation.cause.body_id).map(|s| s.to_owned()),
));
self, suggest_constraining_type_param, AdtKind, DefIdTree, Infer, InferTy, ToPredicate, Ty,
TyCtxt, TypeFoldable, WithConstness,
};
-use rustc_middle::ty::{TypeAndMut, TypeckTables};
+use rustc_middle::ty::{TypeAndMut, TypeckResults};
use rustc_span::symbol::{kw, sym, Ident, Symbol};
use rustc_span::{MultiSpan, Span, DUMMY_SP};
use std::fmt;
outer_generator: Option<DefId>,
trait_ref: ty::TraitRef<'tcx>,
target_ty: Ty<'tcx>,
- tables: &ty::TypeckTables<'tcx>,
+ typeck_results: &ty::TypeckResults<'tcx>,
obligation: &PredicateObligation<'tcx>,
next_code: Option<&ObligationCauseCode<'tcx>>,
);
let mut visitor = ReturnsVisitor::default();
visitor.visit_body(&body);
- let tables = self.in_progress_tables.map(|t| t.borrow()).unwrap();
+ let typeck_results = self.in_progress_typeck_results.map(|t| t.borrow()).unwrap();
let mut ret_types = visitor
.returns
.iter()
- .filter_map(|expr| tables.node_type_opt(expr.hir_id))
+ .filter_map(|expr| typeck_results.node_type_opt(expr.hir_id))
.map(|ty| self.resolve_vars_if_possible(&ty));
let (last_ty, all_returns_have_same_type, only_never_return) = ret_types.clone().fold(
(None, true, true),
},
);
let all_returns_conform_to_trait =
- if let Some(ty_ret_ty) = tables.node_type_opt(ret_ty.hir_id) {
+ if let Some(ty_ret_ty) = typeck_results.node_type_opt(ret_ty.hir_id) {
match ty_ret_ty.kind {
ty::Dynamic(predicates, _) => {
let cause = ObligationCause::misc(ret_ty.span, ret_ty.hir_id);
// Point at all the `return`s in the function as they have failed trait bounds.
let mut visitor = ReturnsVisitor::default();
visitor.visit_body(&body);
- let tables = self.in_progress_tables.map(|t| t.borrow()).unwrap();
+ let typeck_results = self.in_progress_typeck_results.map(|t| t.borrow()).unwrap();
for expr in &visitor.returns {
- if let Some(returned_ty) = tables.node_type_opt(expr.hir_id) {
+ if let Some(returned_ty) = typeck_results.node_type_opt(expr.hir_id) {
let ty = self.resolve_vars_if_possible(&returned_ty);
err.span_label(expr.span, &format!("this returned value is of type `{}`", ty));
}
return false;
}
- // Get the tables from the infcx if the generator is the function we are
+ // Get the typeck results from the infcx if the generator is the function we are
// currently type-checking; otherwise, get them by performing a query.
// This is needed to avoid cycles.
- let in_progress_tables = self.in_progress_tables.map(|t| t.borrow());
+ let in_progress_typeck_results = self.in_progress_typeck_results.map(|t| t.borrow());
let generator_did_root = self.tcx.closure_base_def_id(generator_did);
debug!(
"maybe_note_obligation_cause_for_async_await: generator_did={:?} \
- generator_did_root={:?} in_progress_tables.hir_owner={:?} span={:?}",
+ generator_did_root={:?} in_progress_typeck_results.hir_owner={:?} span={:?}",
generator_did,
generator_did_root,
- in_progress_tables.as_ref().map(|t| t.hir_owner),
+ in_progress_typeck_results.as_ref().map(|t| t.hir_owner),
span
);
- let query_tables;
- let tables: &TypeckTables<'tcx> = match &in_progress_tables {
+ let query_typeck_results;
+ let typeck_results: &TypeckResults<'tcx> = match &in_progress_typeck_results {
Some(t) if t.hir_owner.to_def_id() == generator_did_root => t,
_ => {
- query_tables = self.tcx.typeck_tables_of(generator_did.expect_local());
- &query_tables
+ query_typeck_results = self.tcx.typeck(generator_did.expect_local());
+ &query_typeck_results
}
};
if let Some(upvars) = self.tcx.upvars_mentioned(generator_did) {
interior_or_upvar_span = upvars.iter().find_map(|(upvar_id, upvar)| {
- let upvar_ty = tables.node_type(*upvar_id);
+ let upvar_ty = typeck_results.node_type(*upvar_id);
let upvar_ty = self.resolve_vars_if_possible(&upvar_ty);
if ty_matches(&upvar_ty) {
Some(GeneratorInteriorOrUpvar::Upvar(upvar.span))
});
};
- tables
+ typeck_results
.generator_interior_types
.iter()
.find(|ty::GeneratorInteriorTypeCause { ty, .. }| ty_matches(ty))
.into_iter()
.map(|id| hir.expect_expr(id))
.find(|await_expr| {
- let ty = tables.expr_ty_adjusted(&await_expr);
+ let ty = typeck_results.expr_ty_adjusted(&await_expr);
debug!(
"maybe_note_obligation_cause_for_async_await: await_expr={:?}",
await_expr
debug!(
"maybe_note_obligation_cause_for_async_await: interior_or_upvar={:?} \
generator_interior_types={:?}",
- interior_or_upvar_span, tables.generator_interior_types
+ interior_or_upvar_span, typeck_results.generator_interior_types
);
if let Some(interior_or_upvar_span) = interior_or_upvar_span {
self.note_obligation_cause_for_async_await(
outer_generator,
trait_ref,
target_ty,
- tables,
+ typeck_results,
obligation,
next_code,
);
outer_generator: Option<DefId>,
trait_ref: ty::TraitRef<'tcx>,
target_ty: Ty<'tcx>,
- tables: &ty::TypeckTables<'tcx>,
+ typeck_results: &ty::TypeckResults<'tcx>,
obligation: &PredicateObligation<'tcx>,
next_code: Option<&ObligationCauseCode<'tcx>>,
) {
// Look at the last interior type to get a span for the `.await`.
debug!(
"note_obligation_cause_for_async_await generator_interior_types: {:#?}",
- tables.generator_interior_types
+ typeck_results.generator_interior_types
);
explain_yield(interior_span, yield_span, scope_span);
}
// ^^^^^^^ a temporary `&T` created inside this method call due to `&self`
// ```
//
- let is_region_borrow = tables
+ let is_region_borrow = typeck_results
.expr_adjustments(expr)
.iter()
.any(|adj| adj.is_region_borrow());
_ => false,
};
- if (tables.is_method_call(e) && is_region_borrow)
+ if (typeck_results.is_method_call(e) && is_region_borrow)
|| is_raw_borrow_inside_fn_like_call
{
err.span_help(
// `{from_desugaring}` is allowed
Position::ArgumentNamed(s) if s == sym::from_desugaring => (),
// `{ItemContext}` is allowed
- Position::ArgumentNamed(s) if s == sym::item_context => (),
+ Position::ArgumentNamed(s) if s == sym::ItemContext => (),
// So is `{A}` if A is a type parameter
Position::ArgumentNamed(s) => {
match generics.params.iter().find(|param| param.name == s) {
let s = self.0.as_str();
let parser = Parser::new(&s, None, None, false, ParseMode::Format);
- let item_context = (options.get(&sym::item_context)).unwrap_or(&empty_string);
+ let item_context = (options.get(&sym::ItemContext)).unwrap_or(&empty_string);
parser
.map(|p| match p {
Piece::String(s) => s,
} else if s == sym::from_desugaring || s == sym::from_method {
// don't break messages using these two arguments incorrectly
&empty_string
- } else if s == sym::item_context {
+ } else if s == sym::ItemContext {
&item_context
} else {
bug!(
use rustc_data_structures::stack::ensure_sufficient_stack;
use rustc_errors::ErrorReported;
use rustc_hir::def_id::DefId;
-use rustc_hir::lang_items::{FnOnceOutputLangItem, FnOnceTraitLangItem, GeneratorTraitLangItem};
+use rustc_hir::lang_items::{
+ DiscriminantTypeLangItem, FnOnceOutputLangItem, FnOnceTraitLangItem, GeneratorTraitLangItem,
+};
use rustc_infer::infer::resolve::OpportunisticRegionResolver;
use rustc_middle::ty::fold::{TypeFoldable, TypeFolder};
use rustc_middle::ty::subst::Subst;
-use rustc_middle::ty::util::IntTypeExt;
use rustc_middle::ty::{self, ToPolyTraitRef, ToPredicate, Ty, TyCtxt, WithConstness};
use rustc_span::symbol::sym;
use rustc_span::DUMMY_SP;
let self_ty = selcx.infcx().shallow_resolve(obligation.predicate.self_ty());
let substs = tcx.mk_substs([self_ty.into()].iter());
- let assoc_items = tcx.associated_items(tcx.lang_items().discriminant_kind_trait().unwrap());
- // FIXME: emit an error if the trait definition is wrong
- let discriminant_def_id = assoc_items.in_definition_order().next().unwrap().def_id;
-
- let discriminant_ty = match self_ty.kind {
- // Use the discriminant type for enums.
- ty::Adt(adt, _) if adt.is_enum() => adt.repr.discr_type().to_ty(tcx),
- // Default to `i32` for generators.
- ty::Generator(..) => tcx.types.i32,
- // Use `u8` for all other types.
- _ => tcx.types.u8,
- };
+ let discriminant_def_id = tcx.require_lang_item(DiscriminantTypeLangItem, None);
let predicate = ty::ProjectionPredicate {
projection_ty: ty::ProjectionTy { substs, item_def_id: discriminant_def_id },
- ty: discriminant_ty,
+ ty: self_ty.discriminant_ty(tcx),
};
confirm_param_env_candidate(selcx, obligation, ty::Binder::bind(predicate))
let mut inner_callee_path = None;
let def = match callee.kind {
hir::ExprKind::Path(ref qpath) => {
- self.tables.borrow().qpath_res(qpath, callee.hir_id)
+ self.typeck_results.borrow().qpath_res(qpath, callee.hir_id)
}
hir::ExprKind::Call(ref inner_callee, _) => {
// If the call spans more than one line and the callee kind is
}
if let hir::ExprKind::Path(ref inner_qpath) = inner_callee.kind {
inner_callee_path = Some(inner_qpath);
- self.tables.borrow().qpath_res(inner_qpath, inner_callee.hir_id)
+ self.typeck_results
+ .borrow()
+ .qpath_res(inner_qpath, inner_callee.hir_id)
} else {
Res::Err
}
Ok(()) => {
self.trivial_cast_lint(fcx);
debug!(" -> CoercionCast");
- fcx.tables.borrow_mut().set_coercion_cast(self.expr.hir_id.local_id);
+ fcx.typeck_results.borrow_mut().set_coercion_cast(self.expr.hir_id.local_id);
}
Err(ty::error::TypeError::ObjectUnsafeCoercion(did)) => {
self.report_object_unsafe_cast(&fcx, did);
// Up till this point, we have ignored the annotations that the user
// gave. This function will check that they unify successfully.
// Along the way, it also writes out entries for types that the user
- // wrote into our tables, which are then later used by the privacy
+ // wrote into our typeck results, which are then later used by the privacy
// check.
match self.check_supplied_sig_against_expectation(expr_def_id, decl, body, &closure_sigs) {
Ok(infer_ok) => self.register_infer_ok_obligations(infer_ok),
debug!("supplied_sig_of_closure: result={:?}", result);
let c_result = self.inh.infcx.canonicalize_response(&result);
- self.tables.borrow_mut().user_provided_sigs.insert(expr_def_id, c_result);
+ self.typeck_results.borrow_mut().user_provided_sigs.insert(expr_def_id, c_result);
result
}
// First try to coerce the new expression to the type of the previous ones,
// but only if the new expression has no coercion already applied to it.
let mut first_error = None;
- if !self.tables.borrow().adjustments().contains_key(new.hir_id) {
+ if !self.typeck_results.borrow().adjustments().contains_key(new.hir_id) {
let result = self.commit_if_ok(|_| coerce.coerce(new_ty, prev_ty));
match result {
Ok(ok) => {
// previous expressions, other than noop reborrows (ignoring lifetimes).
for expr in exprs {
let expr = expr.as_coercion_site();
- let noop = match self.tables.borrow().expr_adjustments(expr) {
+ let noop = match self.typeck_results.borrow().expr_adjustments(expr) {
&[Adjustment { kind: Adjust::Deref(_), .. }, Adjustment { kind: Adjust::Borrow(AutoBorrow::Ref(_, mutbl_adj)), .. }] =>
{
match self.node_ty(expr.hir_id).kind {
_ => return None,
};
- let self_ty = self.tables.borrow().node_type(method_expr[0].hir_id);
+ let self_ty = self.typeck_results.borrow().node_type(method_expr[0].hir_id);
let self_ty = format!("{:?}", self_ty);
- let name = method_path.ident.as_str();
+ let name = method_path.ident.name;
let is_as_ref_able = (self_ty.starts_with("&std::option::Option")
|| self_ty.starts_with("&std::result::Result")
|| self_ty.starts_with("std::option::Option")
|| self_ty.starts_with("std::result::Result"))
- && (name == "map" || name == "and_then");
+ && (name == sym::map || name == sym::and_then);
match (is_as_ref_able, self.sess().source_map().span_to_snippet(*method_span)) {
(true, Ok(src)) => {
let suggestion = format!("as_ref().{}", src);
let clone_trait = self.tcx.require_lang_item(CloneTraitLangItem, Some(sp));
if let ([arg], Some(true), sym::clone) = (
&args[..],
- self.tables.borrow().type_dependent_def_id(expr.hir_id).map(|did| {
- let ai = self.tcx.associated_item(did);
- ai.container == ty::TraitContainer(clone_trait)
- }),
+ self.typeck_results.borrow().type_dependent_def_id(expr.hir_id).map(
+ |did| {
+ let ai = self.tcx.associated_item(did);
+ ai.container == ty::TraitContainer(clone_trait)
+ },
+ ),
segment.ident.name,
) {
// If this expression had a clone call when suggesting borrowing
// coercions from ! to `expected`.
if ty.is_never() {
assert!(
- !self.tables.borrow().adjustments().contains_key(expr.hir_id),
+ !self.typeck_results.borrow().adjustments().contains_key(expr.hir_id),
"expression with never type wound up being adjusted"
);
let adj_ty = self.next_diverging_ty_var(TypeVariableOrigin {
// This is maybe too permissive, since it allows
// `let u = &raw const Box::new((1,)).0`, which creates an
// immediately dangling raw pointer.
- self.tables.borrow().adjustments().get(base.hir_id).map_or(false, |x| {
+ self.typeck_results.borrow().adjustments().get(base.hir_id).map_or(false, |x| {
x.iter().any(|adj| if let Adjust::Deref(_) = adj.kind { true } else { false })
})
});
// We always require that the type provided as the value for
// a type parameter outlives the moment of instantiation.
- let substs = self.tables.borrow().node_substs(expr.hir_id);
+ let substs = self.typeck_results.borrow().node_substs(expr.hir_id);
self.add_wf_bounds(substs, expr);
ty
})
.collect();
- self.tables
+ self.typeck_results
.borrow_mut()
.fru_field_types_mut()
.insert(expr.hir_id, fru_field_types);
let type_list = fcx.tcx.mk_type_list(type_causes.iter().map(|cause| cause.ty));
let witness = fcx.tcx.mk_generator_witness(ty::Binder::bind(type_list));
- // Store the generator types and spans into the tables for this generator.
- visitor.fcx.inh.tables.borrow_mut().generator_interior_types = type_causes;
+ // Store the generator types and spans into the typeck results for this generator.
+ visitor.fcx.inh.typeck_results.borrow_mut().generator_interior_types = type_causes;
debug!(
"types in generator after region replacement {:?}, span = {:?}",
if let PatKind::Binding(..) = pat.kind {
let scope = self.region_scope_tree.var_scope(pat.hir_id.local_id);
- let ty = self.fcx.tables.borrow().pat_ty(pat);
+ let ty = self.fcx.typeck_results.borrow().pat_ty(pat);
self.record(ty, Some(scope), None, pat.span);
}
}
match &expr.kind {
ExprKind::Call(callee, args) => match &callee.kind {
ExprKind::Path(qpath) => {
- let res = self.fcx.tables.borrow().qpath_res(qpath, callee.hir_id);
+ let res = self.fcx.typeck_results.borrow().qpath_res(qpath, callee.hir_id);
match res {
// Direct calls never need to keep the callee `ty::FnDef`
// ZST in a temporary, so skip its type, just in case it
// If there are adjustments, then record the final type --
// this is the actual value that is being produced.
- if let Some(adjusted_ty) = self.fcx.tables.borrow().expr_ty_adjusted_opt(expr) {
+ if let Some(adjusted_ty) = self.fcx.typeck_results.borrow().expr_ty_adjusted_opt(expr) {
self.record(adjusted_ty, scope, Some(expr), expr.span);
}
//
// The type table might not have information for this expression
// if it is in a malformed scope. (#66387)
- if let Some(ty) = self.fcx.tables.borrow().expr_ty_opt(expr) {
+ if let Some(ty) = self.fcx.typeck_results.borrow().expr_ty_opt(expr) {
self.record(ty, scope, Some(expr), expr.span);
} else {
self.fcx.tcx.sess.delay_span_bug(expr.span, "no type for node");
}
sym::count_code_region => {
- (0, vec![tcx.types.u32, tcx.types.u32, tcx.types.u32], tcx.mk_unit())
+ (0, vec![tcx.types.u64, tcx.types.u32, tcx.types.u32, tcx.types.u32], tcx.mk_unit())
}
sym::coverage_counter_add | sym::coverage_counter_subtract => (
pick: &probe::Pick<'tcx>,
) -> Ty<'tcx> {
// Commit the autoderefs by calling `autoderef` again, but this
- // time writing the results into the various tables.
+ // time writing the results into the various typeck results.
let mut autoderef = self.autoderef(self.span, unadjusted_self_ty);
let (_, n) = match autoderef.nth(pick.autoderefs) {
Some(n) => n,
for import_id in &pick.import_ids {
debug!("used_trait_import: {:?}", import_id);
- Lrc::get_mut(&mut self.tables.borrow_mut().used_trait_imports)
+ Lrc::get_mut(&mut self.typeck_results.borrow_mut().used_trait_imports)
.unwrap()
.insert(*import_id);
}
)?;
debug!("resolve_ufcs: pick={:?}", pick);
{
- let mut tables = self.tables.borrow_mut();
- let used_trait_imports = Lrc::get_mut(&mut tables.used_trait_imports).unwrap();
+ let mut typeck_results = self.typeck_results.borrow_mut();
+ let used_trait_imports = Lrc::get_mut(&mut typeck_results.used_trait_imports).unwrap();
for import_id in pick.import_ids {
debug!("resolve_ufcs: used_trait_import: {:?}", import_id);
used_trait_imports.insert(import_id);
// legal to implement.
let mut candidates = all_traits(self.tcx)
.into_iter()
+ // Don't issue suggestions for unstable traits since they're
+ // unlikely to be implementable anyway
+ .filter(|info| match self.tcx.lookup_stability(info.def_id) {
+ Some(attr) => attr.level.is_stable(),
+ None => true,
+ })
.filter(|info| {
// We approximate the coherence rules to only suggest
// traits that are legal to implement by requiring that
};
// Obtain the span for `param` and use it for a structured suggestion.
let mut suggested = false;
- if let (Some(ref param), Some(ref table)) = (param_type, self.in_progress_tables) {
+ if let (Some(ref param), Some(ref table)) =
+ (param_type, self.in_progress_typeck_results)
+ {
let table_owner = table.borrow().hir_owner;
let generics = self.tcx.generics_of(table_owner.to_def_id());
let type_param = generics.type_param(param, self.tcx);
nodes within the function.
The types of top-level items, which never contain unbound type
-variables, are stored directly into the `tcx` tables.
+variables, are stored directly into the `tcx` typeck_results.
N.B., a type variable is not the same thing as a type parameter. A
type variable is rather an "instance" of a type parameter: that is,
revealed_ty: Ty<'tcx>,
}
-/// A wrapper for `InferCtxt`'s `in_progress_tables` field.
+/// A wrapper for `InferCtxt`'s `in_progress_typeck_results` field.
#[derive(Copy, Clone)]
struct MaybeInProgressTables<'a, 'tcx> {
- maybe_tables: Option<&'a RefCell<ty::TypeckTables<'tcx>>>,
+ maybe_typeck_results: Option<&'a RefCell<ty::TypeckResults<'tcx>>>,
}
impl<'a, 'tcx> MaybeInProgressTables<'a, 'tcx> {
- fn borrow(self) -> Ref<'a, ty::TypeckTables<'tcx>> {
- match self.maybe_tables {
- Some(tables) => tables.borrow(),
- None => bug!("MaybeInProgressTables: inh/fcx.tables.borrow() with no tables"),
+ fn borrow(self) -> Ref<'a, ty::TypeckResults<'tcx>> {
+ match self.maybe_typeck_results {
+ Some(typeck_results) => typeck_results.borrow(),
+ None => bug!(
+ "MaybeInProgressTables: inh/fcx.typeck_results.borrow() with no typeck results"
+ ),
}
}
- fn borrow_mut(self) -> RefMut<'a, ty::TypeckTables<'tcx>> {
- match self.maybe_tables {
- Some(tables) => tables.borrow_mut(),
- None => bug!("MaybeInProgressTables: inh/fcx.tables.borrow_mut() with no tables"),
+ fn borrow_mut(self) -> RefMut<'a, ty::TypeckResults<'tcx>> {
+ match self.maybe_typeck_results {
+ Some(typeck_results) => typeck_results.borrow_mut(),
+ None => bug!(
+ "MaybeInProgressTables: inh/fcx.typeck_results.borrow_mut() with no typeck results"
+ ),
}
}
}
pub struct Inherited<'a, 'tcx> {
infcx: InferCtxt<'a, 'tcx>,
- tables: MaybeInProgressTables<'a, 'tcx>,
+ typeck_results: MaybeInProgressTables<'a, 'tcx>,
locals: RefCell<HirIdMap<LocalTy<'tcx>>>,
let hir_owner = tcx.hir().local_def_id_to_hir_id(def_id).owner;
InheritedBuilder {
- infcx: tcx.infer_ctxt().with_fresh_in_progress_tables(hir_owner),
+ infcx: tcx.infer_ctxt().with_fresh_in_progress_typeck_results(hir_owner),
def_id,
}
}
let body_id = tcx.hir().maybe_body_owned_by(item_id);
Inherited {
- tables: MaybeInProgressTables { maybe_tables: infcx.in_progress_tables },
+ typeck_results: MaybeInProgressTables {
+ maybe_typeck_results: infcx.in_progress_typeck_results,
+ },
infcx,
fulfillment_cx: RefCell::new(TraitEngine::new(tcx)),
locals: RefCell::new(Default::default()),
fn typeck_item_bodies(tcx: TyCtxt<'_>, crate_num: CrateNum) {
debug_assert!(crate_num == LOCAL_CRATE);
tcx.par_body_owners(|body_owner_def_id| {
- tcx.ensure().typeck_tables_of(body_owner_def_id);
+ tcx.ensure().typeck(body_owner_def_id);
});
}
method::provide(providers);
*providers = Providers {
typeck_item_bodies,
- typeck_tables_of_const_arg,
- typeck_tables_of,
- diagnostic_only_typeck_tables_of,
- has_typeck_tables,
+ typeck_const_arg,
+ typeck,
+ diagnostic_only_typeck,
+ has_typeck_results,
adt_destructor,
used_trait_imports,
check_item_well_formed,
/// it's body-id, fn-header and fn-decl (if any). Otherwise,
/// returns `None`.
///
-/// If this function returns `Some`, then `typeck_tables(def_id)` will
-/// succeed; if it returns `None`, then `typeck_tables(def_id)` may or
+/// If this function returns `Some`, then `typeck_results(def_id)` will
+/// succeed; if it returns `None`, then `typeck_results(def_id)` may or
/// may not succeed. In some cases where this function returns `None`
-/// (notably closures), `typeck_tables(def_id)` would wind up
+/// (notably closures), `typeck_results(def_id)` would wind up
/// redirecting to the owning function.
fn primary_body_of(
tcx: TyCtxt<'_>,
}
}
-fn has_typeck_tables(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
- // Closures' tables come from their outermost function,
+fn has_typeck_results(tcx: TyCtxt<'_>, def_id: DefId) -> bool {
+ // Closures' typeck results come from their outermost function,
// as they are part of the same "inference environment".
let outer_def_id = tcx.closure_base_def_id(def_id);
if outer_def_id != def_id {
- return tcx.has_typeck_tables(outer_def_id);
+ return tcx.has_typeck_results(outer_def_id);
}
if let Some(def_id) = def_id.as_local() {
}
fn used_trait_imports(tcx: TyCtxt<'_>, def_id: LocalDefId) -> &FxHashSet<LocalDefId> {
- &*tcx.typeck_tables_of(def_id).used_trait_imports
+ &*tcx.typeck(def_id).used_trait_imports
}
/// Inspects the substs of opaque types, replacing any inference variables
val.fold_with(&mut FixupFolder { tcx })
}
-fn typeck_tables_of_const_arg<'tcx>(
+fn typeck_const_arg<'tcx>(
tcx: TyCtxt<'tcx>,
(did, param_did): (LocalDefId, DefId),
-) -> &ty::TypeckTables<'tcx> {
+) -> &ty::TypeckResults<'tcx> {
let fallback = move || tcx.type_of(param_did);
- typeck_tables_of_with_fallback(tcx, did, fallback)
+ typeck_with_fallback(tcx, did, fallback)
}
-fn typeck_tables_of<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &ty::TypeckTables<'tcx> {
+fn typeck<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &ty::TypeckResults<'tcx> {
if let Some(param_did) = tcx.opt_const_param_of(def_id) {
- tcx.typeck_tables_of_const_arg((def_id, param_did))
+ tcx.typeck_const_arg((def_id, param_did))
} else {
let fallback = move || tcx.type_of(def_id.to_def_id());
- typeck_tables_of_with_fallback(tcx, def_id, fallback)
+ typeck_with_fallback(tcx, def_id, fallback)
}
}
-/// Used only to get `TypeckTables` for type inference during error recovery.
+/// Used only to get `TypeckResults` for type inference during error recovery.
/// Currently only used for type inference of `static`s and `const`s to avoid type cycle errors.
-fn diagnostic_only_typeck_tables_of<'tcx>(
- tcx: TyCtxt<'tcx>,
- def_id: LocalDefId,
-) -> &ty::TypeckTables<'tcx> {
+fn diagnostic_only_typeck<'tcx>(tcx: TyCtxt<'tcx>, def_id: LocalDefId) -> &ty::TypeckResults<'tcx> {
let fallback = move || {
let span = tcx.hir().span(tcx.hir().as_local_hir_id(def_id));
tcx.ty_error_with_message(span, "diagnostic only typeck table used")
};
- typeck_tables_of_with_fallback(tcx, def_id, fallback)
+ typeck_with_fallback(tcx, def_id, fallback)
}
-fn typeck_tables_of_with_fallback<'tcx>(
+fn typeck_with_fallback<'tcx>(
tcx: TyCtxt<'tcx>,
def_id: LocalDefId,
fallback: impl Fn() -> Ty<'tcx> + 'tcx,
-) -> &'tcx ty::TypeckTables<'tcx> {
- // Closures' tables come from their outermost function,
+) -> &'tcx ty::TypeckResults<'tcx> {
+ // Closures' typeck results come from their outermost function,
// as they are part of the same "inference environment".
let outer_def_id = tcx.closure_base_def_id(def_id.to_def_id()).expect_local();
if outer_def_id != def_id {
- return tcx.typeck_tables_of(outer_def_id);
+ return tcx.typeck(outer_def_id);
}
let id = tcx.hir().as_local_hir_id(def_id);
});
let body = tcx.hir().body(body_id);
- let tables = Inherited::build(tcx, def_id).enter(|inh| {
+ let typeck_results = Inherited::build(tcx, def_id).enter(|inh| {
let param_env = tcx.param_env(def_id);
let fcx = if let (Some(header), Some(decl)) = (fn_header, fn_decl) {
let fn_sig = if crate::collect::get_infer_ret_ty(&decl.output).is_some() {
fcx.resolve_type_vars_in_body(body)
});
- // Consistency check our TypeckTables instance can hold all ItemLocalIds
+ // Consistency check our TypeckResults instance can hold all ItemLocalIds
// it will need to hold.
- assert_eq!(tables.hir_owner, id.owner);
+ assert_eq!(typeck_results.hir_owner, id.owner);
- tables
+ typeck_results
}
fn check_abi(tcx: TyCtxt<'_>, span: Span, abi: Abi) {
"visit_local: ty.hir_id={:?} o_ty={:?} revealed_ty={:?} c_ty={:?}",
ty.hir_id, o_ty, revealed_ty, c_ty
);
- self.fcx.tables.borrow_mut().user_provided_types_mut().insert(ty.hir_id, c_ty);
+ self.fcx
+ .typeck_results
+ .borrow_mut()
+ .user_provided_types_mut()
+ .insert(ty.hir_id, c_ty);
Some(LocalTy { decl_ty: o_ty, revealed_ty })
}
fcx.write_ty(param.hir_id, param_ty);
}
- inherited.tables.borrow_mut().liberated_fn_sigs_mut().insert(fn_id, fn_sig);
+ inherited.typeck_results.borrow_mut().liberated_fn_sigs_mut().insert(fn_id, fn_sig);
if let ty::Dynamic(..) = declared_ret_ty.kind {
// FIXME: We need to verify that the return type is `Sized` after the return expression has
let mut label = false;
if let Some((hir_id, visitor)) = get_owner_return_paths(tcx, def_id) {
- let tables = tcx.typeck_tables_of(tcx.hir().local_def_id(hir_id));
+ let typeck_results = tcx.typeck(tcx.hir().local_def_id(hir_id));
if visitor
.returns
.iter()
- .filter_map(|expr| tables.node_type_opt(expr.hir_id))
+ .filter_map(|expr| typeck_results.node_type_opt(expr.hir_id))
.all(|ty| matches!(ty.kind, ty::Never))
{
let spans = visitor
.returns
.iter()
- .filter(|expr| tables.node_type_opt(expr.hir_id).is_some())
+ .filter(|expr| typeck_results.node_type_opt(expr.hir_id).is_some())
.map(|expr| expr.span)
.collect::<Vec<Span>>();
let span_len = spans.len();
for (sp, ty) in visitor
.returns
.iter()
- .filter_map(|e| tables.node_type_opt(e.hir_id).map(|t| (e.span, t)))
+ .filter_map(|e| typeck_results.node_type_opt(e.hir_id).map(|t| (e.span, t)))
.filter(|(_, ty)| !matches!(ty.kind, ty::Never))
{
struct VisitTypes(Vec<DefId>);
..
}) => {
let hir_id = tcx.hir().as_local_hir_id(def_id);
- let tables =
- tcx.typeck_tables_of(tcx.hir().local_def_id(tcx.hir().get_parent_item(hir_id)));
- if let Some(ty) = tables.node_type_opt(expr.hir_id) {
+ let typeck_results =
+ tcx.typeck(tcx.hir().local_def_id(tcx.hir().get_parent_item(hir_id)));
+ if let Some(ty) = typeck_results.node_type_opt(expr.hir_id) {
err.span_label(
expr.span,
&format!(
// Consts can play a role in type-checking, so they are included here.
hir::ItemKind::Static(..) => {
let def_id = tcx.hir().local_def_id(it.hir_id);
- tcx.ensure().typeck_tables_of(def_id);
+ tcx.ensure().typeck(def_id);
maybe_check_static_with_link_section(tcx, def_id, it.span);
}
hir::ItemKind::Const(..) => {
- tcx.ensure().typeck_tables_of(tcx.hir().local_def_id(it.hir_id));
+ tcx.ensure().typeck(tcx.hir().local_def_id(it.hir_id));
}
hir::ItemKind::Enum(ref enum_definition, _) => {
check_enum(tcx, it.span, &enum_definition.variants, it.hir_id);
for v in vs {
if let Some(ref e) = v.disr_expr {
- tcx.ensure().typeck_tables_of(tcx.hir().local_def_id(e.hir_id));
+ tcx.ensure().typeck(tcx.hir().local_def_id(e.hir_id));
}
}
self.resolve_vars_if_possible(&ty),
self.tag()
);
- self.tables.borrow_mut().node_types_mut().insert(id, ty);
+ self.typeck_results.borrow_mut().node_types_mut().insert(id, ty);
if ty.references_error() {
self.has_errors.set(true);
}
pub fn write_field_index(&self, hir_id: hir::HirId, index: usize) {
- self.tables.borrow_mut().field_indices_mut().insert(hir_id, index);
+ self.typeck_results.borrow_mut().field_indices_mut().insert(hir_id, index);
}
fn write_resolution(&self, hir_id: hir::HirId, r: Result<(DefKind, DefId), ErrorReported>) {
- self.tables.borrow_mut().type_dependent_defs_mut().insert(hir_id, r);
+ self.typeck_results.borrow_mut().type_dependent_defs_mut().insert(hir_id, r);
}
pub fn write_method_call(&self, hir_id: hir::HirId, method: MethodCallee<'tcx>) {
if !substs.is_noop() {
debug!("write_substs({:?}, {:?}) in fcx {}", node_id, substs, self.tag());
- self.tables.borrow_mut().node_substs_mut().insert(node_id, substs);
+ self.typeck_results.borrow_mut().node_substs_mut().insert(node_id, substs);
}
}
);
if !canonical_user_type_annotation.is_identity() {
- self.tables
+ self.typeck_results
.borrow_mut()
.user_provided_types_mut()
.insert(hir_id, canonical_user_type_annotation);
})
});
- match self.tables.borrow_mut().adjustments_mut().entry(expr.hir_id) {
+ match self.typeck_results.borrow_mut().adjustments_mut().entry(expr.hir_id) {
Entry::Vacant(entry) => {
entry.insert(adj);
}
if Self::can_contain_user_lifetime_bounds(ty) {
let c_ty = self.infcx.canonicalize_response(&UserType::Ty(ty));
debug!("to_ty_saving_user_provided_ty: c_ty={:?}", c_ty);
- self.tables.borrow_mut().user_provided_types_mut().insert(ast_ty.hir_id, c_ty);
+ self.typeck_results.borrow_mut().user_provided_types_mut().insert(ast_ty.hir_id, c_ty);
}
ty
}
pub fn node_ty(&self, id: hir::HirId) -> Ty<'tcx> {
- match self.tables.borrow().node_types().get(id) {
+ match self.typeck_results.borrow().node_types().get(id) {
Some(&t) => t,
None if self.is_tainted_by_errors() => self.tcx.ty_error(),
None => {
}
QPath::TypeRelative(ref qself, ref segment) => (self.to_ty(qself), qself, segment),
};
- if let Some(&cached_result) = self.tables.borrow().type_dependent_defs().get(hir_id) {
+ if let Some(&cached_result) = self.typeck_results.borrow().type_dependent_defs().get(hir_id)
+ {
// Return directly on cache hit. This is useful to avoid doubly reporting
// errors with default match binding modes. See #44614.
let def =
let arm_spans: Vec<Span> = arms
.iter()
.filter_map(|arm| {
- self.in_progress_tables
- .and_then(|tables| tables.borrow().node_type_opt(arm.body.hir_id))
+ self.in_progress_typeck_results
+ .and_then(|typeck_results| {
+ typeck_results.borrow().node_type_opt(arm.body.hir_id)
+ })
.and_then(|arm_ty| {
if arm_ty.is_never() {
None
// some cases applied on the RHS, on top of which we need
// to autoref, which is not allowed by apply_adjustments.
// self.apply_adjustments(rhs_expr, vec![autoref]);
- self.tables
+ self.typeck_results
.borrow_mut()
.adjustments_mut()
.entry(rhs_expr.hir_id)
err.span_label(span, ty.to_string());
if let FnDef(def_id, _) = ty.kind {
let source_map = self.tcx.sess.source_map();
- if !self.tcx.has_typeck_tables(def_id) {
+ if !self.tcx.has_typeck_results(def_id) {
return false;
}
// We're emitting a suggestion, so we can just ignore regions
let fn_sig = self.tcx.fn_sig(def_id).skip_binder();
let other_ty = if let FnDef(def_id, _) = other_ty.kind {
- if !self.tcx.has_typeck_tables(def_id) {
+ if !self.tcx.has_typeck_results(def_id) {
return false;
}
// We're emitting a suggestion, so we can just ignore regions
if !pat_adjustments.is_empty() {
debug!("default binding mode is now {:?}", def_bm);
- self.inh.tables.borrow_mut().pat_adjustments_mut().insert(pat.hir_id, pat_adjustments);
+ self.inh
+ .typeck_results
+ .borrow_mut()
+ .pat_adjustments_mut()
+ .insert(pat.hir_id, pat_adjustments);
}
(expected, def_bm)
_ => BindingMode::convert(ba),
};
// ...and store it in a side table:
- self.inh.tables.borrow_mut().pat_binding_modes_mut().insert(pat.hir_id, bm);
+ self.inh.typeck_results.borrow_mut().pat_binding_modes_mut().insert(pat.hir_id, bm);
debug!("check_pat_ident: pat.hir_id={:?} bm={:?}", pat.hir_id, bm);
let mut source = self.node_ty(expr.hir_id);
// Do not mutate adjustments in place, but rather take them,
// and replace them after mutating them, to avoid having the
- // tables borrowed during (`deref_mut`) method resolution.
+ // typeck results borrowed during (`deref_mut`) method resolution.
let previous_adjustments =
- self.tables.borrow_mut().adjustments_mut().remove(expr.hir_id);
+ self.typeck_results.borrow_mut().adjustments_mut().remove(expr.hir_id);
if let Some(mut adjustments) = previous_adjustments {
for adjustment in &mut adjustments {
if let Adjust::Deref(Some(ref mut deref)) = adjustment.kind {
}
source = adjustment.target;
}
- self.tables.borrow_mut().adjustments_mut().insert(expr.hir_id, adjustments);
+ self.typeck_results.borrow_mut().adjustments_mut().insert(expr.hir_id, adjustments);
}
match expr.kind {
hir::ExprKind::Index(ref base_expr, ref index_expr) => {
// We need to get the final type in case dereferences were needed for the trait
// to apply (#72002).
- let index_expr_ty = self.tables.borrow().expr_ty_adjusted(index_expr);
+ let index_expr_ty = self.typeck_results.borrow().expr_ty_adjusted(index_expr);
self.convert_place_op_to_mutable(
PlaceOp::Index,
expr,
arg_tys: &[Ty<'tcx>],
) {
debug!("convert_place_op_to_mutable({:?}, {:?}, {:?}, {:?})", op, expr, base_expr, arg_tys);
- if !self.tables.borrow().is_method_call(expr) {
+ if !self.typeck_results.borrow().is_method_call(expr) {
debug!("convert_place_op_to_mutable - builtin, nothing to do");
return;
}
// Need to deref because overloaded place ops take self by-reference.
let base_ty = self
- .tables
+ .typeck_results
.borrow()
.expr_ty_adjusted(base_expr)
.builtin_deref(false)
// region and mutability.
let base_expr_ty = self.node_ty(base_expr.hir_id);
if let Some(adjustments) =
- self.tables.borrow_mut().adjustments_mut().get_mut(base_expr.hir_id)
+ self.typeck_results.borrow_mut().adjustments_mut().get_mut(base_expr.hir_id)
{
let mut source = base_expr_ty;
for adjustment in &mut adjustments[..] {
use rustc_hir::PatKind;
use rustc_infer::infer::outlives::env::OutlivesEnvironment;
use rustc_infer::infer::{self, RegionObligation, RegionckMode};
+use rustc_middle::hir::place::{PlaceBase, PlaceWithHirId};
use rustc_middle::ty::adjustment;
use rustc_middle::ty::{self, Ty};
use rustc_span::Span;
self.body_owner = self.tcx.hir().body_owner_def_id(body_id);
let fn_sig = {
- match self.tables.borrow().liberated_fn_sigs().get(id) {
+ match self.typeck_results.borrow().liberated_fn_sigs().get(id) {
Some(f) => *f,
None => {
bug!("No fn-sig entry for id={:?}", id);
&self.infcx,
self.outlives_environment.param_env,
self.body_owner,
- &self.tables.borrow(),
+ &self.typeck_results.borrow(),
))
}
fn constrain_adjustments(
&mut self,
expr: &hir::Expr<'_>,
- ) -> mc::McResult<mc::PlaceWithHirId<'tcx>> {
+ ) -> mc::McResult<PlaceWithHirId<'tcx>> {
debug!("constrain_adjustments(expr={:?})", expr);
let mut place = self.with_mc(|mc| mc.cat_expr_unadjusted(expr))?;
- let tables = self.tables.borrow();
- let adjustments = tables.expr_adjustments(&expr);
+ let typeck_results = self.typeck_results.borrow();
+ let adjustments = typeck_results.expr_adjustments(&expr);
if adjustments.is_empty() {
return Ok(place);
}
fn check_safety_of_rvalue_destructor_if_necessary(
&mut self,
- place_with_id: &mc::PlaceWithHirId<'tcx>,
+ place_with_id: &PlaceWithHirId<'tcx>,
span: Span,
) {
- if let mc::PlaceBase::Rvalue = place_with_id.place.base {
+ if let PlaceBase::Rvalue = place_with_id.place.base {
if place_with_id.place.projections.is_empty() {
let typ = self.resolve_type(place_with_id.place.ty());
let body_id = self.body_id;
/// Link lifetimes of any ref bindings in `root_pat` to the pointers found
/// in the discriminant, if needed.
- fn link_pattern(&self, discr_cmt: mc::PlaceWithHirId<'tcx>, root_pat: &hir::Pat<'_>) {
+ fn link_pattern(&self, discr_cmt: PlaceWithHirId<'tcx>, root_pat: &hir::Pat<'_>) {
debug!("link_pattern(discr_cmt={:?}, root_pat={:?})", discr_cmt, root_pat);
ignore_err!(self.with_mc(|mc| {
mc.cat_pattern(discr_cmt, root_pat, |sub_cmt, hir::Pat { kind, span, hir_id }| {
// `ref x` pattern
if let PatKind::Binding(..) = kind {
if let Some(ty::BindByReference(mutbl)) =
- mc.tables.extract_binding_mode(self.tcx.sess, *hir_id, *span)
+ mc.typeck_results.extract_binding_mode(self.tcx.sess, *hir_id, *span)
{
self.link_region_from_node_type(*span, *hir_id, mutbl, &sub_cmt);
}
fn link_autoref(
&self,
expr: &hir::Expr<'_>,
- expr_cmt: &mc::PlaceWithHirId<'tcx>,
+ expr_cmt: &PlaceWithHirId<'tcx>,
autoref: &adjustment::AutoBorrow<'tcx>,
) {
debug!("link_autoref(autoref={:?}, expr_cmt={:?})", autoref, expr_cmt);
span: Span,
id: hir::HirId,
mutbl: hir::Mutability,
- cmt_borrowed: &mc::PlaceWithHirId<'tcx>,
+ cmt_borrowed: &PlaceWithHirId<'tcx>,
) {
debug!(
"link_region_from_node_type(id={:?}, mutbl={:?}, cmt_borrowed={:?})",
span: Span,
borrow_region: ty::Region<'tcx>,
borrow_kind: ty::BorrowKind,
- borrow_place: &mc::PlaceWithHirId<'tcx>,
+ borrow_place: &PlaceWithHirId<'tcx>,
) {
let origin = infer::DataBorrowed(borrow_place.place.ty(), span);
self.type_must_outlive(origin, borrow_place.place.ty(), borrow_region);
_ => assert!(pointer_ty.is_box(), "unexpected built-in deref type {}", pointer_ty),
}
}
- if let mc::PlaceBase::Upvar(upvar_id) = borrow_place.place.base {
+ if let PlaceBase::Upvar(upvar_id) = borrow_place.place.base {
self.link_upvar_region(span, borrow_region, upvar_id);
}
}
debug!("link_upvar_region(borrorw_region={:?}, upvar_id={:?}", borrow_region, upvar_id);
// A by-reference upvar can't be borrowed for longer than the
// upvar is borrowed from the environment.
- match self.tables.borrow().upvar_capture(upvar_id) {
+ match self.typeck_results.borrow().upvar_capture(upvar_id) {
ty::UpvarCapture::ByRef(upvar_borrow) => {
self.sub_regions(
infer::ReborrowUpvar(span, upvar_id),
use super::FnCtxt;
use crate::expr_use_visitor as euv;
-use crate::mem_categorization as mc;
-use crate::mem_categorization::PlaceBase;
use rustc_data_structures::fx::FxIndexMap;
use rustc_hir as hir;
use rustc_hir::def_id::DefId;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_infer::infer::UpvarRegion;
+use rustc_middle::hir::place::{PlaceBase, PlaceWithHirId};
use rustc_middle::ty::{self, Ty, TyCtxt, UpvarSubsts};
use rustc_span::{Span, Symbol};
}
};
- self.tables.borrow_mut().upvar_capture_map.insert(upvar_id, capture_kind);
+ self.typeck_results.borrow_mut().upvar_capture_map.insert(upvar_id, capture_kind);
}
// Add the vector of upvars to the map keyed with the closure id.
// This gives us an easier access to them without having to call
// tcx.upvars again..
if !closure_captures.is_empty() {
- self.tables.borrow_mut().closure_captures.insert(closure_def_id, closure_captures);
+ self.typeck_results
+ .borrow_mut()
+ .closure_captures
+ .insert(closure_def_id, closure_captures);
}
}
&self.infcx,
body_owner_def_id,
self.param_env,
- &self.tables.borrow(),
+ &self.typeck_results.borrow(),
)
.consume_body(body);
// If we have an origin, store it.
if let Some(origin) = delegate.current_origin {
- self.tables.borrow_mut().closure_kind_origins_mut().insert(closure_hir_id, origin);
+ self.typeck_results
+ .borrow_mut()
+ .closure_kind_origins_mut()
+ .insert(closure_hir_id, origin);
}
}
- self.tables.borrow_mut().upvar_capture_map.extend(delegate.adjust_upvar_captures);
+ self.typeck_results.borrow_mut().upvar_capture_map.extend(delegate.adjust_upvar_captures);
// Now that we've analyzed the closure, we know how each
// variable is borrowed, and we know what traits the closure
var_path: ty::UpvarPath { hir_id: var_hir_id },
closure_expr_id: closure_def_id,
};
- let capture = self.tables.borrow().upvar_capture(upvar_id);
+ let capture = self.typeck_results.borrow().upvar_capture(upvar_id);
debug!("var_id={:?} upvar_ty={:?} capture={:?}", var_hir_id, upvar_ty, capture);
impl<'a, 'tcx> InferBorrowKind<'a, 'tcx> {
fn adjust_upvar_borrow_kind_for_consume(
&mut self,
- place_with_id: &mc::PlaceWithHirId<'tcx>,
+ place_with_id: &PlaceWithHirId<'tcx>,
mode: euv::ConsumeMode,
) {
debug!(
/// Indicates that `place_with_id` is being directly mutated (e.g., assigned
/// to). If the place is based on a by-ref upvar, this implies that
/// the upvar must be borrowed using an `&mut` borrow.
- fn adjust_upvar_borrow_kind_for_mut(&mut self, place_with_id: &mc::PlaceWithHirId<'tcx>) {
+ fn adjust_upvar_borrow_kind_for_mut(&mut self, place_with_id: &PlaceWithHirId<'tcx>) {
debug!("adjust_upvar_borrow_kind_for_mut(place_with_id={:?})", place_with_id);
if let PlaceBase::Upvar(upvar_id) = place_with_id.place.base {
}
}
- fn adjust_upvar_borrow_kind_for_unique(&mut self, place_with_id: &mc::PlaceWithHirId<'tcx>) {
+ fn adjust_upvar_borrow_kind_for_unique(&mut self, place_with_id: &PlaceWithHirId<'tcx>) {
debug!("adjust_upvar_borrow_kind_for_unique(place_with_id={:?})", place_with_id);
if let PlaceBase::Upvar(upvar_id) = place_with_id.place.base {
.adjust_upvar_captures
.get(&upvar_id)
.copied()
- .unwrap_or_else(|| self.fcx.tables.borrow().upvar_capture(upvar_id));
+ .unwrap_or_else(|| self.fcx.typeck_results.borrow().upvar_capture(upvar_id));
debug!(
"adjust_upvar_borrow_kind(upvar_id={:?}, upvar_capture={:?}, kind={:?})",
upvar_id, upvar_capture, kind
}
impl<'a, 'tcx> euv::Delegate<'tcx> for InferBorrowKind<'a, 'tcx> {
- fn consume(&mut self, place_with_id: &mc::PlaceWithHirId<'tcx>, mode: euv::ConsumeMode) {
+ fn consume(&mut self, place_with_id: &PlaceWithHirId<'tcx>, mode: euv::ConsumeMode) {
debug!("consume(place_with_id={:?},mode={:?})", place_with_id, mode);
self.adjust_upvar_borrow_kind_for_consume(place_with_id, mode);
}
- fn borrow(&mut self, place_with_id: &mc::PlaceWithHirId<'tcx>, bk: ty::BorrowKind) {
+ fn borrow(&mut self, place_with_id: &PlaceWithHirId<'tcx>, bk: ty::BorrowKind) {
debug!("borrow(place_with_id={:?}, bk={:?})", place_with_id, bk);
match bk {
}
}
- fn mutate(&mut self, assignee_place: &mc::PlaceWithHirId<'tcx>) {
+ fn mutate(&mut self, assignee_place: &PlaceWithHirId<'tcx>) {
debug!("mutate(assignee_place={:?})", assignee_place);
self.adjust_upvar_borrow_kind_for_mut(assignee_place);
self, AdtKind, GenericParamDefKind, ToPredicate, Ty, TyCtxt, TypeFoldable, WithConstness,
};
use rustc_session::parse::feature_err;
-use rustc_span::symbol::{sym, Symbol};
+use rustc_span::symbol::{sym, Ident, Symbol};
use rustc_span::Span;
use rustc_trait_selection::opaque_types::may_define_opaque_type;
use rustc_trait_selection::traits::query::evaluate_obligation::InferCtxtExt;
_ => unreachable!(),
}
}
- hir::ItemKind::Fn(..) => {
- check_item_fn(tcx, item);
+ hir::ItemKind::Fn(ref sig, ..) => {
+ check_item_fn(tcx, item.hir_id, item.ident, item.span, sig.decl);
}
hir::ItemKind::Static(ref ty, ..) => {
check_item_type(tcx, item.hir_id, ty.span, false);
}
hir::ItemKind::ForeignMod(ref module) => {
for it in module.items.iter() {
- if let hir::ForeignItemKind::Static(ref ty, ..) = it.kind {
- check_item_type(tcx, it.hir_id, ty.span, true);
+ match it.kind {
+ hir::ForeignItemKind::Fn(ref decl, ..) => {
+ check_item_fn(tcx, it.hir_id, it.ident, it.span, decl)
+ }
+ hir::ForeignItemKind::Static(ref ty, ..) => {
+ check_item_type(tcx, it.hir_id, ty.span, true)
+ }
+ hir::ForeignItemKind::Type => (),
}
}
}
fcx,
item.ident.span,
sig,
- hir_sig,
+ hir_sig.decl,
item.def_id,
&mut implied_bounds,
);
}
}
-fn check_item_fn(tcx: TyCtxt<'_>, item: &hir::Item<'_>) {
- for_item(tcx, item).with_fcx(|fcx, tcx| {
- let def_id = fcx.tcx.hir().local_def_id(item.hir_id);
+fn check_item_fn(
+ tcx: TyCtxt<'_>,
+ item_id: hir::HirId,
+ ident: Ident,
+ span: Span,
+ decl: &hir::FnDecl<'_>,
+) {
+ for_id(tcx, item_id, span).with_fcx(|fcx, tcx| {
+ let def_id = fcx.tcx.hir().local_def_id(item_id);
let sig = fcx.tcx.fn_sig(def_id);
- let sig = fcx.normalize_associated_types_in(item.span, &sig);
+ let sig = fcx.normalize_associated_types_in(span, &sig);
let mut implied_bounds = vec![];
- let hir_sig = match &item.kind {
- ItemKind::Fn(sig, ..) => sig,
- _ => bug!("expected `ItemKind::Fn`, found `{:?}`", item.kind),
- };
check_fn_or_method(
tcx,
fcx,
- item.ident.span,
+ ident.span,
sig,
- hir_sig,
+ decl,
def_id.to_def_id(),
&mut implied_bounds,
);
fcx: &FnCtxt<'fcx, 'tcx>,
span: Span,
sig: ty::PolyFnSig<'tcx>,
- hir_sig: &hir::FnSig<'_>,
+ hir_decl: &hir::FnDecl<'_>,
def_id: DefId,
implied_bounds: &mut Vec<Ty<'tcx>>,
) {
let sig = fcx.normalize_associated_types_in(span, &sig);
let sig = fcx.tcx.liberate_late_bound_regions(def_id, &sig);
- for (&input_ty, span) in sig.inputs().iter().zip(hir_sig.decl.inputs.iter().map(|t| t.span)) {
+ for (&input_ty, span) in sig.inputs().iter().zip(hir_decl.inputs.iter().map(|t| t.span)) {
fcx.register_wf_obligation(input_ty.into(), span, ObligationCauseCode::MiscObligation);
}
implied_bounds.extend(sig.inputs());
fcx.register_wf_obligation(
sig.output().into(),
- hir_sig.decl.output.span(),
+ hir_decl.output.span(),
ObligationCauseCode::ReturnType,
);
// FIXME(#25759) return types should not be implied bounds
implied_bounds.push(sig.output());
- check_where_clauses(tcx, fcx, span, def_id, Some((sig.output(), hir_sig.decl.output.span())));
+ check_where_clauses(tcx, fcx, span, def_id, Some((sig.output(), hir_decl.output.span())));
}
/// Checks "defining uses" of opaque `impl Trait` types to ensure that they meet the restrictions
// During type inference, partially inferred types are
// represented using Type variables (ty::Infer). These don't appear in
-// the final TypeckTables since all of the types should have been
-// inferred once typeck_tables_of is done.
+// the final TypeckResults since all of the types should have been
+// inferred once typeck is done.
// When type inference is running however, having to update the typeck
-// tables every time a new type is inferred would be unreasonably slow,
+// results every time a new type is inferred would be unreasonably slow,
// so instead all of the replacement happens at the end in
// resolve_type_vars_in_body, which creates a new TypeTables which
// doesn't contain any inference types.
pub fn resolve_type_vars_in_body(
&self,
body: &'tcx hir::Body<'tcx>,
- ) -> &'tcx ty::TypeckTables<'tcx> {
+ ) -> &'tcx ty::TypeckResults<'tcx> {
let item_id = self.tcx.hir().body_owner(body.id());
let item_def_id = self.tcx.hir().local_def_id(item_id);
wbcx.visit_user_provided_sigs();
wbcx.visit_generator_interior_types();
- let used_trait_imports = mem::take(&mut self.tables.borrow_mut().used_trait_imports);
+ let used_trait_imports =
+ mem::take(&mut self.typeck_results.borrow_mut().used_trait_imports);
debug!("used_trait_imports({:?}) = {:?}", item_def_id, used_trait_imports);
- wbcx.tables.used_trait_imports = used_trait_imports;
+ wbcx.typeck_results.used_trait_imports = used_trait_imports;
- wbcx.tables.closure_captures =
- mem::replace(&mut self.tables.borrow_mut().closure_captures, Default::default());
+ wbcx.typeck_results.closure_captures = mem::replace(
+ &mut self.typeck_results.borrow_mut().closure_captures,
+ Default::default(),
+ );
if self.is_tainted_by_errors() {
// FIXME(eddyb) keep track of `ErrorReported` from where the error was emitted.
- wbcx.tables.tainted_by_errors = Some(ErrorReported);
+ wbcx.typeck_results.tainted_by_errors = Some(ErrorReported);
}
- debug!("writeback: tables for {:?} are {:#?}", item_def_id, wbcx.tables);
+ debug!("writeback: typeck results for {:?} are {:#?}", item_def_id, wbcx.typeck_results);
- self.tcx.arena.alloc(wbcx.tables)
+ self.tcx.arena.alloc(wbcx.typeck_results)
}
}
///////////////////////////////////////////////////////////////////////////
// The Writeback context. This visitor walks the AST, checking the
-// fn-specific tables to find references to types or regions. It
+// fn-specific typeck results to find references to types or regions. It
// resolves those regions to remove inference variables and writes the
-// final result back into the master tables in the tcx. Here and
+// final result back into the master typeck results in the tcx. Here and
// there, it applies a few ad-hoc checks that were not convenient to
// do elsewhere.
struct WritebackCx<'cx, 'tcx> {
fcx: &'cx FnCtxt<'cx, 'tcx>,
- tables: ty::TypeckTables<'tcx>,
+ typeck_results: ty::TypeckResults<'tcx>,
body: &'tcx hir::Body<'tcx>,
) -> WritebackCx<'cx, 'tcx> {
let owner = body.id().hir_id.owner;
- WritebackCx { fcx, tables: ty::TypeckTables::new(owner), body, rustc_dump_user_substs }
+ WritebackCx {
+ fcx,
+ typeck_results: ty::TypeckResults::new(owner),
+ body,
+ rustc_dump_user_substs,
+ }
}
fn tcx(&self) -> TyCtxt<'tcx> {
self.fcx.tcx
}
- fn write_ty_to_tables(&mut self, hir_id: hir::HirId, ty: Ty<'tcx>) {
- debug!("write_ty_to_tables({:?}, {:?})", hir_id, ty);
+ fn write_ty_to_typeck_results(&mut self, hir_id: hir::HirId, ty: Ty<'tcx>) {
+ debug!("write_ty_to_typeck_results({:?}, {:?})", hir_id, ty);
assert!(!ty.needs_infer() && !ty.has_placeholders() && !ty.has_free_regions());
- self.tables.node_types_mut().insert(hir_id, ty);
+ self.typeck_results.node_types_mut().insert(hir_id, ty);
}
// Hacky hack: During type-checking, we treat *all* operators
let inner_ty = self.fcx.resolve_vars_if_possible(&inner_ty);
if inner_ty.is_scalar() {
- let mut tables = self.fcx.tables.borrow_mut();
- tables.type_dependent_defs_mut().remove(e.hir_id);
- tables.node_substs_mut().remove(e.hir_id);
+ let mut typeck_results = self.fcx.typeck_results.borrow_mut();
+ typeck_results.type_dependent_defs_mut().remove(e.hir_id);
+ typeck_results.node_substs_mut().remove(e.hir_id);
}
}
hir::ExprKind::Binary(ref op, ref lhs, ref rhs)
let rhs_ty = self.fcx.resolve_vars_if_possible(&rhs_ty);
if lhs_ty.is_scalar() && rhs_ty.is_scalar() {
- let mut tables = self.fcx.tables.borrow_mut();
- tables.type_dependent_defs_mut().remove(e.hir_id);
- tables.node_substs_mut().remove(e.hir_id);
+ let mut typeck_results = self.fcx.typeck_results.borrow_mut();
+ typeck_results.type_dependent_defs_mut().remove(e.hir_id);
+ typeck_results.node_substs_mut().remove(e.hir_id);
match e.kind {
hir::ExprKind::Binary(..) => {
if !op.node.is_by_value() {
- let mut adjustments = tables.adjustments_mut();
+ let mut adjustments = typeck_results.adjustments_mut();
if let Some(a) = adjustments.get_mut(lhs.hir_id) {
a.pop();
}
}
}
hir::ExprKind::AssignOp(..) => {
- if let Some(a) = tables.adjustments_mut().get_mut(lhs.hir_id) {
+ if let Some(a) = typeck_results.adjustments_mut().get_mut(lhs.hir_id) {
a.pop();
}
}
// usize-ish
fn fix_index_builtin_expr(&mut self, e: &hir::Expr<'_>) {
if let hir::ExprKind::Index(ref base, ref index) = e.kind {
- let mut tables = self.fcx.tables.borrow_mut();
+ let mut typeck_results = self.fcx.typeck_results.borrow_mut();
// All valid indexing looks like this; might encounter non-valid indexes at this point.
- let base_ty = tables.expr_ty_adjusted_opt(&base).map(|t| &t.kind);
+ let base_ty = typeck_results.expr_ty_adjusted_opt(&base).map(|t| &t.kind);
if base_ty.is_none() {
// When encountering `return [0][0]` outside of a `fn` body we can encounter a base
// that isn't in the type table. We assume more relevant errors have already been
self.tcx().sess.delay_span_bug(e.span, &format!("bad base: `{:?}`", base));
}
if let Some(ty::Ref(_, base_ty, _)) = base_ty {
- let index_ty = tables.expr_ty_adjusted_opt(&index).unwrap_or_else(|| {
+ let index_ty = typeck_results.expr_ty_adjusted_opt(&index).unwrap_or_else(|| {
// When encountering `return [0][0]` outside of a `fn` body we would attempt
// to access an unexistend index. We assume that more relevant errors will
// already have been emitted, so we only gate on this with an ICE if no
if base_ty.builtin_index().is_some() && index_ty == self.fcx.tcx.types.usize {
// Remove the method call record
- tables.type_dependent_defs_mut().remove(e.hir_id);
- tables.node_substs_mut().remove(e.hir_id);
+ typeck_results.type_dependent_defs_mut().remove(e.hir_id);
+ typeck_results.node_substs_mut().remove(e.hir_id);
- if let Some(a) = tables.adjustments_mut().get_mut(base.hir_id) {
+ if let Some(a) = typeck_results.adjustments_mut().get_mut(base.hir_id) {
// Discard the need for a mutable borrow
// Extra adjustment made when indexing causes a drop
// This is the master code which walks the AST. It delegates most of
// the heavy lifting to the generic visit and resolve functions
// below. In general, a function is made into a `visitor` if it must
-// traffic in node-ids or update tables in the type context etc.
+// traffic in node-ids or update typeck results in the type context etc.
impl<'cx, 'tcx> Visitor<'tcx> for WritebackCx<'cx, 'tcx> {
type Map = intravisit::ErasedMap<'tcx>;
fn visit_pat(&mut self, p: &'tcx hir::Pat<'tcx>) {
match p.kind {
hir::PatKind::Binding(..) => {
- let tables = self.fcx.tables.borrow();
- if let Some(bm) = tables.extract_binding_mode(self.tcx().sess, p.hir_id, p.span) {
- self.tables.pat_binding_modes_mut().insert(p.hir_id, bm);
+ let typeck_results = self.fcx.typeck_results.borrow();
+ if let Some(bm) =
+ typeck_results.extract_binding_mode(self.tcx().sess, p.hir_id, p.span)
+ {
+ self.typeck_results.pat_binding_modes_mut().insert(p.hir_id, bm);
}
}
hir::PatKind::Struct(_, fields, _) => {
intravisit::walk_local(self, l);
let var_ty = self.fcx.local_ty(l.span, l.hir_id).decl_ty;
let var_ty = self.resolve(&var_ty, &l.span);
- self.write_ty_to_tables(l.hir_id, var_ty);
+ self.write_ty_to_typeck_results(l.hir_id, var_ty);
}
fn visit_ty(&mut self, hir_ty: &'tcx hir::Ty<'tcx>) {
intravisit::walk_ty(self, hir_ty);
let ty = self.fcx.node_ty(hir_ty.hir_id);
let ty = self.resolve(&ty, &hir_ty.span);
- self.write_ty_to_tables(hir_ty.hir_id, ty);
+ self.write_ty_to_typeck_results(hir_ty.hir_id, ty);
}
}
impl<'cx, 'tcx> WritebackCx<'cx, 'tcx> {
fn visit_upvar_capture_map(&mut self) {
- for (upvar_id, upvar_capture) in self.fcx.tables.borrow().upvar_capture_map.iter() {
+ for (upvar_id, upvar_capture) in self.fcx.typeck_results.borrow().upvar_capture_map.iter() {
let new_upvar_capture = match *upvar_capture {
ty::UpvarCapture::ByValue => ty::UpvarCapture::ByValue,
ty::UpvarCapture::ByRef(ref upvar_borrow) => {
}
};
debug!("Upvar capture for {:?} resolved to {:?}", upvar_id, new_upvar_capture);
- self.tables.upvar_capture_map.insert(*upvar_id, new_upvar_capture);
+ self.typeck_results.upvar_capture_map.insert(*upvar_id, new_upvar_capture);
}
}
fn visit_closures(&mut self) {
- let fcx_tables = self.fcx.tables.borrow();
- assert_eq!(fcx_tables.hir_owner, self.tables.hir_owner);
- let common_hir_owner = fcx_tables.hir_owner;
+ let fcx_typeck_results = self.fcx.typeck_results.borrow();
+ assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
+ let common_hir_owner = fcx_typeck_results.hir_owner;
- for (&id, &origin) in fcx_tables.closure_kind_origins().iter() {
+ for (&id, &origin) in fcx_typeck_results.closure_kind_origins().iter() {
let hir_id = hir::HirId { owner: common_hir_owner, local_id: id };
- self.tables.closure_kind_origins_mut().insert(hir_id, origin);
+ self.typeck_results.closure_kind_origins_mut().insert(hir_id, origin);
}
}
fn visit_coercion_casts(&mut self) {
- let fcx_tables = self.fcx.tables.borrow();
- let fcx_coercion_casts = fcx_tables.coercion_casts();
- assert_eq!(fcx_tables.hir_owner, self.tables.hir_owner);
+ let fcx_typeck_results = self.fcx.typeck_results.borrow();
+ let fcx_coercion_casts = fcx_typeck_results.coercion_casts();
+ assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
for local_id in fcx_coercion_casts {
- self.tables.set_coercion_cast(*local_id);
+ self.typeck_results.set_coercion_cast(*local_id);
}
}
fn visit_user_provided_tys(&mut self) {
- let fcx_tables = self.fcx.tables.borrow();
- assert_eq!(fcx_tables.hir_owner, self.tables.hir_owner);
- let common_hir_owner = fcx_tables.hir_owner;
+ let fcx_typeck_results = self.fcx.typeck_results.borrow();
+ assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
+ let common_hir_owner = fcx_typeck_results.hir_owner;
let mut errors_buffer = Vec::new();
- for (&local_id, c_ty) in fcx_tables.user_provided_types().iter() {
+ for (&local_id, c_ty) in fcx_typeck_results.user_provided_types().iter() {
let hir_id = hir::HirId { owner: common_hir_owner, local_id };
if cfg!(debug_assertions) && c_ty.needs_infer() {
);
};
- self.tables.user_provided_types_mut().insert(hir_id, *c_ty);
+ self.typeck_results.user_provided_types_mut().insert(hir_id, *c_ty);
if let ty::UserType::TypeOf(_, user_substs) = c_ty.value {
if self.rustc_dump_user_substs {
}
fn visit_user_provided_sigs(&mut self) {
- let fcx_tables = self.fcx.tables.borrow();
- assert_eq!(fcx_tables.hir_owner, self.tables.hir_owner);
+ let fcx_typeck_results = self.fcx.typeck_results.borrow();
+ assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
- for (&def_id, c_sig) in fcx_tables.user_provided_sigs.iter() {
+ for (&def_id, c_sig) in fcx_typeck_results.user_provided_sigs.iter() {
if cfg!(debug_assertions) && c_sig.needs_infer() {
span_bug!(
self.fcx.tcx.hir().span_if_local(def_id).unwrap(),
);
};
- self.tables.user_provided_sigs.insert(def_id, *c_sig);
+ self.typeck_results.user_provided_sigs.insert(def_id, *c_sig);
}
}
fn visit_generator_interior_types(&mut self) {
- let fcx_tables = self.fcx.tables.borrow();
- assert_eq!(fcx_tables.hir_owner, self.tables.hir_owner);
- self.tables.generator_interior_types = fcx_tables.generator_interior_types.clone();
+ let fcx_typeck_results = self.fcx.typeck_results.borrow();
+ assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
+ self.typeck_results.generator_interior_types =
+ fcx_typeck_results.generator_interior_types.clone();
}
fn visit_opaque_types(&mut self, span: Span) {
substs: opaque_defn.substs,
};
- let old = self.tables.concrete_opaque_types.insert(def_id, new);
+ let old = self.typeck_results.concrete_opaque_types.insert(def_id, new);
if let Some(old) = old {
if old.concrete_type != definition_ty || old.substs != opaque_defn.substs {
span_bug!(
}
fn visit_field_id(&mut self, hir_id: hir::HirId) {
- if let Some(index) = self.fcx.tables.borrow_mut().field_indices_mut().remove(hir_id) {
- self.tables.field_indices_mut().insert(hir_id, index);
+ if let Some(index) = self.fcx.typeck_results.borrow_mut().field_indices_mut().remove(hir_id)
+ {
+ self.typeck_results.field_indices_mut().insert(hir_id, index);
}
}
fn visit_node_id(&mut self, span: Span, hir_id: hir::HirId) {
// Export associated path extensions and method resolutions.
- if let Some(def) = self.fcx.tables.borrow_mut().type_dependent_defs_mut().remove(hir_id) {
- self.tables.type_dependent_defs_mut().insert(hir_id, def);
+ if let Some(def) =
+ self.fcx.typeck_results.borrow_mut().type_dependent_defs_mut().remove(hir_id)
+ {
+ self.typeck_results.type_dependent_defs_mut().insert(hir_id, def);
}
// Resolve any borrowings for the node with id `node_id`
// Resolve the type of the node with id `node_id`
let n_ty = self.fcx.node_ty(hir_id);
let n_ty = self.resolve(&n_ty, &span);
- self.write_ty_to_tables(hir_id, n_ty);
+ self.write_ty_to_typeck_results(hir_id, n_ty);
debug!("node {:?} has type {:?}", hir_id, n_ty);
// Resolve any substitutions
- if let Some(substs) = self.fcx.tables.borrow().node_substs_opt(hir_id) {
+ if let Some(substs) = self.fcx.typeck_results.borrow().node_substs_opt(hir_id) {
let substs = self.resolve(&substs, &span);
debug!("write_substs_to_tcx({:?}, {:?})", hir_id, substs);
assert!(!substs.needs_infer() && !substs.has_placeholders());
- self.tables.node_substs_mut().insert(hir_id, substs);
+ self.typeck_results.node_substs_mut().insert(hir_id, substs);
}
}
fn visit_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
- let adjustment = self.fcx.tables.borrow_mut().adjustments_mut().remove(hir_id);
+ let adjustment = self.fcx.typeck_results.borrow_mut().adjustments_mut().remove(hir_id);
match adjustment {
None => {
debug!("no adjustments for node {:?}", hir_id);
Some(adjustment) => {
let resolved_adjustment = self.resolve(&adjustment, &span);
debug!("adjustments for node {:?}: {:?}", hir_id, resolved_adjustment);
- self.tables.adjustments_mut().insert(hir_id, resolved_adjustment);
+ self.typeck_results.adjustments_mut().insert(hir_id, resolved_adjustment);
}
}
}
fn visit_pat_adjustments(&mut self, span: Span, hir_id: hir::HirId) {
- let adjustment = self.fcx.tables.borrow_mut().pat_adjustments_mut().remove(hir_id);
+ let adjustment = self.fcx.typeck_results.borrow_mut().pat_adjustments_mut().remove(hir_id);
match adjustment {
None => {
debug!("no pat_adjustments for node {:?}", hir_id);
Some(adjustment) => {
let resolved_adjustment = self.resolve(&adjustment, &span);
debug!("pat_adjustments for node {:?}: {:?}", hir_id, resolved_adjustment);
- self.tables.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
+ self.typeck_results.pat_adjustments_mut().insert(hir_id, resolved_adjustment);
}
}
}
fn visit_liberated_fn_sigs(&mut self) {
- let fcx_tables = self.fcx.tables.borrow();
- assert_eq!(fcx_tables.hir_owner, self.tables.hir_owner);
- let common_hir_owner = fcx_tables.hir_owner;
+ let fcx_typeck_results = self.fcx.typeck_results.borrow();
+ assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
+ let common_hir_owner = fcx_typeck_results.hir_owner;
- for (&local_id, fn_sig) in fcx_tables.liberated_fn_sigs().iter() {
+ for (&local_id, fn_sig) in fcx_typeck_results.liberated_fn_sigs().iter() {
let hir_id = hir::HirId { owner: common_hir_owner, local_id };
let fn_sig = self.resolve(fn_sig, &hir_id);
- self.tables.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
+ self.typeck_results.liberated_fn_sigs_mut().insert(hir_id, fn_sig);
}
}
fn visit_fru_field_types(&mut self) {
- let fcx_tables = self.fcx.tables.borrow();
- assert_eq!(fcx_tables.hir_owner, self.tables.hir_owner);
- let common_hir_owner = fcx_tables.hir_owner;
+ let fcx_typeck_results = self.fcx.typeck_results.borrow();
+ assert_eq!(fcx_typeck_results.hir_owner, self.typeck_results.hir_owner);
+ let common_hir_owner = fcx_typeck_results.hir_owner;
- for (&local_id, ftys) in fcx_tables.fru_field_types().iter() {
+ for (&local_id, ftys) in fcx_typeck_results.fru_field_types().iter() {
let hir_id = hir::HirId { owner: common_hir_owner, local_id };
let ftys = self.resolve(ftys, &hir_id);
- self.tables.fru_field_types_mut().insert(hir_id, ftys);
+ self.typeck_results.fru_field_types_mut().insert(hir_id, ftys);
}
}
}
// We may have introduced e.g. `ty::Error`, if inference failed, make sure
- // to mark the `TypeckTables` as tainted in that case, so that downstream
- // users of the tables don't produce extra errors, or worse, ICEs.
+ // to mark the `TypeckResults` as tainted in that case, so that downstream
+ // users of the typeck results don't produce extra errors, or worse, ICEs.
if resolver.replaced_with_error {
// FIXME(eddyb) keep track of `ErrorReported` from where the error was emitted.
- self.tables.tainted_by_errors = Some(ErrorReported);
+ self.typeck_results.tainted_by_errors = Some(ErrorReported);
}
x
use rustc_hir::def_id::{DefId, LocalDefId, LOCAL_CRATE};
use rustc_hir::intravisit::{self, NestedVisitorMap, Visitor};
use rustc_hir::weak_lang_items;
-use rustc_hir::{GenericParamKind, Node};
+use rustc_hir::{GenericParamKind, HirId, Node};
use rustc_middle::hir::map::blocks::FnLikeNode;
use rustc_middle::hir::map::Map;
use rustc_middle::middle::codegen_fn_attrs::{CodegenFnAttrFlags, CodegenFnAttrs};
}
}
+/// HIR visitor that detects whether the anon const `ct` syntactically
+/// appears inside a generic parameter list (e.g. as a type parameter's
+/// default or as the type of a const parameter).
+struct AnonConstInParamListDetector {
+    // True while the walk is inside a generic parameter.
+    in_param_list: bool,
+    // Set once `ct` has been found nested inside a generic parameter.
+    found_anon_const_in_list: bool,
+    // The anon const we are searching for.
+    ct: HirId,
+}
+
+impl<'v> Visitor<'v> for AnonConstInParamListDetector {
+    type Map = intravisit::ErasedMap<'v>;
+
+    fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
+        // Purely syntactic walk: do not descend into nested items/bodies.
+        NestedVisitorMap::None
+    }
+
+    fn visit_generic_param(&mut self, p: &'v hir::GenericParam<'v>) {
+        // Save and restore the flag so only nodes nested within this
+        // generic parameter are walked with `in_param_list == true`.
+        let prev = self.in_param_list;
+        self.in_param_list = true;
+        intravisit::walk_generic_param(self, p);
+        self.in_param_list = prev;
+    }
+
+    fn visit_anon_const(&mut self, c: &'v hir::AnonConst) {
+        if self.in_param_list && self.ct == c.hir_id {
+            // Found the const we were looking for; no need to recurse.
+            self.found_anon_const_in_list = true;
+        } else {
+            intravisit::walk_anon_const(self, c)
+        }
+    }
+}
+
fn generics_of(tcx: TyCtxt<'_>, def_id: DefId) -> ty::Generics {
use rustc_hir::*;
let parent_id = tcx.hir().get_parent_item(hir_id);
let parent_def_id = tcx.hir().local_def_id(parent_id);
- // HACK(eddyb) this provides the correct generics when
- // `feature(const_generics)` is enabled, so that const expressions
- // used with const generics, e.g. `Foo<{N+1}>`, can work at all.
- if tcx.lazy_normalization() {
+ let mut in_param_list = false;
+ for (_parent, node) in tcx.hir().parent_iter(hir_id) {
+ if let Some(generics) = node.generics() {
+ let mut visitor = AnonConstInParamListDetector {
+ in_param_list: false,
+ found_anon_const_in_list: false,
+ ct: hir_id,
+ };
+
+ visitor.visit_generics(generics);
+ in_param_list = visitor.found_anon_const_in_list;
+ break;
+ }
+ }
+
+ if in_param_list {
+ // We do not allow generic parameters in anon consts if we are inside
+ // of a param list.
+ //
+    // This affects both default type bindings, e.g. `struct Foo<T, U = [u8; std::mem::size_of::<T>()]>(T, U)`,
+ // and the types of const parameters, e.g. `struct V<const N: usize, const M: [u8; N]>();`.
+ None
+ } else if tcx.lazy_normalization() {
+ // HACK(eddyb) this provides the correct generics when
+ // `feature(const_generics)` is enabled, so that const expressions
+ // used with const generics, e.g. `Foo<{N+1}>`, can work at all.
Some(parent_def_id.to_def_id())
} else {
let parent_node = tcx.hir().get(tcx.hir().get_parent_node(hir_id));
| Item(hir::Item { kind: ItemKind::Fn(sig, generics, _), ident, .. }) => {
match get_infer_ret_ty(&sig.decl.output) {
Some(ty) => {
- let fn_sig = tcx.typeck_tables_of(def_id).liberated_fn_sigs()[hir_id];
+ let fn_sig = tcx.typeck(def_id).liberated_fn_sigs()[hir_id];
let mut visitor = PlaceholderHirTyCollector::default();
visitor.visit_ty(ty);
let mut diag = bad_placeholder_type(tcx, visitor.0);
let re_root_empty = tcx.lifetimes.re_root_empty;
let predicate = ty::OutlivesPredicate(ty, re_root_empty);
predicates.push((
- ty::PredicateKind::TypeOutlives(ty::Binder::dummy(predicate))
+ ty::PredicateKind::TypeOutlives(ty::Binder::bind(predicate))
.to_predicate(tcx),
span,
));
..
}) => {
let body_owner = tcx.hir().local_def_id(tcx.hir().enclosing_body_owner(hir_id));
- let tables = tcx.typeck_tables_of(body_owner);
+ let tables = tcx.typeck(body_owner);
// This may fail in case the method/path does not actually exist.
// As there is no relevant param for `def_id`, we simply return
// `None` here.
}) => {
let body_owner =
tcx.hir().local_def_id(tcx.hir().enclosing_body_owner(hir_id));
- let _tables = tcx.typeck_tables_of(body_owner);
+ let _tables = tcx.typeck(body_owner);
&*path
}
_ => span_bug!(DUMMY_SP, "unexpected const parent path {:?}", parent_node),
tcx.sess.delay_span_bug(
DUMMY_SP,
&format!(
- "owner {:?} has no opaque type for {:?} in its tables",
+ "owner {:?} has no opaque type for {:?} in its typeck results",
owner, def_id,
),
);
if let Some(ErrorReported) =
- tcx.typeck_tables_of(owner.expect_local()).tainted_by_errors
+ tcx.typeck(owner.expect_local()).tainted_by_errors
{
// Some error in the
// owner fn prevented us from populating
impl ConstraintLocator<'_> {
fn check(&mut self, def_id: LocalDefId) {
// Don't try to check items that cannot possibly constrain the type.
- if !self.tcx.has_typeck_tables(def_id) {
+ if !self.tcx.has_typeck_results(def_id) {
debug!(
- "find_opaque_ty_constraints: no constraint for `{:?}` at `{:?}`: no tables",
+ "find_opaque_ty_constraints: no constraint for `{:?}` at `{:?}`: no typeck results",
self.def_id, def_id,
);
return;
}
// Calling `mir_borrowck` can lead to cycle errors through
// const-checking, avoid calling it if we don't have to.
- if !self.tcx.typeck_tables_of(def_id).concrete_opaque_types.contains_key(&self.def_id) {
+ if !self.tcx.typeck(def_id).concrete_opaque_types.contains_key(&self.def_id) {
debug!(
"find_opaque_ty_constraints: no constraint for `{:?}` at `{:?}`",
self.def_id, def_id,
let opaque_ty_def_id = opaque_ty_id.to_def_id();
- let owner_tables = tcx.typeck_tables_of(scope_def_id);
- let concrete_ty = owner_tables
+ let owner_typeck_results = tcx.typeck(scope_def_id);
+ let concrete_ty = owner_typeck_results
.concrete_opaque_types
.get(&opaque_ty_def_id)
.map(|opaque| opaque.concrete_type)
tcx.sess.delay_span_bug(
DUMMY_SP,
&format!(
- "owner {:?} has no opaque type for {:?} in its tables",
+ "owner {:?} has no opaque type for {:?} in its typeck results",
scope_def_id, opaque_ty_id
),
);
- if let Some(ErrorReported) = owner_tables.tainted_by_errors {
+ if let Some(ErrorReported) = owner_typeck_results.tainted_by_errors {
// Some error in the owner fn prevented us from populating the
// `concrete_opaque_types` table.
tcx.ty_error()
span: Span,
item_ident: Ident,
) -> Ty<'_> {
- let ty = tcx.diagnostic_only_typeck_tables_of(def_id).node_type(body_id.hir_id);
+ let ty = tcx.diagnostic_only_typeck(def_id).node_type(body_id.hir_id);
// If this came from a free `const` or `static mut?` item,
// then the user may have written e.g. `const A = 42;`.
pub use self::ConsumeMode::*;
// Export these here so that Clippy can use them.
-pub use mc::{PlaceBase, PlaceWithHirId, Projection};
+pub use rustc_middle::hir::place::{PlaceBase, PlaceWithHirId, Projection};
use rustc_hir as hir;
use rustc_hir::def::Res;
use rustc_hir::def_id::LocalDefId;
use rustc_hir::PatKind;
+use rustc_index::vec::Idx;
use rustc_infer::infer::InferCtxt;
+use rustc_middle::hir::place::ProjectionKind;
use rustc_middle::ty::{self, adjustment, TyCtxt};
+use rustc_target::abi::VariantIdx;
use crate::mem_categorization as mc;
use rustc_span::Span;
pub trait Delegate<'tcx> {
// The value found at `place` is either copied or moved, depending
// on mode.
- fn consume(&mut self, place_with_id: &mc::PlaceWithHirId<'tcx>, mode: ConsumeMode);
+ fn consume(&mut self, place_with_id: &PlaceWithHirId<'tcx>, mode: ConsumeMode);
// The value found at `place` is being borrowed with kind `bk`.
- fn borrow(&mut self, place_with_id: &mc::PlaceWithHirId<'tcx>, bk: ty::BorrowKind);
+ fn borrow(&mut self, place_with_id: &PlaceWithHirId<'tcx>, bk: ty::BorrowKind);
// The path at `place_with_id` is being assigned to.
- fn mutate(&mut self, assignee_place: &mc::PlaceWithHirId<'tcx>);
+ fn mutate(&mut self, assignee_place: &PlaceWithHirId<'tcx>);
}
#[derive(Copy, Clone, PartialEq, Debug)]
///
/// - `delegate` -- who receives the callbacks
/// - `param_env` --- parameter environment for trait lookups (esp. pertaining to `Copy`)
- /// - `tables` --- typeck results for the code being analyzed
+ /// - `typeck_results` --- typeck results for the code being analyzed
pub fn new(
delegate: &'a mut (dyn Delegate<'tcx> + 'a),
infcx: &'a InferCtxt<'a, 'tcx>,
body_owner: LocalDefId,
param_env: ty::ParamEnv<'tcx>,
- tables: &'a ty::TypeckTables<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
) -> Self {
ExprUseVisitor {
- mc: mc::MemCategorizationContext::new(infcx, param_env, body_owner, tables),
+ mc: mc::MemCategorizationContext::new(infcx, param_env, body_owner, typeck_results),
delegate,
}
}
}
hir::ExprKind::AssignOp(_, ref lhs, ref rhs) => {
- if self.mc.tables.is_method_call(expr) {
+ if self.mc.typeck_results.is_method_call(expr) {
self.consume_expr(lhs);
} else {
self.mutate_expr(lhs);
ty::Adt(adt, substs) if adt.is_struct() => {
// Consume those fields of the with expression that are needed.
for (f_index, with_field) in adt.non_enum_variant().fields.iter().enumerate() {
- let is_mentioned = fields
- .iter()
- .any(|f| self.tcx().field_index(f.hir_id, self.mc.tables) == f_index);
+ let is_mentioned = fields.iter().any(|f| {
+ self.tcx().field_index(f.hir_id, self.mc.typeck_results) == f_index
+ });
if !is_mentioned {
let field_place = self.mc.cat_projection(
&*with_expr,
with_place.clone(),
with_field.ty(self.tcx(), substs),
+ ProjectionKind::Field(f_index as u32, VariantIdx::new(0)),
);
self.delegate_consume(&field_place);
}
// consumed or borrowed as part of the automatic adjustment
// process.
fn walk_adjustment(&mut self, expr: &hir::Expr<'_>) {
- let adjustments = self.mc.tables.expr_adjustments(expr);
+ let adjustments = self.mc.typeck_results.expr_adjustments(expr);
let mut place_with_id = return_if_err!(self.mc.cat_expr_unadjusted(expr));
for adjustment in adjustments {
debug!("walk_adjustment expr={:?} adj={:?}", expr, adjustment);
fn walk_autoref(
&mut self,
expr: &hir::Expr<'_>,
- base_place: &mc::PlaceWithHirId<'tcx>,
+ base_place: &PlaceWithHirId<'tcx>,
autoref: &adjustment::AutoBorrow<'tcx>,
) {
debug!(
return_if_err!(mc.cat_pattern(discr_place.clone(), pat, |place, pat| {
if let PatKind::Binding(_, canonical_id, ..) = pat.kind {
debug!("walk_pat: binding place={:?} pat={:?}", place, pat,);
- if let Some(bm) = mc.tables.extract_binding_mode(tcx.sess, pat.hir_id, pat.span) {
+ if let Some(bm) =
+ mc.typeck_results.extract_binding_mode(tcx.sess, pat.hir_id, pat.span)
+ {
debug!("walk_pat: pat.hir_id={:?} bm={:?}", pat.hir_id, bm);
// pat_ty: the type of the binding being produced.
var_path: ty::UpvarPath { hir_id: var_id },
closure_expr_id: closure_def_id,
};
- let upvar_capture = self.mc.tables.upvar_capture(upvar_id);
+ let upvar_capture = self.mc.typeck_results.upvar_capture(upvar_id);
let captured_place = return_if_err!(self.cat_captured_var(
closure_expr.hir_id,
fn_decl_span,
closure_hir_id: hir::HirId,
closure_span: Span,
var_id: hir::HirId,
- ) -> mc::McResult<mc::PlaceWithHirId<'tcx>> {
+ ) -> mc::McResult<PlaceWithHirId<'tcx>> {
// Create the place for the variable being borrowed, from the
// perspective of the creator (parent) of the closure.
let var_ty = self.mc.node_ty(var_id)?;
//! result of `*x'`, effectively, where `x'` is a `Categorization::Upvar` reference
//! tied to `x`. The type of `x'` will be a borrowed pointer.
+use rustc_middle::hir::place::*;
use rustc_middle::ty::adjustment;
use rustc_middle::ty::fold::TypeFoldable;
use rustc_middle::ty::{self, Ty, TyCtxt};
use rustc_data_structures::fx::FxIndexMap;
use rustc_hir as hir;
-use rustc_hir::def::{DefKind, Res};
+use rustc_hir::def::{CtorOf, DefKind, Res};
use rustc_hir::def_id::LocalDefId;
+use rustc_hir::pat_util::EnumerateAndAdjustIterator;
use rustc_hir::PatKind;
+use rustc_index::vec::Idx;
use rustc_infer::infer::InferCtxt;
use rustc_span::Span;
+use rustc_target::abi::VariantIdx;
use rustc_trait_selection::infer::InferCtxtExt;
-#[derive(Clone, Debug)]
-pub enum PlaceBase {
- /// A temporary variable
- Rvalue,
- /// A named `static` item
- StaticItem,
- /// A named local variable
- Local(hir::HirId),
- /// An upvar referenced by closure env
- Upvar(ty::UpvarId),
-}
-
-#[derive(Clone, Debug, Eq, PartialEq)]
-pub enum ProjectionKind {
- /// A dereference of a pointer, reference or `Box<T>` of the given type
- Deref,
- /// An index or a field
- Other,
-}
-
-#[derive(Clone, Debug)]
-pub struct Projection<'tcx> {
- // Type after the projection is being applied.
- ty: Ty<'tcx>,
-
- /// Defines the type of access
- kind: ProjectionKind,
-}
-
-/// A `Place` represents how a value is located in memory.
-///
-/// This is an HIR version of `mir::Place`
-#[derive(Clone, Debug)]
-pub struct Place<'tcx> {
- /// The type of the `PlaceBase`
- pub base_ty: Ty<'tcx>,
- /// The "outermost" place that holds this value.
- pub base: PlaceBase,
- /// How this place is derived from the base place.
- pub projections: Vec<Projection<'tcx>>,
-}
-
-/// A `PlaceWithHirId` represents how a value is located in memory.
-///
-/// This is an HIR version of `mir::Place`
-#[derive(Clone, Debug)]
-pub struct PlaceWithHirId<'tcx> {
- /// `HirId` of the expression or pattern producing this value.
- pub hir_id: hir::HirId,
-
- /// Information about the `Place`
- pub place: Place<'tcx>,
-}
-
-impl<'tcx> PlaceWithHirId<'tcx> {
- crate fn new(
- hir_id: hir::HirId,
- base_ty: Ty<'tcx>,
- base: PlaceBase,
- projections: Vec<Projection<'tcx>>,
- ) -> PlaceWithHirId<'tcx> {
- PlaceWithHirId {
- hir_id: hir_id,
- place: Place { base_ty: base_ty, base: base, projections: projections },
- }
- }
-}
-
-impl<'tcx> Place<'tcx> {
- /// Returns an iterator of the types that have to be dereferenced to access
- /// the `Place`.
- ///
- /// The types are in the reverse order that they are applied. So if
- /// `x: &*const u32` and the `Place` is `**x`, then the types returned are
- ///`*const u32` then `&*const u32`.
- crate fn deref_tys(&self) -> impl Iterator<Item = Ty<'tcx>> + '_ {
- self.projections.iter().enumerate().rev().filter_map(move |(index, proj)| {
- if ProjectionKind::Deref == proj.kind {
- Some(self.ty_before_projection(index))
- } else {
- None
- }
- })
- }
-
- // Returns the type of this `Place` after all projections have been applied.
- pub fn ty(&self) -> Ty<'tcx> {
- self.projections.last().map_or_else(|| self.base_ty, |proj| proj.ty)
- }
-
- // Returns the type of this `Place` immediately before `projection_index`th projection
- // is applied.
- crate fn ty_before_projection(&self, projection_index: usize) -> Ty<'tcx> {
- assert!(projection_index < self.projections.len());
- if projection_index == 0 { self.base_ty } else { self.projections[projection_index - 1].ty }
- }
-}
-
crate trait HirNode {
fn hir_id(&self) -> hir::HirId;
fn span(&self) -> Span;
#[derive(Clone)]
crate struct MemCategorizationContext<'a, 'tcx> {
- crate tables: &'a ty::TypeckTables<'tcx>,
+ crate typeck_results: &'a ty::TypeckResults<'tcx>,
infcx: &'a InferCtxt<'a, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
body_owner: LocalDefId,
infcx: &'a InferCtxt<'a, 'tcx>,
param_env: ty::ParamEnv<'tcx>,
body_owner: LocalDefId,
- tables: &'a ty::TypeckTables<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
) -> MemCategorizationContext<'a, 'tcx> {
MemCategorizationContext {
- tables,
+ typeck_results,
infcx,
param_env,
body_owner,
}
crate fn node_ty(&self, hir_id: hir::HirId) -> McResult<Ty<'tcx>> {
- self.resolve_type_vars_or_error(hir_id, self.tables.node_type_opt(hir_id))
+ self.resolve_type_vars_or_error(hir_id, self.typeck_results.node_type_opt(hir_id))
}
fn expr_ty(&self, expr: &hir::Expr<'_>) -> McResult<Ty<'tcx>> {
- self.resolve_type_vars_or_error(expr.hir_id, self.tables.expr_ty_opt(expr))
+ self.resolve_type_vars_or_error(expr.hir_id, self.typeck_results.expr_ty_opt(expr))
}
crate fn expr_ty_adjusted(&self, expr: &hir::Expr<'_>) -> McResult<Ty<'tcx>> {
- self.resolve_type_vars_or_error(expr.hir_id, self.tables.expr_ty_adjusted_opt(expr))
+ self.resolve_type_vars_or_error(expr.hir_id, self.typeck_results.expr_ty_adjusted_opt(expr))
}
/// Returns the type of value that this pattern matches against.
// that these are never attached to binding patterns, so
// actually this is somewhat "disjoint" from the code below
// that aims to account for `ref x`.
- if let Some(vec) = self.tables.pat_adjustments().get(pat.hir_id) {
+ if let Some(vec) = self.typeck_results.pat_adjustments().get(pat.hir_id) {
if let Some(first_ty) = vec.first() {
debug!("pat_ty(pat={:?}) found adjusted ty `{:?}`", pat, first_ty);
return Ok(first_ty);
// and if so, figures out what the type *being borrowed* is.
let ret_ty = match pat.kind {
PatKind::Binding(..) => {
- let bm =
- *self.tables.pat_binding_modes().get(pat.hir_id).expect("missing binding mode");
+ let bm = *self
+ .typeck_results
+ .pat_binding_modes()
+ .get(pat.hir_id)
+ .expect("missing binding mode");
if let ty::BindByReference(_) = bm {
// a bind-by-ref means that the base_ty will be the type of the ident itself,
}
}
- helper(self, expr, self.tables.expr_adjustments(expr))
+ helper(self, expr, self.typeck_results.expr_adjustments(expr))
}
crate fn cat_expr_adjusted(
let expr_ty = self.expr_ty(expr)?;
match expr.kind {
hir::ExprKind::Unary(hir::UnOp::UnDeref, ref e_base) => {
- if self.tables.is_method_call(expr) {
+ if self.typeck_results.is_method_call(expr) {
self.cat_overloaded_place(expr, e_base)
} else {
let base = self.cat_expr(&e_base)?;
hir::ExprKind::Field(ref base, _) => {
let base = self.cat_expr(&base)?;
debug!("cat_expr(cat_field): id={} expr={:?} base={:?}", expr.hir_id, expr, base);
- Ok(self.cat_projection(expr, base, expr_ty))
+
+ let field_idx = self
+ .typeck_results
+ .field_indices()
+ .get(expr.hir_id)
+ .cloned()
+ .expect("Field index not found");
+
+ Ok(self.cat_projection(
+ expr,
+ base,
+ expr_ty,
+ ProjectionKind::Field(field_idx as u32, VariantIdx::new(0)),
+ ))
}
hir::ExprKind::Index(ref base, _) => {
- if self.tables.is_method_call(expr) {
+ if self.typeck_results.is_method_call(expr) {
// If this is an index implemented by a method call, then it
// will include an implicit deref of the result.
// The call to index() returns a `&T` value, which
self.cat_overloaded_place(expr, base)
} else {
let base = self.cat_expr(&base)?;
- Ok(self.cat_projection(expr, base, expr_ty))
+ Ok(self.cat_projection(expr, base, expr_ty, ProjectionKind::Index))
}
}
hir::ExprKind::Path(ref qpath) => {
- let res = self.tables.qpath_res(qpath, expr.hir_id);
+ let res = self.typeck_results.qpath_res(qpath, expr.hir_id);
self.cat_res(expr.hir_id, expr.span, expr_ty, res)
}
node: &N,
base_place: PlaceWithHirId<'tcx>,
ty: Ty<'tcx>,
+ kind: ProjectionKind,
) -> PlaceWithHirId<'tcx> {
let mut projections = base_place.place.projections;
- projections.push(Projection { kind: ProjectionKind::Other, ty: ty });
+ projections.push(Projection { kind: kind, ty: ty });
let ret = PlaceWithHirId::new(
node.hir_id(),
base_place.place.base_ty,
self.cat_pattern_(place, pat, &mut op)
}
+ /// Returns the variant index for an ADT used within a Struct or TupleStruct pattern
+ /// Here `pat_hir_id` is the HirId of the pattern itself.
+ fn variant_index_for_adt(
+ &self,
+ qpath: &hir::QPath<'_>,
+ pat_hir_id: hir::HirId,
+ span: Span,
+ ) -> McResult<VariantIdx> {
+ let res = self.typeck_results.qpath_res(qpath, pat_hir_id);
+ let ty = self.typeck_results.node_type(pat_hir_id);
+ let adt_def = match ty.kind {
+ ty::Adt(adt_def, _) => adt_def,
+ _ => {
+ self.tcx()
+ .sess
+ .delay_span_bug(span, "struct or tuple struct pattern not applied to an ADT");
+ return Err(());
+ }
+ };
+
+ match res {
+ Res::Def(DefKind::Variant, variant_id) => Ok(adt_def.variant_index_with_id(variant_id)),
+ Res::Def(DefKind::Ctor(CtorOf::Variant, ..), variant_ctor_id) => {
+ Ok(adt_def.variant_index_with_ctor_id(variant_ctor_id))
+ }
+ Res::Def(DefKind::Ctor(CtorOf::Struct, ..), _)
+ | Res::Def(DefKind::Struct | DefKind::Union | DefKind::TyAlias | DefKind::AssocTy, _)
+ | Res::SelfCtor(..)
+ | Res::SelfTy(..) => {
+                // Structs and Unions only have one variant.
+ Ok(VariantIdx::new(0))
+ }
+ _ => bug!("expected ADT path, found={:?}", res),
+ }
+ }
+
+ /// Returns the total number of fields in an ADT variant used within a pattern.
+ /// Here `pat_hir_id` is the HirId of the pattern itself.
+ fn total_fields_in_adt_variant(
+ &self,
+ pat_hir_id: hir::HirId,
+ variant_index: VariantIdx,
+ span: Span,
+ ) -> McResult<usize> {
+ let ty = self.typeck_results.node_type(pat_hir_id);
+ match ty.kind {
+ ty::Adt(adt_def, _) => Ok(adt_def.variants[variant_index].fields.len()),
+ _ => {
+ self.tcx()
+ .sess
+ .delay_span_bug(span, "struct or tuple struct pattern not applied to an ADT");
+ return Err(());
+ }
+ }
+ }
+
+ /// Returns the total number of fields in a tuple used within a Tuple pattern.
+ /// Here `pat_hir_id` is the HirId of the pattern itself.
+ fn total_fields_in_tuple(&self, pat_hir_id: hir::HirId, span: Span) -> McResult<usize> {
+ let ty = self.typeck_results.node_type(pat_hir_id);
+ match ty.kind {
+ ty::Tuple(substs) => Ok(substs.len()),
+ _ => {
+ self.tcx().sess.delay_span_bug(span, "tuple pattern not applied to a tuple");
+ return Err(());
+ }
+ }
+ }
+
// FIXME(#19596) This is a workaround, but there should be a better way to do this
fn cat_pattern_<F>(
&self,
// Then we see that to get the same result, we must start with
// `deref { deref { place_foo }}` instead of `place_foo` since the pattern is now `Some(x,)`
// and not `&&Some(x,)`, even though its assigned type is that of `&&Some(x,)`.
- for _ in 0..self.tables.pat_adjustments().get(pat.hir_id).map(|v| v.len()).unwrap_or(0) {
+ for _ in
+ 0..self.typeck_results.pat_adjustments().get(pat.hir_id).map(|v| v.len()).unwrap_or(0)
+ {
debug!("cat_pattern: applying adjustment to place_with_id={:?}", place_with_id);
place_with_id = self.cat_deref(pat, place_with_id)?;
}
op(&place_with_id, pat);
match pat.kind {
- PatKind::TupleStruct(_, ref subpats, _) | PatKind::Tuple(ref subpats, _) => {
- // S(p1, ..., pN) or (p1, ..., pN)
- for subpat in subpats.iter() {
+ PatKind::Tuple(ref subpats, dots_pos) => {
+ // (p1, ..., pN)
+ let total_fields = self.total_fields_in_tuple(pat.hir_id, pat.span)?;
+
+ for (i, subpat) in subpats.iter().enumerate_and_adjust(total_fields, dots_pos) {
let subpat_ty = self.pat_ty_adjusted(&subpat)?;
- let sub_place = self.cat_projection(pat, place_with_id.clone(), subpat_ty);
+ let projection_kind = ProjectionKind::Field(i as u32, VariantIdx::new(0));
+ let sub_place =
+ self.cat_projection(pat, place_with_id.clone(), subpat_ty, projection_kind);
self.cat_pattern_(sub_place, &subpat, op)?;
}
}
- PatKind::Struct(_, field_pats, _) => {
+ PatKind::TupleStruct(ref qpath, ref subpats, dots_pos) => {
+ // S(p1, ..., pN)
+ let variant_index = self.variant_index_for_adt(qpath, pat.hir_id, pat.span)?;
+ let total_fields =
+ self.total_fields_in_adt_variant(pat.hir_id, variant_index, pat.span)?;
+
+ for (i, subpat) in subpats.iter().enumerate_and_adjust(total_fields, dots_pos) {
+ let subpat_ty = self.pat_ty_adjusted(&subpat)?;
+ let projection_kind = ProjectionKind::Field(i as u32, variant_index);
+ let sub_place =
+ self.cat_projection(pat, place_with_id.clone(), subpat_ty, projection_kind);
+ self.cat_pattern_(sub_place, &subpat, op)?;
+ }
+ }
+
+ PatKind::Struct(ref qpath, field_pats, _) => {
// S { f1: p1, ..., fN: pN }
+
+ let variant_index = self.variant_index_for_adt(qpath, pat.hir_id, pat.span)?;
+
for fp in field_pats {
let field_ty = self.pat_ty_adjusted(&fp.pat)?;
- let field_place = self.cat_projection(pat, place_with_id.clone(), field_ty);
+ let field_index = self
+ .typeck_results
+ .field_indices()
+ .get(fp.hir_id)
+ .cloned()
+ .expect("no index for a field");
+
+ let field_place = self.cat_projection(
+ pat,
+ place_with_id.clone(),
+ field_ty,
+ ProjectionKind::Field(field_index as u32, variant_index),
+ );
self.cat_pattern_(field_place, &fp.pat, op)?;
}
}
return Err(());
}
};
- let elt_place = self.cat_projection(pat, place_with_id.clone(), element_ty);
+ let elt_place = self.cat_projection(
+ pat,
+ place_with_id.clone(),
+ element_ty,
+ ProjectionKind::Index,
+ );
for before_pat in before {
self.cat_pattern_(elt_place.clone(), &before_pat, op)?;
}
if let Some(ref slice_pat) = *slice {
let slice_pat_ty = self.pat_ty_adjusted(&slice_pat)?;
- let slice_place = self.cat_projection(pat, place_with_id, slice_pat_ty);
+ let slice_place = self.cat_projection(
+ pat,
+ place_with_id,
+ slice_pat_ty,
+ ProjectionKind::Subslice,
+ );
self.cat_pattern_(slice_place, &slice_pat, op)?;
}
for after_pat in after {
use rustc_middle::ty;
use rustc_mir::const_eval::is_min_const_fn;
use rustc_span::hygiene::MacroKind;
-use rustc_span::symbol::Symbol;
+use rustc_span::symbol::{sym, Symbol};
use rustc_span::Span;
use crate::clean::{self, GetDefId, ToSource, TypeKind};
let generics = (cx.tcx.generics_of(did), predicates).clean(cx);
let generics = filter_non_trait_generics(did, generics);
let (generics, supertrait_bounds) = separate_supertrait_bounds(generics);
+ let is_spotlight = load_attrs(cx, did).clean(cx).has_doc_flag(sym::spotlight);
let is_auto = cx.tcx.trait_is_auto(did);
clean::Trait {
auto: auto_trait,
generics,
items: trait_items,
bounds: supertrait_bounds,
+ is_spotlight,
is_auto,
}
}
return;
}
}
+
+ // Skip foreign unstable traits from lists of trait implementations and
+ // such. This helps prevent dependencies of the standard library, for
+ // example, from getting documented as "traits `u32` implements" which
+ // isn't really too helpful.
+ if let Some(trait_did) = associated_trait {
+ if let Some(stab) = cx.tcx.lookup_stability(trait_did.def_id) {
+ if stab.level.is_unstable() {
+ return;
+ }
+ }
+ }
}
let for_ = if let Some(did) = did.as_local() {
impl Clean<Item> for doctree::Trait<'_> {
fn clean(&self, cx: &DocContext<'_>) -> Item {
let attrs = self.attrs.clean(cx);
+ let is_spotlight = attrs.has_doc_flag(sym::spotlight);
Item {
name: Some(self.name.clean(cx)),
attrs,
items: self.items.iter().map(|ti| ti.clean(cx)).collect(),
generics: self.generics.clean(cx),
bounds: self.bounds.clean(cx),
+ is_spotlight,
is_auto: self.is_auto.clean(cx),
}),
}
})
}
- /// Enforce the format of attributes inside `#[doc(...)]`.
- pub fn check_doc_attributes(
- diagnostic: &::rustc_errors::Handler,
- mi: &ast::MetaItem,
- ) -> Option<(String, String)> {
- mi.meta_item_list().and_then(|list| {
- for meta in list {
- if meta.check_name(sym::alias) {
- if !meta.is_value_str()
- || meta
- .value_str()
- .map(|s| s.to_string())
- .unwrap_or_else(String::new)
- .is_empty()
- {
- diagnostic.span_err(
- meta.span(),
- "doc alias attribute expects a string: #[doc(alias = \"0\")]",
- );
- }
- }
- }
-
- None
- })
- }
-
pub fn has_doc_flag(&self, flag: Symbol) -> bool {
for attr in &self.other_attrs {
if !attr.check_name(sym::doc) {
} else {
if attr.check_name(sym::doc) {
if let Some(mi) = attr.meta() {
- Attributes::check_doc_attributes(&diagnostic, &mi);
if let Some(cfg_mi) = Attributes::extract_cfg(&mi) {
// Extracted #[doc(cfg(...))]
match Cfg::parse(cfg_mi) {
pub items: Vec<Item>,
pub generics: Generics,
pub bounds: Vec<GenericBound>,
+ pub is_spotlight: bool,
pub is_auto: bool,
}
use rustc_errors::emitter::{Emitter, EmitterWriter};
use rustc_errors::json::JsonEmitter;
use rustc_feature::UnstableFeatures;
-use rustc_hir::def::Namespace::TypeNS;
+use rustc_hir::def::{Namespace::TypeNS, Res};
use rustc_hir::def_id::{CrateNum, DefId, DefIndex, LocalDefId, CRATE_DEF_INDEX, LOCAL_CRATE};
use rustc_hir::HirId;
+use rustc_hir::{
+ intravisit::{self, NestedVisitorMap, Visitor},
+ Path,
+};
use rustc_interface::interface;
+use rustc_middle::hir::map::Map;
use rustc_middle::middle::cstore::CrateStore;
use rustc_middle::middle::privacy::AccessLevels;
use rustc_middle::ty::{Ty, TyCtxt};
let missing_doc_example = rustc_lint::builtin::MISSING_DOC_CODE_EXAMPLES.name;
let private_doc_tests = rustc_lint::builtin::PRIVATE_DOC_TESTS.name;
let no_crate_level_docs = rustc_lint::builtin::MISSING_CRATE_LEVEL_DOCS.name;
- let invalid_codeblock_attribute_name = rustc_lint::builtin::INVALID_CODEBLOCK_ATTRIBUTES.name;
+ let invalid_codeblock_attributes_name = rustc_lint::builtin::INVALID_CODEBLOCK_ATTRIBUTES.name;
// In addition to those specific lints, we also need to allow those given through
// command line, otherwise they'll get ignored and we don't want that.
missing_doc_example.to_owned(),
private_doc_tests.to_owned(),
no_crate_level_docs.to_owned(),
- invalid_codeblock_attribute_name.to_owned(),
+ invalid_codeblock_attributes_name.to_owned(),
];
let (lint_opts, lint_caps) = init_lints(allowed_lints, lint_opts, |lint| {
if lint.name == intra_link_resolution_failure_name
- || lint.name == invalid_codeblock_attribute_name
+ || lint.name == invalid_codeblock_attributes_name
{
None
} else {
crate_name,
lint_caps,
register_lints: None,
- override_queries: None,
+ override_queries: Some(|_sess, providers, _external_providers| {
+ // Most lints will require typechecking, so just don't run them.
+ providers.lint_mod = |_, _| {};
+ // Prevent `rustc_typeck::check_crate` from calling `typeck` on all bodies.
+ providers.typeck_item_bodies = |_, _| {};
+            // Hack so that `used_trait_imports` won't try to call typeck.
+ providers.used_trait_imports = |_, _| {
+ lazy_static! {
+ static ref EMPTY_SET: FxHashSet<LocalDefId> = FxHashSet::default();
+ }
+ &EMPTY_SET
+ };
+            // If typeck does end up being called, don't ICE even when there were name resolution errors.
+ providers.typeck = move |tcx, def_id| {
+ // Closures' tables come from their outermost function,
+ // as they are part of the same "inference environment".
+ // This avoids emitting errors for the parent twice (see similar code in `typeck_with_fallback`)
+ let outer_def_id = tcx.closure_base_def_id(def_id.to_def_id()).expect_local();
+ if outer_def_id != def_id {
+ return tcx.typeck(outer_def_id);
+ }
+
+ let hir = tcx.hir();
+ let body = hir.body(hir.body_owned_by(hir.as_local_hir_id(def_id)));
+ debug!("visiting body for {:?}", def_id);
+ EmitIgnoredResolutionErrors::new(tcx).visit_body(body);
+ (rustc_interface::DEFAULT_QUERY_PROVIDERS.typeck)(tcx, def_id)
+ };
+ }),
registry: rustc_driver::diagnostics_registry(),
};
DUMMY_SP,
extern_name,
TypeNS,
- LocalDefId { local_def_index: CRATE_DEF_INDEX },
+ LocalDefId { local_def_index: CRATE_DEF_INDEX }.to_def_id(),
)
.unwrap_or_else(|()| {
panic!("Unable to resolve external crate {}", extern_name)
let mut global_ctxt = abort_on_err(queries.global_ctxt(), sess).take();
global_ctxt.enter(|tcx| {
- tcx.analysis(LOCAL_CRATE).ok();
-
- // Abort if there were any errors so far
- sess.abort_if_errors();
+ // Certain queries assume that some checks were run elsewhere
+ // (see https://github.com/rust-lang/rust/pull/73566#issuecomment-656954425),
+ // so type-check everything other than function bodies in this crate before running lints.
+ // NOTE: this does not call `tcx.analysis()` so that we won't
+ // typeck function bodies or run the default rustc lints.
+ // (see `override_queries` in the `config`)
+ let _ = rustc_typeck::check_crate(tcx);
+ tcx.sess.abort_if_errors();
+ sess.time("missing_docs", || {
+ rustc_lint::check_crate(tcx, rustc_lint::builtin::MissingDoc::new);
+ });
let access_levels = tcx.privacy_access_levels(LOCAL_CRATE);
// Convert from a HirId set to a DefId set since we don't always have easy access
})
}
+/// Due to https://github.com/rust-lang/rust/pull/73566,
+/// the name resolution pass may find errors that are never emitted.
+/// If typeck is called after this happens, then we'll get an ICE:
+/// 'Res::Error found but not reported'. To avoid this, emit the errors now.
+struct EmitIgnoredResolutionErrors<'tcx> {
+ tcx: TyCtxt<'tcx>,
+}
+
+impl<'tcx> EmitIgnoredResolutionErrors<'tcx> {
+ fn new(tcx: TyCtxt<'tcx>) -> Self {
+ Self { tcx }
+ }
+}
+
+impl<'tcx> Visitor<'tcx> for EmitIgnoredResolutionErrors<'tcx> {
+ type Map = Map<'tcx>;
+
+ fn nested_visit_map(&mut self) -> NestedVisitorMap<Self::Map> {
+        // We need to recurse into nested closures,
+        // since those will fall back to the parent for type checking.
+ NestedVisitorMap::OnlyBodies(self.tcx.hir())
+ }
+
+ fn visit_path(&mut self, path: &'tcx Path<'_>, _id: HirId) {
+ debug!("visiting path {:?}", path);
+ if path.res == Res::Err {
+ // We have less context here than in rustc_resolve,
+ // so we can only emit the name and span.
+ // However we can give a hint that rustc_resolve will have more info.
+ let label = format!(
+ "could not resolve path `{}`",
+ path.segments
+ .iter()
+ .map(|segment| segment.ident.as_str().to_string())
+ .collect::<Vec<_>>()
+ .join("::")
+ );
+ let mut err = rustc_errors::struct_span_err!(
+ self.tcx.sess,
+ path.span,
+ E0433,
+ "failed to resolve: {}",
+ label
+ );
+ err.span_label(path.span, label);
+ err.note("this error was originally ignored because you are running `rustdoc`");
+ err.note("try running again with `rustc` or `cargo check` and you may get a more detailed error");
+ err.emit();
+ }
+ // We could have an outer resolution that succeeded,
+ // but with generic parameters that failed.
+ // Recurse into the segments so we catch those too.
+ intravisit::walk_path(self, path);
+ }
+}
+
/// `DefId` or parameter index (`ty::ParamTy.index`) of a synthetic type parameter
/// for `impl Trait` in argument position.
#[derive(Clone, Copy, PartialEq, Eq, Hash, PartialOrd, Ord)]
let bc = format!(
"{}{}",
bc,
- Markdown(&m_bc, &[], id_map, codes, edition, playground).to_string()
+ Markdown(&m_bc, &[], id_map, codes, edition, playground).into_string()
);
let ac = load_external_files(after_content, diag)?;
let m_ac = load_external_files(md_after_content, diag)?;
let ac = format!(
"{}{}",
ac,
- Markdown(&m_ac, &[], id_map, codes, edition, playground).to_string()
+ Markdown(&m_ac, &[], id_map, codes, edition, playground).into_string()
);
Some(ExternalHtml { in_header: ih, before_content: bc, after_content: ac })
}
Buffer { for_html: false, buffer: String::new() }
}
+ crate fn is_empty(&self) -> bool {
+ self.buffer.is_empty()
+ }
+
crate fn into_inner(self) -> String {
self.buffer
}
+ crate fn insert_str(&mut self, idx: usize, s: &str) {
+ self.buffer.insert_str(idx, s);
+ }
+
+ crate fn push_str(&mut self, s: &str) {
+ self.buffer.push_str(s);
+ }
+
// Intended for consumption by write! and writeln! (std::fmt) but without
// the fmt::Result return type imposed by fmt::Write (and avoiding the trait
// import).
use std::io::prelude::*;
use rustc_ast::token::{self, Token};
+use rustc_data_structures::sync::Lrc;
use rustc_parse::lexer;
use rustc_session::parse::ParseSess;
+use rustc_span::hygiene::SyntaxContext;
use rustc_span::source_map::SourceMap;
use rustc_span::symbol::{kw, sym};
-use rustc_span::{FileName, Span};
+use rustc_span::{BytePos, FileName, SourceFile, Span};
/// Highlights `src`, returning the HTML output.
pub fn render_with_highlighting(
- src: &str,
+ src: String,
class: Option<&str>,
playground_button: Option<&str>,
tooltip: Option<(&str, &str)>,
}
let sess = ParseSess::with_silent_emitter();
- let sf = sess
+ let source_file = sess
.source_map()
- .new_source_file(FileName::Custom(String::from("rustdoc-highlighting")), src.to_owned());
+ .new_source_file(FileName::Custom(String::from("rustdoc-highlighting")), src);
+
+ let classifier_source_file = Lrc::clone(&source_file);
let highlight_result = rustc_driver::catch_fatal_errors(|| {
- let lexer = lexer::StringReader::new(&sess, sf, None);
- let mut classifier = Classifier::new(lexer, sess.source_map());
+ let mut classifier = Classifier::new(&sess, classifier_source_file);
let mut highlighted_source = vec![];
if classifier.write_source(&mut highlighted_source).is_err() {
write_footer(&mut out, playground_button).unwrap();
}
Err(()) => {
+ // Get the source back out of the source map to avoid a copy in the happy path.
+ let span =
+ Span::new(BytePos(0), BytePos(source_file.byte_length()), SyntaxContext::root());
+ let src = sess
+ .source_map()
+ .span_to_snippet(span)
+ .expect("could not retrieve snippet from artificial source file");
+
// If errors are encountered while trying to highlight, just emit
// the unhighlighted source.
- write!(out, "<pre><code>{}</code></pre>", Escape(src)).unwrap();
+ write!(out, "<pre><code>{}</code></pre>", Escape(&src)).unwrap();
}
}
/// Processes a program (nested in the internal `lexer`), classifying strings of
/// text by highlighting category (`Class`). Calls out to a `Writer` to write
/// each span of text in sequence.
-struct Classifier<'a> {
- lexer: lexer::StringReader<'a>,
+struct Classifier<'sess> {
+ lexer: lexer::StringReader<'sess>,
peek_token: Option<Token>,
- source_map: &'a SourceMap,
+ source_map: &'sess SourceMap,
// State of the classifier.
in_attribute: bool,
}
}
+#[derive(Debug)]
enum HighlightError {
LexError,
IoError(io::Error),
}
}
-impl<'a> Classifier<'a> {
- fn new(lexer: lexer::StringReader<'a>, source_map: &'a SourceMap) -> Classifier<'a> {
+impl<'sess> Classifier<'sess> {
+ fn new(sess: &ParseSess, source_file: Lrc<SourceFile>) -> Classifier<'_> {
+ let lexer = lexer::StringReader::new(sess, source_file, None);
+
Classifier {
lexer,
peek_token: None,
- source_map,
+ source_map: sess.source_map(),
in_attribute: false,
in_macro: false,
in_macro_nonterminal: false,
/// source.
fn write_source<W: Writer>(&mut self, out: &mut W) -> Result<(), HighlightError> {
loop {
- let next = self.try_next_token()?;
+ let mut next = self.try_next_token()?;
if next == token::Eof {
break;
}
+ // Glue any tokens that need to be glued.
+ if let Some(joint) = next.glue(self.peek()?) {
+ next = joint;
+ let _ = self.try_next_token()?;
+ }
+
self.write_token(out, next)?;
}
fn write_footer(out: &mut dyn Write, playground_button: Option<&str>) -> io::Result<()> {
write!(out, "</pre>{}</div>\n", if let Some(button) = playground_button { button } else { "" })
}
+
+#[cfg(test)]
+mod tests;
--- /dev/null
+use rustc_ast::attr::with_session_globals;
+use rustc_session::parse::ParseSess;
+use rustc_span::edition::Edition;
+use rustc_span::FileName;
+
+use super::Classifier;
+
+fn highlight(src: &str) -> String {
+ let mut out = vec![];
+
+ with_session_globals(Edition::Edition2018, || {
+ let sess = ParseSess::with_silent_emitter();
+ let source_file = sess.source_map().new_source_file(
+ FileName::Custom(String::from("rustdoc-highlighting")),
+ src.to_owned(),
+ );
+
+ let mut classifier = Classifier::new(&sess, source_file);
+ classifier.write_source(&mut out).unwrap();
+ });
+
+ String::from_utf8(out).unwrap()
+}
+
+#[test]
+fn function() {
+ assert_eq!(
+ highlight("fn main() {}"),
+ r#"<span class="kw">fn</span> <span class="ident">main</span>() {}"#,
+ );
+}
+
+#[test]
+fn statement() {
+ assert_eq!(
+ highlight("let foo = true;"),
+ concat!(
+ r#"<span class="kw">let</span> <span class="ident">foo</span> "#,
+ r#"<span class="op">=</span> <span class="bool-val">true</span>;"#,
+ ),
+ );
+}
+
+#[test]
+fn inner_attr() {
+ assert_eq!(
+ highlight(r##"#![crate_type = "lib"]"##),
+ concat!(
+ r##"<span class="attribute">#![<span class="ident">crate_type</span> "##,
+ r##"<span class="op">=</span> <span class="string">"lib"</span>]</span>"##,
+ ),
+ );
+}
+
+#[test]
+fn outer_attr() {
+ assert_eq!(
+ highlight(r##"#[cfg(target_os = "linux")]"##),
+ concat!(
+ r##"<span class="attribute">#[<span class="ident">cfg</span>("##,
+ r##"<span class="ident">target_os</span> <span class="op">=</span> "##,
+ r##"<span class="string">"linux"</span>)]</span>"##,
+ ),
+ );
+}
+
+#[test]
+fn mac() {
+ assert_eq!(
+ highlight("mac!(foo bar)"),
+ concat!(
+ r#"<span class="macro">mac</span><span class="macro">!</span>("#,
+ r#"<span class="ident">foo</span> <span class="ident">bar</span>)"#,
+ ),
+ );
+}
+
+// Regression test for #72684
+#[test]
+fn andand() {
+ assert_eq!(highlight("&&"), r#"<span class="op">&&</span>"#);
+}
//! let s = "My *markdown* _text_";
//! let mut id_map = IdMap::new();
//! let md = Markdown(s, &[], &mut id_map, ErrorCodes::Yes, Edition::Edition2015, &None);
-//! let html = md.to_string();
+//! let html = md.into_string();
//! // ... something using html
//! ```
if let Some((s1, s2)) = tooltip {
s.push_str(&highlight::render_with_highlighting(
- &text,
+ text,
Some(&format!(
"rust-example-rendered{}",
if ignore != Ignore::None {
Some(Event::Html(s.into()))
} else {
s.push_str(&highlight::render_with_highlighting(
- &text,
+ text,
Some(&format!(
"rust-example-rendered{}",
if ignore != Ignore::None {
}
impl Markdown<'_> {
- pub fn to_string(self) -> String {
+ pub fn into_string(self) -> String {
let Markdown(md, links, mut ids, codes, edition, playground) = self;
// This is actually common enough to special-case
}
impl MarkdownWithToc<'_> {
- pub fn to_string(self) -> String {
+ pub fn into_string(self) -> String {
let MarkdownWithToc(md, mut ids, codes, edition, playground) = self;
let p = Parser::new_ext(md, opts());
}
impl MarkdownHtml<'_> {
- pub fn to_string(self) -> String {
+ pub fn into_string(self) -> String {
let MarkdownHtml(md, mut ids, codes, edition, playground) = self;
// This is actually common enough to special-case
}
impl MarkdownSummaryLine<'_> {
- pub fn to_string(self) -> String {
+ pub fn into_string(self) -> String {
let MarkdownSummaryLine(md, links) = self;
// This is actually common enough to special-case
if md.is_empty() {
fn t(input: &str, expect: &str) {
let mut map = IdMap::new();
let output =
- Markdown(input, &[], &mut map, ErrorCodes::Yes, DEFAULT_EDITION, &None).to_string();
+ Markdown(input, &[], &mut map, ErrorCodes::Yes, DEFAULT_EDITION, &None).into_string();
assert_eq!(output, expect, "original: {}", input);
}
fn test_header_ids_multiple_blocks() {
let mut map = IdMap::new();
fn t(map: &mut IdMap, input: &str, expect: &str) {
- let output = Markdown(input, &[], map, ErrorCodes::Yes, DEFAULT_EDITION, &None).to_string();
+ let output =
+ Markdown(input, &[], map, ErrorCodes::Yes, DEFAULT_EDITION, &None).into_string();
assert_eq!(output, expect, "original: {}", input);
}
fn t(input: &str, expect: &str) {
let mut idmap = IdMap::new();
let output =
- MarkdownHtml(input, &mut idmap, ErrorCodes::Yes, DEFAULT_EDITION, &None).to_string();
+ MarkdownHtml(input, &mut idmap, ErrorCodes::Yes, DEFAULT_EDITION, &None).into_string();
assert_eq!(output, expect, "original: {}", input);
}
.into(),
("auto-hide-attributes", "Auto-hide item attributes.", true).into(),
("auto-hide-method-docs", "Auto-hide item methods' documentation", false).into(),
- ("auto-hide-trait-implementations", "Auto-hide trait implementations documentation", true)
+ ("auto-hide-trait-implementations", "Auto-hide trait implementation documentation", true)
.into(),
+ ("auto-collapse-implementors", "Auto-hide implementors of a trait", true).into(),
("go-to-only-result", "Directly go to item in search if there is only one result", false)
.into(),
("line-numbers", "Show line numbers on code examples", false).into(),
cx.shared.edition,
&cx.shared.playground
)
- .to_string()
+ .into_string()
)
}
</tr>",
name = *myitem.name.as_ref().unwrap(),
stab_tags = stability_tags(myitem),
- docs = MarkdownSummaryLine(doc_value, &myitem.links()).to_string(),
+ docs = MarkdownSummaryLine(doc_value, &myitem.links()).into_string(),
class = myitem.type_(),
add = add,
stab = stab.unwrap_or_else(String::new),
tags += &tag_html("deprecated", message);
}
- if let Some(stab) = item.stability.as_ref().filter(|s| s.level == stability::Unstable) {
- if stab.feature.as_deref() == Some("rustc_private") {
- tags += &tag_html("internal", "Internal");
- } else {
- tags += &tag_html("unstable", "Experimental");
- }
+ // The "rustc_private" crates are permanently unstable so it makes no sense
+ // to render "unstable" everywhere.
+ if item
+ .stability
+ .as_ref()
+ .map(|s| s.level == stability::Unstable && s.feature.as_deref() != Some("rustc_private"))
+ == Some(true)
+ {
+ tags += &tag_html("unstable", "Experimental");
}
if let Some(ref cfg) = item.attrs.cfg {
cx.shared.edition,
&cx.shared.playground,
);
- message.push_str(&format!(": {}", html.to_string()));
+ message.push_str(&format!(": {}", html.into_string()));
}
stability.push(format!(
"<div class='stab deprecated'><span class='emoji'>👎</span> {}</div>",
));
}
- if let Some(stab) = item.stability.as_ref().filter(|stab| stab.level == stability::Unstable) {
- let is_rustc_private = stab.feature.as_deref() == Some("rustc_private");
-
- let mut message = if is_rustc_private {
- "<span class='emoji'>⚙️</span> This is an internal compiler API."
- } else {
- "<span class='emoji'>🔬</span> This is a nightly-only experimental API."
- }
- .to_owned();
+ // Render unstable items. But don't render "rustc_private" crates (internal compiler crates).
+ // Those crates are permanently unstable so it makes no sense to render "unstable" everywhere.
+ if let Some(stab) = item.stability.as_ref().filter(|stab| {
+ stab.level == stability::Unstable && stab.feature.as_deref() != Some("rustc_private")
+ }) {
+ let mut message =
+ "<span class='emoji'>🔬</span> This is a nightly-only experimental API.".to_owned();
if let Some(feature) = stab.feature.as_deref() {
let mut feature = format!("<code>{}</code>", Escape(&feature));
}
if let Some(unstable_reason) = &stab.unstable_reason {
- // Provide a more informative message than the compiler help.
- let unstable_reason = if is_rustc_private {
- "This crate is being loaded from the sysroot, a permanently unstable location \
- for private compiler dependencies. It is not intended for general use. Prefer \
- using a public version of this crate from \
- [crates.io](https://crates.io) via [`Cargo.toml`]\
- (https://doc.rust-lang.org/cargo/reference/specifying-dependencies.html)."
- } else {
- unstable_reason
- };
-
let mut ids = cx.id_map.borrow_mut();
message = format!(
"<details><summary>{}</summary>{}</details>",
cx.shared.edition,
&cx.shared.playground,
)
- .to_string()
+ .into_string()
);
}
- let class = if is_rustc_private { "internal" } else { "unstable" };
- stability.push(format!("<div class='stab {}'>{}</div>", class, message));
+ stability.push(format!("<div class='stab unstable'>{}</div>", message));
}
if let Some(ref cfg) = item.attrs.cfg {
write!(
w,
"{vis}{constness}{asyncness}{unsafety}{abi}fn \
- {name}{generics}{decl}{where_clause}</pre>",
+ {name}{generics}{decl}{spotlight}{where_clause}</pre>",
vis = it.visibility.print_with_space(),
constness = f.header.constness.print_with_space(),
asyncness = f.header.asyncness.print_with_space(),
generics = f.generics.print(),
where_clause = WhereClause { gens: &f.generics, indent: 0, end_newline: true },
decl = Function { decl: &f.decl, header_len, indent: 0, asyncness: f.header.asyncness }
- .print()
+ .print(),
+ spotlight = spotlight_decl(&f.decl),
);
document(w, cx, it)
}
let name = m.name.as_ref().unwrap();
let item_type = m.type_();
let id = cx.derive_id(format!("{}.{}", item_type, name));
- write!(w, "<h3 id='{id}' class='method'><code>", id = id);
+ write!(w, "<h3 id='{id}' class='method'><code>", id = id,);
render_assoc_item(w, m, AssocItemLink::Anchor(Some(&id)), ItemType::Impl);
write!(w, "</code>");
render_stability_since(w, m, t);
write!(
w,
"{}{}{}{}{}{}{}fn <a href='{href}' class='fnname'>{name}</a>\
- {generics}{decl}{where_clause}",
+ {generics}{decl}{spotlight}{where_clause}",
if parent == ItemType::Trait { " " } else { "" },
meth.visibility.print_with_space(),
header.constness.print_with_space(),
name = name,
generics = g.print(),
decl = Function { decl: d, header_len, indent, asyncness: header.asyncness }.print(),
+ spotlight = spotlight_decl(&d),
where_clause = WhereClause { gens: g, indent, end_newline }
)
}
}
}
+fn spotlight_decl(decl: &clean::FnDecl) -> String {
+ let mut out = Buffer::html();
+ let mut trait_ = String::new();
+
+ if let Some(did) = decl.output.def_id() {
+ let c = cache();
+ if let Some(impls) = c.impls.get(&did) {
+ for i in impls {
+ let impl_ = i.inner_impl();
+ if impl_.trait_.def_id().map_or(false, |d| c.traits[&d].is_spotlight) {
+ if out.is_empty() {
+ out.push_str(&format!(
+ "<h3 class=\"important\">Important traits for {}</h3>\
+ <code class=\"content\">",
+ impl_.for_.print()
+ ));
+ trait_.push_str(&impl_.for_.print().to_string());
+ }
+
+                // Use the "where" class here to make it small.
+ out.push_str(&format!(
+ "<span class=\"where fmt-newline\">{}</span>",
+ impl_.print()
+ ));
+ let t_did = impl_.trait_.def_id().unwrap();
+ for it in &impl_.items {
+ if let clean::TypedefItem(ref tydef, _) = it.inner {
+ out.push_str("<span class=\"where fmt-newline\"> ");
+ assoc_type(
+ &mut out,
+ it,
+ &[],
+ Some(&tydef.type_),
+ AssocItemLink::GotoSource(t_did, &FxHashSet::default()),
+ "",
+ );
+ out.push_str(";</span>");
+ }
+ }
+ }
+ }
+ }
+ }
+
+ if !out.is_empty() {
+ out.insert_str(
+ 0,
+ "<span class=\"important-traits\"><span class=\"important-traits-tooltip\">ⓘ<div class='important-traits-tooltiptext'><span class=\"docblock\">"
+
+ );
+ out.push_str("</code></span></div></span></span>");
+ }
+
+ out.into_inner()
+}
+
fn render_impl(
w: &mut Buffer,
cx: &Context,
cx.shared.edition,
&cx.shared.playground
)
- .to_string()
+ .into_string()
);
}
}
// Only render when the method is not static or we allow static methods
if render_method_item {
let id = cx.derive_id(format!("{}.{}", item_type, name));
- write!(w, "<h4 id='{}' class=\"{}{}\"><code>", id, item_type, extra_class);
+ write!(w, "<h4 id='{}' class=\"{}{}\">", id, item_type, extra_class);
+ write!(w, "<code>");
render_assoc_item(w, item, link.anchor(&id), ItemType::Impl);
write!(w, "</code>");
render_stability_since_raw(w, item.stable_since(), outer_version);
fn item_macro(w: &mut Buffer, cx: &Context, it: &clean::Item, t: &clean::Macro) {
wrap_into_docblock(w, |w| {
- w.write_str(&highlight::render_with_highlighting(&t.source, Some("macro"), None, None))
+ w.write_str(&highlight::render_with_highlighting(
+ t.source.clone(),
+ Some("macro"),
+ None,
+ None,
+ ))
});
document(w, cx, it)
}
return Ok(());
}
- let contents = match fs::read_to_string(&p) {
+ let mut contents = match fs::read_to_string(&p) {
Ok(contents) => contents,
Err(e) => {
return Err(Error::new(e, &p));
};
// Remove the utf-8 BOM if any
- let contents =
- if contents.starts_with("\u{feff}") { &contents[3..] } else { &contents[..] };
+ if contents.starts_with("\u{feff}") {
+ contents.drain(..3);
+ }
// Create the intermediate directories
let mut cur = self.dst.clone();
&self.scx.layout,
&page,
"",
- |buf: &mut _| print_src(buf, &contents),
+ |buf: &mut _| print_src(buf, contents),
&self.scx.style_files,
);
self.scx.fs.write(&cur, v.as_bytes())?;
/// Wrapper struct to render the source code of a file. This will do things like
/// adding line numbers to the left-hand side.
-fn print_src(buf: &mut Buffer, s: &str) {
+fn print_src(buf: &mut Buffer, s: String) {
let lines = s.lines().count();
let mut cols = 0;
let mut tmp = lines;
relatedDoc = relatedDoc.nextElementSibling;
}
- if ((!relatedDoc && hasClass(docblock, "docblock") === false) ||
- (pageId && document.getElementById(pageId))) {
+ if (!relatedDoc && hasClass(docblock, "docblock") === false) {
return;
}
(function() {
var toggle = createSimpleToggle(false);
var hideMethodDocs = getCurrentValue("rustdoc-auto-hide-method-docs") === "true";
+ var hideImplementors = getCurrentValue("rustdoc-auto-collapse-implementors") !== "false";
var pageId = getPageId();
var func = function(e) {
if (hasClass(e, "impl") &&
(next.getElementsByClassName("method").length > 0 ||
next.getElementsByClassName("associatedconstant").length > 0)) {
- insertAfter(toggle.cloneNode(true), e.childNodes[e.childNodes.length - 1]);
+ var newToggle = toggle.cloneNode(true);
+ insertAfter(newToggle, e.childNodes[e.childNodes.length - 1]);
+                // Unless the "auto-collapse implementors" option is explicitly set to
+                // false, collapse all implementors.
+ if (hideImplementors === true && e.parentNode.id === "implementors-list") {
+ collapseDocs(newToggle, "hide", pageId);
+ }
}
};
});
}());
+ onEachLazy(document.getElementsByClassName("important-traits"), function(e) {
+ e.onclick = function() {
+ this.getElementsByClassName('important-traits-tooltiptext')[0]
+ .classList.toggle("force-tooltip");
+ };
+ });
+
// In the search display, allows to switch between tabs.
function printTab(nb) {
if (nb === 0 || nb === 1 || nb === 2) {
+/* ignore-tidy-linelength */
/*! normalize.css v3.0.0 | MIT License | git.io/normalize */html{font-family:sans-serif;-ms-text-size-adjust:100%;-webkit-text-size-adjust:100%}body{margin:0}article,aside,details,figcaption,figure,footer,header,hgroup,main,nav,section,summary{display:block}audio,canvas,progress,video{display:inline-block;vertical-align:baseline}audio:not([controls]){display:none;height:0}[hidden],template{display:none}a{background:transparent}a:active,a:hover{outline:0}abbr[title]{border-bottom:1px dotted}b,strong{font-weight:bold}dfn{font-style:italic}h1{font-size:2em;margin:.67em 0}mark{background:#ff0;color:#000}small{font-size:80%}sub,sup{font-size:75%;line-height:0;position:relative;vertical-align:baseline}sup{top:-0.5em}sub{bottom:-0.25em}img{border:0}svg:not(:root){overflow:hidden}figure{margin:1em 40px}hr{-moz-box-sizing:content-box;box-sizing:content-box;height:0}pre{overflow:auto}code,kbd,pre,samp{font-family:monospace,monospace;font-size:1em}button,input,optgroup,select,textarea{color:inherit;font:inherit;margin:0}button{overflow:visible}button,select{text-transform:none}button,html input[type="button"],input[type="reset"],input[type="submit"]{-webkit-appearance:button;cursor:pointer}button[disabled],html input[disabled]{cursor:default}button::-moz-focus-inner,input::-moz-focus-inner{border:0;padding:0}input{line-height:normal}input[type="checkbox"],input[type="radio"]{box-sizing:border-box;padding:0}input[type="number"]::-webkit-inner-spin-button,input[type="number"]::-webkit-outer-spin-button{height:auto}input[type="search"]{-webkit-appearance:textfield;-moz-box-sizing:content-box;-webkit-box-sizing:content-box;box-sizing:content-box}input[type="search"]::-webkit-search-cancel-button,input[type="search"]::-webkit-search-decoration{-webkit-appearance:none}fieldset{border:1px solid silver;margin:0 2px;padding:.35em .625em .75em}legend{border:0;padding:0}textarea{overflow:auto}optgroup{font-weight:bold}table{border-collapse:collapse;border-spacing:0}td,th{padding:0}
}
* {
- -webkit-box-sizing: border-box;
- -moz-box-sizing: border-box;
- box-sizing: border-box;
+ -webkit-box-sizing: border-box;
+ -moz-box-sizing: border-box;
+ box-sizing: border-box;
}
/* This part handles the "default" theme being used depending on the system one. */
h3 {
font-size: 1.3em;
}
-h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod):not(.important), h4:not(.method):not(.type):not(.tymethod):not(.associatedconstant) {
+h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod):not(.important),
+h4:not(.method):not(.type):not(.tymethod):not(.associatedconstant) {
font-weight: 500;
margin: 20px 0 15px 0;
padding-bottom: 6px;
h1.fqn > .in-band > a:hover {
text-decoration: underline;
}
-h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod):not(.associatedconstant) {
+h2, h3:not(.impl):not(.method):not(.type):not(.tymethod),
+h4:not(.method):not(.type):not(.tymethod):not(.associatedconstant) {
border-bottom: 1px solid;
}
h3.impl, h3.method, h4.method, h3.type, h4.type, h4.associatedconstant {
border-radius: 3px;
padding: 0 0.1em;
}
-.docblock pre code, .docblock-short pre code {
+.docblock pre code, .docblock-short pre code, .docblock code.spotlight {
padding: 0;
}
+.docblock code.spotlight :last-child {
+ padding-bottom: 0.6em;
+}
pre {
padding: 14px;
}
#results > table {
width: 100%;
table-layout: fixed;
+ margin-bottom: 40px;
}
.content pre.line-numbers {
font-size: 0.8em;
}
-.content .methods > div {
+.content .methods > div:not(.important-traits) {
margin-left: 40px;
margin-bottom: 15px;
}
font-size: 16px;
}
-.tooltip:hover .tooltiptext {
- display: inline;
-}
-
.tooltip .tooltiptext::after {
content: " ";
position: absolute;
font-size: 20px;
}
-.tooltip .tooltiptext {
+.important-traits-tooltip {
+ display: inline-block;
+ cursor: pointer;
+}
+
+.important-traits:hover .important-traits-tooltiptext,
+.important-traits .important-traits-tooltiptext.force-tooltip {
+ display: inline-block;
+}
+
+.important-traits .important-traits-tooltiptext {
+ display: none;
+ padding: 5px 3px 3px 3px;
+ border-radius: 6px;
+ margin-left: 5px;
+ z-index: 10;
+ font-size: 16px;
+ cursor: default;
+ position: absolute;
border: 1px solid;
- font-weight: normal;
}
-pre.rust {
+.important-traits-tooltip::after {
+ /* The margin on the tooltip does not capture hover events,
+ this extends the area of hover enough so that mouse hover is not
+ lost when moving the mouse to the tooltip */
+ content: "\00a0\00a0\00a0";
+}
+
+.important-traits .important, .important-traits .docblock {
+ margin: 0;
+}
+
+.important-traits .docblock code.content{
+ margin: 0;
+ padding: 0;
+ font-size: 20px;
+}
+
+/* Example code has the "Run" button that needs to be positioned relative to the pre */
+pre.rust.rust-example-rendered {
position: relative;
+}
+
+pre.rust {
tab-size: 4;
-moz-tab-size: 4;
}
font-size: 16px;
}
+.important-traits {
+ cursor: pointer;
+ z-index: 2;
+ margin-left: 5px;
+}
+
+h4 > .important-traits {
+ position: absolute;
+ left: -44px;
+ top: 2px;
+}
+
#all-types {
text-align: center;
border: 1px solid;
z-index: 1;
}
+ h4 > .important-traits {
+ position: absolute;
+ left: -22px;
+ top: 24px;
+ }
+
#titles > div > div.count {
float: left;
width: 100%;
});
main.insertBefore(sidebar, main.firstChild);
+ // Focus on the current file in the source files sidebar.
+ var selected_elem = sidebar.getElementsByClassName("selected")[0];
+ if (typeof selected_elem !== "undefined") {
+ selected_elem.focus();
+ }
}
color: #c5c5c5;
}
-h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
+h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod),
+h4:not(.method):not(.type):not(.tymethod) {
color: white;
}
h1.fqn {
color: #e6e1cf;
}
pre > code {
- color: #e6e1cf;
+ color: #e6e1cf;
}
span code {
- color: #e6e1cf;
+ color: #e6e1cf;
}
.docblock a > code {
- color: #39AFD7 !important;
+ color: #39AFD7 !important;
}
.docblock code, .docblock-short code {
background-color: #191f26;
}
.sidebar-elems .location {
- color: #ff7733;
+ color: #ff7733;
}
.sidebar-elems .location a {
- color: #fff;
+ color: #fff;
}
.sidebar .version {
- border-bottom-color: #DDD;
+ border-bottom-color: #424c57;
}
.sidebar-title {
.line-numbers span { color: #5c6773ab; }
.line-numbers .line-highlighted {
- background-color: rgba(255, 236, 164, 0.06) !important;
- padding-right: 4px;
- border-right: 1px solid #ffb44c;
+ background-color: rgba(255, 236, 164, 0.06) !important;
+ padding-right: 4px;
+ border-right: 1px solid #ffb44c;
}
.docblock h1, .docblock h2, .docblock h3, .docblock h4, .docblock h5 {
.content span.keyword, .content a.keyword { color: #de5249; }
.content span.externcrate, .content span.mod, .content a.mod {
- color: #acccf9;
+ color: #acccf9;
}
.content span.struct, .content a.struct {
- color: #ffa0a5;
+ color: #ffa0a5;
}
.content span.enum, .content a.enum {
- color: #99e0c9;
+ color: #99e0c9;
}
.content span.trait, .content a.trait {
- color: #39AFD7;
+ color: #39AFD7;
}
.content span.type, .content a.type {
- color: #cfbcf5;
+ color: #cfbcf5;
}
.content span.fn, .content a.fn, .content span.method,
.content a.method, .content span.tymethod,
.content a.tymethod, .content .fnname {
- color: #fdd687;
+ color: #fdd687;
}
.content span.attr, .content a.attr, .content span.derive,
.content a.derive, .content span.macro, .content a.macro {
- color: #a37acc;
+ color: #a37acc;
}
-pre.rust .comment, pre.rust .doccomment {
+pre.rust .comment, pre.rust .doccomment {
color: #788797;
font-style: italic;
}
nav:not(.sidebar) {
- border-bottom-color: #e0e0e0;
+ border-bottom-color: #424c57;
}
nav.main .current {
border-top-color: #5c6773;
color: #39AFD7;
}
-.stab.internal a {
- color: #304FFE;
-}
-
.collapse-toggle {
color: #999;
}
#crate-search {
color: #c5c5c5;
background-color: #141920;
- border-radius: 4px;
- box-shadow: none;
- border-color: #5c6773;
+ box-shadow: 0 0 0 1px #424c57,0 0 0 2px transparent;
+ border-color: #424c57;
}
.search-input {
- color: #ffffff;
- background-color: #141920;
- box-shadow: none;
- transition: box-shadow 150ms ease-in-out;
- border-radius: 4px;
- margin-left: 8px;
+ color: #ffffff;
+ background-color: #141920;
+ box-shadow: 0 0 0 1px #424c57,0 0 0 2px transparent;
+ transition: box-shadow 150ms ease-in-out;
+}
+
+#crate-search+.search-input:focus {
+ box-shadow: 0 0 0 1px #148099,0 0 0 2px transparent;
+ color: #ffffff;
+ background-color: #141920;
+ box-shadow: none;
+ transition: box-shadow 150ms ease-in-out;
+ border-radius: 4px;
+ margin-left: 8px;
}
#crate-search+.search-input:focus {
- box-shadow: 0px 6px 20px 0px black;
+ box-shadow: 0px 6px 20px 0px black;
}
.search-focus:disabled {
}
.stab.unstable,
-.stab.internal,
.stab.deprecated,
.stab.portability {
- color: #c5c5c5;
+ color: #c5c5c5;
background: #314559 !important;
border-style: none !important;
border-radius: 4px;
}
#help > div {
- background: #14191f;
- box-shadow: 0px 6px 20px 0px black;
- border: none;
- border-radius: 4px;
+ background: #14191f;
+ box-shadow: 0px 6px 20px 0px black;
+ border: none;
+ border-radius: 4px;
}
.since {
color: #ff9011;
}
pre.rust .self {
- color: #36a3d9;
- font-style: italic;
+ color: #36a3d9;
+ font-style: italic;
}
pre.rust .attribute {
- color: #e6e1cf;
+ color: #e6e1cf;
}
pre.rust .attribute .ident, pre.rust .attribute .op {
- color: #e6e1cf;
+ color: #e6e1cf;
}
.example-wrap > pre.line-number {
}
a.test-arrow {
- font-size: 100%;
- color: #788797;
- border-radius: 4px;
- background-color: rgba(255, 255, 255, 0);
+ font-size: 100%;
+ color: #788797;
+ border-radius: 4px;
+ background-color: rgba(255, 255, 255, 0);
}
a.test-arrow:hover {
- background-color: rgba(242, 151, 24, 0.05);
- color: #ffb44c;
+ background-color: rgba(242, 151, 24, 0.05);
+ color: #ffb44c;
}
.toggle-label {
}
.tooltip .tooltiptext {
- background-color: #314559;
- color: #c5c5c5;
- border: 1px solid #5c6773;
+ background-color: #314559;
+ color: #c5c5c5;
+ border: 1px solid #5c6773;
}
.tooltip .tooltiptext::after {
border-color: transparent #314559 transparent transparent;
}
+.important-traits-tooltiptext {
+ background-color: #314559;
+ border-color: #5c6773;
+}
+
#titles > div.selected {
- background-color: #141920 !important;
+ background-color: #141920 !important;
border-bottom: 1px solid #ffb44c !important;
border-top: none;
}
}
#titles > div:hover {
- border-bottom: 1px solid rgba(242, 151, 24, 0.3);
+ border-bottom: 1px solid rgba(242, 151, 24, 0.3);
}
#titles > div > div.count {
/* rules that this theme does not need to set, here to satisfy the rule checker */
/* note that a lot of these are partially set in some way (meaning they are set
individually rather than as a group) */
-/* TODO: these rules should be at the bottom of the file but currently must be
+/* FIXME: these rules should be at the bottom of the file but currently must be
above the `@media (max-width: 700px)` rules due to a bug in the css checker */
/* see https://github.com/rust-lang/rust/pull/71237#issuecomment-618170143 */
.content .highlighted.mod, .content .highlighted.externcrate {}
.search-input:focus {}
-.content span.attr,.content a.attr,.block a.current.attr,.content span.derive,.content a.derive,.block a.current.derive,.content span.macro,.content a.macro,.block a.current.macro {}
+.content span.attr,.content a.attr,.block a.current.attr,.content span.derive,.content a.derive,
+.block a.current.derive,.content span.macro,.content a.macro,.block a.current.macro {}
.content .highlighted.trait {}
.content span.struct,.content a.struct,.block a.current.struct {}
#titles>div:hover,#titles>div.selected {}
.content .highlighted.fn,.content .highlighted.method,.content .highlighted.tymethod {}
h2,h3:not(.impl):not(.method):not(.type):not(.tymethod),h4:not(.method):not(.type):not(.tymethod) {}
.content span.enum,.content a.enum,.block a.current.enum {}
-.content span.constant,.content a.constant,.block a.current.constant,.content span.static,.content a.static,.block a.current.static {}
+.content span.constant,.content a.constant,.block a.current.constant,.content span.static,
+.content a.static,.block a.current.static {}
.content span.keyword,.content a.keyword,.block a.current.keyword {}
pre.rust .comment {}
.content .highlighted.enum {}
.content .highlighted.struct {}
.content .highlighted.keyword {}
.content span.traitalias,.content a.traitalias,.block a.current.traitalias {}
-.content span.fn,.content a.fn,.block a.current.fn,.content span.method,.content a.method,.block a.current.method,.content span.tymethod,.content a.tymethod,.block a.current.tymethod,.content .fnname {}
+.content span.fn,.content a.fn,.block a.current.fn,.content span.method,.content a.method,
+.block a.current.method,.content span.tymethod,.content a.tymethod,.block a.current.tymethod,
+.content .fnname {}
pre.rust .kw {}
-pre.rust .self,pre.rust .bool-val,pre.rust .prelude-val,pre.rust .attribute,pre.rust .attribute .ident {}
+pre.rust .self,pre.rust .bool-val,pre.rust .prelude-val,pre.rust .attribute,
+pre.rust .attribute .ident {}
.content span.foreigntype,.content a.foreigntype,.block a.current.foreigntype {}
pre.rust .doccomment {}
.stab.deprecated {}
.content .highlighted.type {}
pre.rust .kw-2,pre.rust .prelude-ty {}
.content span.trait,.content a.trait,.block a.current.trait {}
-.stab.internal {}
@media (max-width: 700px) {
.sidebar-menu {
#theme-picker, #settings-menu {
border-color: #5c6773;
- background-color: #0f1419;
+ background-color: #0f1419;
}
#theme-picker > img, #settings-menu > img {
- filter: invert(100);
+ filter: invert(100);
}
#theme-picker:hover, #theme-picker:focus,
color: #ddd;
}
-h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
+h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod),
+h4:not(.method):not(.type):not(.tymethod) {
color: #ddd;
}
h1.fqn {
border-bottom-color: #d2d2d2;
}
-h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
+h2, h3:not(.impl):not(.method):not(.type):not(.tymethod),
+h4:not(.method):not(.type):not(.tymethod) {
border-bottom-color: #d2d2d2;
}
color: #D2991D;
}
-.stab.internal a {
- color: #304FFE;
-}
-
a.test-arrow {
color: #dedede;
}
}
.stab.unstable { background: #FFF5D6; border-color: #FFC600; color: #2f2f2f; }
-.stab.internal { background: #FFB9B3; border-color: #B71C1C; color: #2f2f2f; }
.stab.deprecated { background: #F3DFFF; border-color: #7F0087; color: #2f2f2f; }
.stab.portability { background: #C4ECFF; border-color: #7BA5DB; color: #2f2f2f; }
border-color: transparent black transparent transparent;
}
+.important-traits-tooltiptext {
+ background-color: #111;
+ border-color: #777;
+}
+
#titles > div:not(.selected) {
background-color: #252525;
border-top-color: #252525;
color: black;
}
-h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
+h1, h2, h3:not(.impl):not(.method):not(.type):not(.tymethod),
+h4:not(.method):not(.type):not(.tymethod) {
color: black;
}
h1.fqn {
border-bottom-color: #D5D5D5;
}
-h2, h3:not(.impl):not(.method):not(.type):not(.tymethod), h4:not(.method):not(.type):not(.tymethod) {
+h2, h3:not(.impl):not(.method):not(.type):not(.tymethod),
+h4:not(.method):not(.type):not(.tymethod) {
border-bottom-color: #DDDDDD;
}
color: #3873AD;
}
-.stab.internal a {
- color: #304FFE;
-}
-
a.test-arrow {
color: #f5f5f5;
}
}
.stab.unstable { background: #FFF5D6; border-color: #FFC600; }
-.stab.internal { background: #FFB9B3; border-color: #B71C1C; }
.stab.deprecated { background: #F3DFFF; border-color: #7F0087; }
.stab.portability { background: #C4ECFF; border-color: #7BA5DB; }
border-color: transparent black transparent transparent;
}
+.important-traits-tooltiptext {
+ background-color: #eee;
+ border-color: #999;
+}
+
#titles > div:not(.selected) {
background-color: #e6e6e6;
border-top-color: #e6e6e6;
#![recursion_limit = "256"]
extern crate env_logger;
+#[macro_use]
+extern crate lazy_static;
extern crate rustc_ast;
extern crate rustc_ast_pretty;
extern crate rustc_attr;
32_000_000 // 32MB on other platforms
};
rustc_driver::set_sigpipe_handler();
+ rustc_driver::install_ice_hook();
env_logger::init_from_env("RUSTDOC_LOG");
let res = std::thread::Builder::new()
.stack_size(thread_stack_size)
let mut ids = IdMap::new();
let error_codes = ErrorCodes::from(UnstableFeatures::from_environment().is_nightly_build());
let text = if !options.markdown_no_toc {
- MarkdownWithToc(text, &mut ids, error_codes, edition, &playground).to_string()
+ MarkdownWithToc(text, &mut ids, error_codes, edition, &playground).into_string()
} else {
- Markdown(text, &[], &mut ids, error_codes, edition, &playground).to_string()
+ Markdown(text, &[], &mut ids, error_codes, edition, &playground).into_string()
};
let err = write!(
Namespace::{self, *},
PerNS, Res,
};
-use rustc_hir::def_id::{DefId, LocalDefId};
+use rustc_hir::def_id::DefId;
use rustc_middle::ty;
use rustc_resolve::ParentScope;
use rustc_session::lint;
struct LinkCollector<'a, 'tcx> {
cx: &'a DocContext<'tcx>,
- mod_ids: Vec<hir::HirId>,
+ // NOTE: this may not necessarily be a module in the current crate
+ mod_ids: Vec<DefId>,
}
impl<'a, 'tcx> LinkCollector<'a, 'tcx> {
&self,
path_str: &str,
current_item: &Option<String>,
- module_id: LocalDefId,
+ module_id: DefId,
) -> Result<(Res, Option<String>), ErrorKind> {
let cx = self.cx;
}
/// Resolves a string as a macro.
- fn macro_resolve(&self, path_str: &str, parent_id: Option<hir::HirId>) -> Option<Res> {
+ fn macro_resolve(&self, path_str: &str, parent_id: Option<DefId>) -> Option<Res> {
let cx = self.cx;
let path = ast::Path::from_ident(Ident::from_str(path_str));
cx.enter_resolver(|resolver| {
if let Some(res) = resolver.all_macros().get(&Symbol::intern(path_str)) {
return Some(res.map_id(|_| panic!("unexpected id")));
}
- if let Some(module_id) = parent_id.or(self.mod_ids.last().cloned()) {
- let module_id = cx.tcx.hir().local_def_id(module_id);
+ if let Some(module_id) = parent_id {
if let Ok((_, res)) =
resolver.resolve_str_path_error(DUMMY_SP, path_str, MacroNS, module_id)
{
disambiguator: Option<&str>,
ns: Namespace,
current_item: &Option<String>,
- parent_id: Option<hir::HirId>,
+ parent_id: Option<DefId>,
extra_fragment: &Option<String>,
item_opt: Option<&Item>,
) -> Result<(Res, Option<String>), ErrorKind> {
let cx = self.cx;
// In case we're in a module, try to resolve the relative path.
- if let Some(module_id) = parent_id.or(self.mod_ids.last().cloned()) {
- let module_id = cx.tcx.hir().local_def_id(module_id);
+ if let Some(module_id) = parent_id {
let result = cx.enter_resolver(|resolver| {
resolver.resolve_str_path_error(DUMMY_SP, &path_str, ns, module_id)
});
impl<'a, 'tcx> DocFolder for LinkCollector<'a, 'tcx> {
fn fold_item(&mut self, mut item: Item) -> Option<Item> {
- let item_hir_id = if item.is_mod() {
- if let Some(def_id) = item.def_id.as_local() {
- Some(self.cx.tcx.hir().as_local_hir_id(def_id))
- } else {
- debug!("attempting to fold on a non-local item: {:?}", item);
- return self.fold_item_recur(item);
- }
- } else {
- None
- };
+ use rustc_middle::ty::DefIdTree;
- // FIXME: get the resolver to work with non-local resolve scopes.
- let parent_node = self.cx.as_local_hir_id(item.def_id).and_then(|hir_id| {
- // FIXME: this fails hard for impls in non-module scope, but is necessary for the
- // current `resolve()` implementation.
- match self.cx.as_local_hir_id(self.cx.tcx.parent_module(hir_id).to_def_id()).unwrap() {
- id if id != hir_id => Some(id),
- _ => None,
+ let parent_node = if item.is_fake() {
+ // FIXME: is this correct?
+ None
+ } else {
+ let mut current = item.def_id;
+ // The immediate parent might not always be a module.
+ // Find the first parent which is.
+ loop {
+ if let Some(parent) = self.cx.tcx.parent(current) {
+ if self.cx.tcx.def_kind(parent) == DefKind::Mod {
+ break Some(parent);
+ }
+ current = parent;
+ } else {
+ break None;
+ }
}
- });
+ };
if parent_node.is_some() {
- debug!("got parent node for {:?} {:?}, id {:?}", item.type_(), item.name, item.def_id);
+ trace!("got parent node for {:?} {:?}, id {:?}", item.type_(), item.name, item.def_id);
}
let current_item = match item.inner {
ModuleItem(..) => {
if item.attrs.inner_docs {
- if item_hir_id.unwrap() != hir::CRATE_HIR_ID { item.name.clone() } else { None }
+ if item.def_id.is_top_level_module() { item.name.clone() } else { None }
} else {
- match parent_node.or(self.mod_ids.last().cloned()) {
- Some(parent) if parent != hir::CRATE_HIR_ID => {
+ match parent_node.or(self.mod_ids.last().copied()) {
+ Some(parent) if !parent.is_top_level_module() => {
// FIXME: can we pull the parent module's name from elsewhere?
- Some(self.cx.tcx.hir().name(parent).to_string())
+ Some(self.cx.tcx.item_name(parent).to_string())
}
_ => None,
}
for_.def_id().map(|did| self.cx.tcx.item_name(did).to_string())
}
// we don't display docs on `extern crate` items anyway, so don't process them.
- ExternCrateItem(..) => return self.fold_item_recur(item),
+ ExternCrateItem(..) => {
+ debug!("ignoring extern crate item {:?}", item.def_id);
+ return self.fold_item_recur(item);
+ }
ImportItem(Import::Simple(ref name, ..)) => Some(name.clone()),
MacroItem(..) => None,
_ => item.name.clone(),
};
if item.is_mod() && item.attrs.inner_docs {
- self.mod_ids.push(item_hir_id.unwrap());
+ self.mod_ids.push(item.def_id);
}
let cx = self.cx;
let dox = item.attrs.collapsed_doc_value().unwrap_or_else(String::new);
+ trace!("got documentation '{}'", dox);
look_for_tests(&cx, &dox, &item, true);
});
for (ori_link, link_range) in markdown_links(&dox) {
+ trace!("considering link '{}'", ori_link);
+
// Bail early for real links.
if ori_link.contains('/') {
continue;
let (res, fragment) = {
let mut kind = None;
let mut disambiguator = None;
- path_str = if let Some(prefix) = ["struct@", "enum@", "type@", "trait@", "union@"]
- .iter()
- .find(|p| link.starts_with(**p))
+ path_str = if let Some(prefix) =
+ ["struct@", "enum@", "type@", "trait@", "union@", "module@", "mod@"]
+ .iter()
+ .find(|p| link.starts_with(**p))
{
kind = Some(TypeNS);
disambiguator = Some(&prefix[..prefix.len() - 1]);
link.trim_start_matches(prefix)
- } else if let Some(prefix) = [
- "const@",
- "static@",
- "value@",
- "function@",
- "mod@",
- "fn@",
- "module@",
- "method@",
- ]
- .iter()
- .find(|p| link.starts_with(**p))
+ } else if let Some(prefix) =
+ ["const@", "static@", "value@", "function@", "fn@", "method@"]
+ .iter()
+ .find(|p| link.starts_with(**p))
{
kind = Some(ValueNS);
disambiguator = Some(&prefix[..prefix.len() - 1]);
// we've already pushed this node onto the resolution stack but
// for outer comments we explicitly try and resolve against the
// parent_node first.
- let base_node =
- if item.is_mod() && item.attrs.inner_docs { None } else { parent_node };
+ let base_node = if item.is_mod() && item.attrs.inner_docs {
+ self.mod_ids.last().copied()
+ } else {
+ parent_node
+ };
// replace `Self` with suitable item's parent name
if path_str.starts_with("Self::") {
}
if item.is_mod() && !item.attrs.inner_docs {
- self.mod_ids.push(item_hir_id.unwrap());
+ self.mod_ids.push(item.def_id);
}
if item.is_mod() {
Some(hir_id) => hir_id,
None => {
// If non-local, no need to check anything.
+ info!("ignoring warning from parent crate: {}", err_msg);
return;
}
};
pub fn run(options: Options) -> Result<(), String> {
let input = config::Input::File(options.input.clone());
- let invalid_codeblock_attribute_name = rustc_lint::builtin::INVALID_CODEBLOCK_ATTRIBUTES.name;
+ let invalid_codeblock_attributes_name = rustc_lint::builtin::INVALID_CODEBLOCK_ATTRIBUTES.name;
// In addition to those specific lints, we also need to allow those given through
// command line, otherwise they'll get ignored and we don't want that.
- let allowed_lints = vec![invalid_codeblock_attribute_name.to_owned()];
+ let allowed_lints = vec![invalid_codeblock_attributes_name.to_owned()];
let (lint_opts, lint_caps) = init_lints(allowed_lints, options.lint_opts.clone(), |lint| {
- if lint.name == invalid_codeblock_attribute_name {
+ if lint.name == invalid_codeblock_attributes_name {
None
} else {
Some((lint.name_lower(), lint::Allow))
if !res_did.is_local() && !is_no_inline {
let attrs = clean::inline::load_attrs(self.cx, res_did);
let self_is_hidden = attrs.lists(sym::doc).has_word(sym::hidden);
- match res {
- Res::Def(
- DefKind::Trait
- | DefKind::Struct
- | DefKind::Union
- | DefKind::Enum
- | DefKind::ForeignTy
- | DefKind::TyAlias,
- did,
- ) if !self_is_hidden => {
- self.cx.renderinfo.get_mut().access_levels.map.insert(did, AccessLevel::Public);
- }
- Res::Def(DefKind::Mod, did) => {
- if !self_is_hidden {
- crate::visit_lib::LibEmbargoVisitor::new(self.cx).visit_mod(did);
+ if !self_is_hidden {
+ if let Res::Def(kind, did) = res {
+ if kind == DefKind::Mod {
+ crate::visit_lib::LibEmbargoVisitor::new(self.cx).visit_mod(did)
+ } else {
+ // All items need to be handled here in case someone wishes to link
+ // to them with intra-doc links
+ self.cx
+ .renderinfo
+ .get_mut()
+ .access_levels
+ .map
+ .insert(did, AccessLevel::Public);
}
}
- _ => {}
}
-
return false;
}
unwind = { path = "../libunwind" }
hashbrown = { version = "0.6.2", default-features = false, features = ['rustc-dep-of-std'] }
-[dependencies.backtrace_rs]
-package = "backtrace"
-version = "0.3.46"
-default-features = false # without the libstd `backtrace` feature, stub out everything
-features = [ "rustc-dep-of-std" ] # enable build support for integrating into libstd
+# Dependencies of the `backtrace` crate
+addr2line = { version = "0.13.0", optional = true, default-features = false }
+rustc-demangle = { version = "0.1.4", features = ['rustc-dep-of-std'] }
+miniz_oxide = { version = "0.4.0", optional = true, default-features = false }
+[dependencies.object]
+version = "0.20"
+optional = true
+default-features = false
+features = ['read_core', 'elf', 'macho', 'pe']
[dev-dependencies]
rand = "0.7"
wasi = { version = "0.9.0", features = ['rustc-dep-of-std'], default-features = false }
[features]
-default = ["std_detect_file_io", "std_detect_dlsym_getauxval", "panic-unwind"]
-
backtrace = [
- "backtrace_rs/dbghelp", # backtrace/symbolize on MSVC
- "backtrace_rs/libbacktrace", # symbolize on most platforms
- "backtrace_rs/libunwind", # backtrace on most platforms
- "backtrace_rs/dladdr", # symbolize on platforms w/o libbacktrace
+ "gimli-symbolize",
+ 'addr2line/rustc-dep-of-std',
+ 'object/rustc-dep-of-std',
+ 'miniz_oxide/rustc-dep-of-std',
]
+gimli-symbolize = []
panic-unwind = ["panic_unwind"]
profiler = ["profiler_builtins"]
//! The `#[global_allocator]` can only be used once in a crate
//! or its recursive dependencies.
+#![deny(unsafe_op_in_unsafe_fn)]
#![stable(feature = "alloc_module", since = "1.28.0")]
use core::intrinsics;
#[inline]
unsafe fn dealloc(&mut self, ptr: NonNull<u8>, layout: Layout) {
if layout.size() != 0 {
- GlobalAlloc::dealloc(self, ptr.as_ptr(), layout)
+ // SAFETY: The safety guarantees are explained in the documentation
+ // for the `GlobalAlloc` trait and its `dealloc` method.
+ unsafe { GlobalAlloc::dealloc(self, ptr.as_ptr(), layout) }
}
}
match placement {
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if layout.size() == 0 => {
- let new_layout = Layout::from_size_align_unchecked(new_size, layout.align());
+ let new_layout =
+ // SAFETY: The new size and layout alignment guarantees
+ // are transferred to the caller (they come from parameters).
+ //
+ // See the preconditions for `Layout::from_size_align` to
+ // see what must be checked.
+ unsafe { Layout::from_size_align_unchecked(new_size, layout.align()) };
self.alloc(new_layout, init)
}
ReallocPlacement::MayMove => {
- // `realloc` probably checks for `new_size > size` or something similar.
- intrinsics::assume(new_size > size);
- let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
- let memory =
- MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
- init.init_offset(memory, size);
+ // SAFETY:
+ //
+ // The safety guarantees are explained in the documentation
+ // for the `GlobalAlloc` trait and its `dealloc` method.
+ //
+ // `realloc` probably checks for `new_size > size` or something
+ // similar.
+ //
+ // For the guarantees about `init_offset`, see its documentation:
+ // `ptr` is assumed valid (and checked to be non-null) and
+ // `memory.size` is set to `new_size` so the offset being `size`
+ // is valid.
+ let memory = unsafe {
+ intrinsics::assume(new_size > size);
+ let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
+ let memory =
+ MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size };
+ init.init_offset(memory, size);
+ memory
+ };
Ok(memory)
}
}
match placement {
ReallocPlacement::InPlace => Err(AllocErr),
ReallocPlacement::MayMove if new_size == 0 => {
- self.dealloc(ptr, layout);
+ // SAFETY: see `GlobalAlloc::dealloc` for the guarantees that
+ // must be respected. `ptr` and `layout` are parameters and so
+ // those guarantees must be checked by the caller.
+ unsafe { self.dealloc(ptr, layout) };
Ok(MemoryBlock { ptr: layout.dangling(), size: 0 })
}
ReallocPlacement::MayMove => {
- // `realloc` probably checks for `new_size < size` or something similar.
- intrinsics::assume(new_size < size);
- let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
- Ok(MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size })
+ // SAFETY:
+ //
+ // See `GlobalAlloc::realloc` for more information about the
+ // guarantees expected by this method. `ptr`, `layout` and
+ // `new_size` are parameters and the responsibility for their
+ // correctness is left to the caller.
+ //
+ // `realloc` probably checks for `new_size < size` or something
+ // similar.
+ let memory = unsafe {
+ intrinsics::assume(new_size < size);
+ let ptr = GlobalAlloc::realloc(self, ptr.as_ptr(), layout, new_size);
+ MemoryBlock { ptr: NonNull::new(ptr).ok_or(AllocErr)?, size: new_size }
+ };
+ Ok(memory)
}
}
}
#[rustc_std_internal_symbol]
pub unsafe extern "C" fn __rdl_alloc(size: usize, align: usize) -> *mut u8 {
- let layout = Layout::from_size_align_unchecked(size, align);
- System.alloc(layout)
+ // SAFETY: see the guarantees expected by `Layout::from_size_align` and
+ // `GlobalAlloc::alloc`.
+ unsafe {
+ let layout = Layout::from_size_align_unchecked(size, align);
+ System.alloc(layout)
+ }
}
#[rustc_std_internal_symbol]
pub unsafe extern "C" fn __rdl_dealloc(ptr: *mut u8, size: usize, align: usize) {
- System.dealloc(ptr, Layout::from_size_align_unchecked(size, align))
+ // SAFETY: see the guarantees expected by `Layout::from_size_align` and
+ // `GlobalAlloc::dealloc`.
+ unsafe { System.dealloc(ptr, Layout::from_size_align_unchecked(size, align)) }
}
#[rustc_std_internal_symbol]
align: usize,
new_size: usize,
) -> *mut u8 {
- let old_layout = Layout::from_size_align_unchecked(old_size, align);
- System.realloc(ptr, old_layout, new_size)
+ // SAFETY: see the guarantees expected by `Layout::from_size_align` and
+ // `GlobalAlloc::realloc`.
+ unsafe {
+ let old_layout = Layout::from_size_align_unchecked(old_size, align);
+ System.realloc(ptr, old_layout, new_size)
+ }
}
#[rustc_std_internal_symbol]
pub unsafe extern "C" fn __rdl_alloc_zeroed(size: usize, align: usize) -> *mut u8 {
- let layout = Layout::from_size_align_unchecked(size, align);
- System.alloc_zeroed(layout)
+ // SAFETY: see the guarantees expected by `Layout::from_size_align` and
+ // `GlobalAlloc::alloc_zeroed`.
+ unsafe {
+ let layout = Layout::from_size_align_unchecked(size, align);
+ System.alloc_zeroed(layout)
+ }
}
}
// `Backtrace`, but that's a relatively small price to pay relative to capturing
// a backtrace or actually symbolizing it.
+use crate::backtrace_rs::{self, BytesOrWideString};
use crate::env;
use crate::ffi::c_void;
use crate::fmt;
use crate::sync::Mutex;
use crate::sys_common::backtrace::{lock, output_filename};
use crate::vec::Vec;
-use backtrace::BytesOrWideString;
-use backtrace_rs as backtrace;
/// A captured OS thread stack backtrace.
///
}
enum RawFrame {
- Actual(backtrace::Frame),
+ Actual(backtrace_rs::Frame),
#[cfg(test)]
Fake,
}
fn fmt(&self, fmt: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(fmt, "{{ ")?;
- if let Some(fn_name) = self.name.as_ref().map(|b| backtrace::SymbolName::new(b)) {
+ if let Some(fn_name) = self.name.as_ref().map(|b| backtrace_rs::SymbolName::new(b)) {
write!(fmt, "fn: \"{:#}\"", fn_name)?;
} else {
write!(fmt, "fn: <unknown>")?;
BytesOrWide::Bytes(w) => BytesOrWideString::Bytes(w),
BytesOrWide::Wide(w) => BytesOrWideString::Wide(w),
},
- backtrace::PrintFmt::Short,
+ backtrace_rs::PrintFmt::Short,
crate::env::current_dir().as_ref().ok(),
)
}
let mut frames = Vec::new();
let mut actual_start = None;
unsafe {
- backtrace::trace_unsynchronized(|frame| {
+ backtrace_rs::trace_unsynchronized(|frame| {
frames.push(BacktraceFrame {
frame: RawFrame::Actual(frame.clone()),
symbols: Vec::new(),
let full = fmt.alternate();
let (frames, style) = if full {
- (&capture.frames[..], backtrace::PrintFmt::Full)
+ (&capture.frames[..], backtrace_rs::PrintFmt::Full)
} else {
- (&capture.frames[capture.actual_start..], backtrace::PrintFmt::Short)
+ (&capture.frames[capture.actual_start..], backtrace_rs::PrintFmt::Short)
};
// When printing paths we try to strip the cwd if it exists, otherwise
output_filename(fmt, path, style, cwd.as_ref().ok())
};
- let mut f = backtrace::BacktraceFmt::new(fmt, style, &mut print_path);
+ let mut f = backtrace_rs::BacktraceFmt::new(fmt, style, &mut print_path);
f.add_context()?;
for frame in frames {
let mut f = f.frame();
for symbol in frame.symbols.iter() {
f.print_raw(
frame.frame.ip(),
- symbol.name.as_ref().map(|b| backtrace::SymbolName::new(b)),
+ symbol.name.as_ref().map(|b| backtrace_rs::SymbolName::new(b)),
symbol.filename.as_ref().map(|b| match b {
BytesOrWide::Bytes(w) => BytesOrWideString::Bytes(w),
BytesOrWide::Wide(w) => BytesOrWideString::Wide(w),
RawFrame::Fake => unimplemented!(),
};
unsafe {
- backtrace::resolve_frame_unsynchronized(frame, |symbol| {
+ backtrace_rs::resolve_frame_unsynchronized(frame, |symbol| {
symbols.push(BacktraceSymbol {
name: symbol.name().map(|m| m.as_bytes().to_vec()),
filename: symbol.filename_raw().map(|b| match b {
}
println!("cargo:rustc-link-lib=c");
println!("cargo:rustc-link-lib=compiler_rt");
+ } else if (target.contains("sgx") && target.contains("fortanix"))
+ || target.contains("hermit")
+ || target.contains("l4re")
+ || target.contains("redox")
+ || target.contains("haiku")
+ || target.contains("vxworks")
+ || target.contains("wasm32")
+ || target.contains("asmjs")
+ {
+ // These platforms don't have any special requirements.
+ } else {
+ // This is for Cargo's build-std support, to mark std as unstable for
+ // typically no_std platforms.
+ // This covers:
+ // - os=none ("bare metal" targets)
+ // - mipsel-sony-psp
+ // - nvptx64-nvidia-cuda
+ // - avr-unknown-unknown
+ // - tvos (aarch64-apple-tvos, x86_64-apple-tvos)
+ // - uefi (x86_64-unknown-uefi, i686-unknown-uefi)
+ // - JSON targets
+ // - Any new targets that have not been explicitly added above.
+ println!("cargo:rustc-cfg=feature=\"restricted-std\"");
}
+ println!("cargo:rustc-env=STD_ENV_ARCH={}", env::var("CARGO_CFG_TARGET_ARCH").unwrap());
+ println!("cargo:rustc-cfg=backtrace_in_libstd");
}
/// The easiest way to use `HashMap` with a custom key type is to derive [`Eq`] and [`Hash`].
/// We must also derive [`PartialEq`].
///
-/// [`Eq`]: ../../std/cmp/trait.Eq.html
-/// [`Hash`]: ../../std/hash/trait.Hash.html
-/// [`PartialEq`]: ../../std/cmp/trait.PartialEq.html
-/// [`RefCell`]: ../../std/cell/struct.RefCell.html
-/// [`Cell`]: ../../std/cell/struct.Cell.html
-/// [`default`]: #method.default
-/// [`with_hasher`]: #method.with_hasher
-/// [`with_capacity_and_hasher`]: #method.with_capacity_and_hasher
+/// [`RefCell`]: crate::cell::RefCell
+/// [`Cell`]: crate::cell::Cell
+/// [`default`]: Default::default
+/// [`with_hasher`]: Self::with_hasher
+/// [`with_capacity_and_hasher`]: Self::with_capacity_and_hasher
/// [`fnv`]: https://crates.io/crates/fnv
///
/// ```
/// let mut map = HashMap::with_hasher(s);
/// map.insert(1, 2);
/// ```
- ///
- /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
#[inline]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_hasher(hash_builder: S) -> HashMap<K, V, S> {
/// let mut map = HashMap::with_capacity_and_hasher(10, s);
/// map.insert(1, 2);
/// ```
- ///
- /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
#[inline]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_capacity_and_hasher(capacity: usize, hash_builder: S) -> HashMap<K, V, S> {
/// Returns a reference to the map's [`BuildHasher`].
///
- /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
- ///
/// # Examples
///
/// ```
///
/// Panics if the new allocation size overflows [`usize`].
///
- /// [`usize`]: ../../std/primitive.usize.html
- ///
/// # Examples
///
/// ```
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
/// types that can be `==` without being identical. See the [module-level
/// documentation] for more.
///
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
- /// [module-level documentation]: index.html#insert-and-complex-keys
+ /// [module-level documentation]: crate::collections#insert-and-complex-keys
///
/// # Examples
///
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
/// [`Hash`] and [`Eq`] on the borrowed form *must* match those for
/// the key type.
///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
- ///
/// # Examples
///
/// ```
/// This `struct` is created by the [`iter`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`iter`]: struct.HashMap.html#method.iter
-/// [`HashMap`]: struct.HashMap.html
+/// [`iter`]: HashMap::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a, V: 'a> {
base: base::Iter<'a, K, V>,
/// This `struct` is created by the [`iter_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`iter_mut`]: struct.HashMap.html#method.iter_mut
-/// [`HashMap`]: struct.HashMap.html
+/// [`iter_mut`]: HashMap::iter_mut
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IterMut<'a, K: 'a, V: 'a> {
base: base::IterMut<'a, K, V>,
/// This `struct` is created by the [`into_iter`] method on [`HashMap`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
-/// [`into_iter`]: struct.HashMap.html#method.into_iter
-/// [`HashMap`]: struct.HashMap.html
+/// [`into_iter`]: IntoIterator::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K, V> {
base: base::IntoIter<K, V>,
/// This `struct` is created by the [`keys`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`keys`]: struct.HashMap.html#method.keys
-/// [`HashMap`]: struct.HashMap.html
+/// [`keys`]: HashMap::keys
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Keys<'a, K: 'a, V: 'a> {
inner: Iter<'a, K, V>,
/// This `struct` is created by the [`values`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`values`]: struct.HashMap.html#method.values
-/// [`HashMap`]: struct.HashMap.html
+/// [`values`]: HashMap::values
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Values<'a, K: 'a, V: 'a> {
inner: Iter<'a, K, V>,
/// This `struct` is created by the [`drain`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`drain`]: struct.HashMap.html#method.drain
-/// [`HashMap`]: struct.HashMap.html
+/// [`drain`]: HashMap::drain
#[stable(feature = "drain", since = "1.6.0")]
pub struct Drain<'a, K: 'a, V: 'a> {
base: base::Drain<'a, K, V>,
/// This `struct` is created by the [`values_mut`] method on [`HashMap`]. See its
/// documentation for more.
///
-/// [`values_mut`]: struct.HashMap.html#method.values_mut
-/// [`HashMap`]: struct.HashMap.html
+/// [`values_mut`]: HashMap::values_mut
#[stable(feature = "map_values_mut", since = "1.10.0")]
pub struct ValuesMut<'a, K: 'a, V: 'a> {
inner: IterMut<'a, K, V>,
///
/// See the [`HashMap::raw_entry_mut`] docs for usage examples.
///
-/// [`HashMap::raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
+/// [`HashMap::raw_entry_mut`]: HashMap::raw_entry_mut
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawEntryBuilderMut<'a, K: 'a, V: 'a, S: 'a> {
/// This `enum` is constructed through the [`raw_entry_mut`] method on [`HashMap`],
/// then calling one of the methods of that [`RawEntryBuilderMut`].
///
-/// [`HashMap`]: struct.HashMap.html
/// [`Entry`]: enum.Entry.html
-/// [`raw_entry_mut`]: struct.HashMap.html#method.raw_entry_mut
+/// [`raw_entry_mut`]: HashMap::raw_entry_mut
/// [`RawEntryBuilderMut`]: struct.RawEntryBuilderMut.html
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub enum RawEntryMut<'a, K: 'a, V: 'a, S: 'a> {
/// A view into an occupied entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
-///
-/// [`RawEntryMut`]: enum.RawEntryMut.html
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawOccupiedEntryMut<'a, K: 'a, V: 'a> {
base: base::RawOccupiedEntryMut<'a, K, V>,
/// A view into a vacant entry in a `HashMap`.
/// It is part of the [`RawEntryMut`] enum.
-///
-/// [`RawEntryMut`]: enum.RawEntryMut.html
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawVacantEntryMut<'a, K: 'a, V: 'a, S: 'a> {
base: base::RawVacantEntryMut<'a, K, V, S>,
///
/// See the [`HashMap::raw_entry`] docs for usage examples.
///
-/// [`HashMap::raw_entry`]: struct.HashMap.html#method.raw_entry
+/// [`HashMap::raw_entry`]: HashMap::raw_entry
#[unstable(feature = "hash_raw_entry", issue = "56167")]
pub struct RawEntryBuilder<'a, K: 'a, V: 'a, S: 'a> {
map: &'a HashMap<K, V, S>,
///
/// This `enum` is constructed from the [`entry`] method on [`HashMap`].
///
-/// [`HashMap`]: struct.HashMap.html
-/// [`entry`]: struct.HashMap.html#method.entry
+/// [`entry`]: HashMap::entry
#[stable(feature = "rust1", since = "1.0.0")]
pub enum Entry<'a, K: 'a, V: 'a> {
/// An occupied entry.
/// If you need a reference to the `OccupiedEntry` which may outlive the
/// destruction of the `Entry` value, see [`into_mut`].
///
- /// [`into_mut`]: #method.into_mut
+ /// [`into_mut`]: Self::into_mut
///
/// # Examples
///
///
/// If you need multiple references to the `OccupiedEntry`, see [`get_mut`].
///
- /// [`get_mut`]: #method.get_mut
+ /// [`get_mut`]: Self::get_mut
///
/// # Examples
///
/// [`Hasher`], but the hashers created by two different `RandomState`
/// instances are unlikely to produce the same result for the same values.
///
-/// [`HashMap`]: struct.HashMap.html
-/// [`Hasher`]: ../../hash/trait.Hasher.html
-///
/// # Examples
///
/// ```
///
/// The internal algorithm is not specified, and so it and its hashes should
/// not be relied upon over releases.
-///
-/// [`RandomState`]: struct.RandomState.html
-/// [`Hasher`]: ../../hash/trait.Hasher.html
#[stable(feature = "hashmap_default_hasher", since = "1.13.0")]
#[allow(deprecated)]
#[derive(Clone, Debug)]
/// // use the values stored in the set
/// ```
///
-/// [`Cell`]: ../../std/cell/struct.Cell.html
-/// [`Eq`]: ../../std/cmp/trait.Eq.html
-/// [`Hash`]: ../../std/hash/trait.Hash.html
-/// [`HashMap`]: struct.HashMap.html
-/// [`PartialEq`]: ../../std/cmp/trait.PartialEq.html
-/// [`RefCell`]: ../../std/cell/struct.RefCell.html
+/// [`RefCell`]: crate::cell::RefCell
+/// [`Cell`]: crate::cell::Cell
#[derive(Clone)]
#[cfg_attr(not(test), rustc_diagnostic_item = "hashset_type")]
#[stable(feature = "rust1", since = "1.0.0")]
/// let mut set = HashSet::with_hasher(s);
/// set.insert(2);
/// ```
- ///
- /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
#[inline]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_hasher(hasher: S) -> HashSet<T, S> {
/// let mut set = HashSet::with_capacity_and_hasher(10, s);
/// set.insert(1);
/// ```
- ///
- /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
#[inline]
#[stable(feature = "hashmap_build_hasher", since = "1.7.0")]
pub fn with_capacity_and_hasher(capacity: usize, hasher: S) -> HashSet<T, S> {
/// Returns a reference to the set's [`BuildHasher`].
///
- /// [`BuildHasher`]: ../../std/hash/trait.BuildHasher.html
- ///
/// # Examples
///
/// ```
/// assert_eq!(set.contains(&1), true);
/// assert_eq!(set.contains(&4), false);
/// ```
- ///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn contains<Q: ?Sized>(&self, value: &Q) -> bool
/// assert_eq!(set.get(&2), Some(&2));
/// assert_eq!(set.get(&4), None);
/// ```
- ///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
#[stable(feature = "set_recovery", since = "1.9.0")]
pub fn get<Q: ?Sized>(&self, value: &Q) -> Option<&T>
/// assert_eq!(set.remove(&2), true);
/// assert_eq!(set.remove(&2), false);
/// ```
- ///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
#[stable(feature = "rust1", since = "1.0.0")]
pub fn remove<Q: ?Sized>(&mut self, value: &Q) -> bool
/// assert_eq!(set.take(&2), Some(2));
/// assert_eq!(set.take(&2), None);
/// ```
- ///
- /// [`Eq`]: ../../std/cmp/trait.Eq.html
- /// [`Hash`]: ../../std/hash/trait.Hash.html
#[inline]
#[stable(feature = "set_recovery", since = "1.9.0")]
pub fn take<Q: ?Sized>(&mut self, value: &Q) -> Option<T>
/// This `struct` is created by the [`iter`] method on [`HashSet`].
/// See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`iter`]: struct.HashSet.html#method.iter
+/// [`iter`]: HashSet::iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Iter<'a, K: 'a> {
iter: Keys<'a, K, ()>,
/// This `struct` is created by the [`into_iter`] method on [`HashSet`]
/// (provided by the `IntoIterator` trait). See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`into_iter`]: struct.HashSet.html#method.into_iter
+/// [`into_iter`]: IntoIterator::into_iter
#[stable(feature = "rust1", since = "1.0.0")]
pub struct IntoIter<K> {
iter: map::IntoIter<K, ()>,
/// This `struct` is created by the [`drain`] method on [`HashSet`].
/// See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`drain`]: struct.HashSet.html#method.drain
+/// [`drain`]: HashSet::drain
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Drain<'a, K: 'a> {
iter: map::Drain<'a, K, ()>,
/// This `struct` is created by the [`intersection`] method on [`HashSet`].
/// See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`intersection`]: struct.HashSet.html#method.intersection
+/// [`intersection`]: HashSet::intersection
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Intersection<'a, T: 'a, S: 'a> {
// iterator of the first set
/// This `struct` is created by the [`difference`] method on [`HashSet`].
/// See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`difference`]: struct.HashSet.html#method.difference
+/// [`difference`]: HashSet::difference
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Difference<'a, T: 'a, S: 'a> {
// iterator of the first set
/// This `struct` is created by the [`symmetric_difference`] method on
/// [`HashSet`]. See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`symmetric_difference`]: struct.HashSet.html#method.symmetric_difference
+/// [`symmetric_difference`]: HashSet::symmetric_difference
#[stable(feature = "rust1", since = "1.0.0")]
pub struct SymmetricDifference<'a, T: 'a, S: 'a> {
iter: Chain<Difference<'a, T, S>, Difference<'a, T, S>>,
/// This `struct` is created by the [`union`] method on [`HashSet`].
/// See its documentation for more.
///
-/// [`HashSet`]: struct.HashSet.html
-/// [`union`]: struct.HashSet.html#method.union
+/// [`union`]: HashSet::union
#[stable(feature = "rust1", since = "1.0.0")]
pub struct Union<'a, T: 'a, S: 'a> {
iter: Chain<Iter<'a, T>, Difference<'a, T, S>>,
//! cost are suffixed with a `~`.
//!
//! All amortized costs are for the potential need to resize when capacity is
-//! exhausted. If a resize occurs it will take O(n) time. Our collections never
+//! exhausted. If a resize occurs it will take *O*(*n*) time. Our collections never
//! automatically shrink, so removal operations aren't amortized. Over a
//! sufficiently large series of operations, the average cost per operation will
//! deterministically equal the given cost.
/// - s390x
/// - sparc64
#[stable(feature = "env", since = "1.0.0")]
- pub const ARCH: &str = super::arch::ARCH;
+ pub const ARCH: &str = env!("STD_ENV_ARCH");
/// The family of the operating system. Example value is `unix`.
///
pub const EXE_EXTENSION: &str = os::EXE_EXTENSION;
}
-#[cfg(target_arch = "x86")]
-mod arch {
- pub const ARCH: &str = "x86";
-}
-
-#[cfg(target_arch = "x86_64")]
-mod arch {
- pub const ARCH: &str = "x86_64";
-}
-
-#[cfg(target_arch = "arm")]
-mod arch {
- pub const ARCH: &str = "arm";
-}
-
-#[cfg(target_arch = "aarch64")]
-mod arch {
- pub const ARCH: &str = "aarch64";
-}
-
-#[cfg(target_arch = "mips")]
-mod arch {
- pub const ARCH: &str = "mips";
-}
-
-#[cfg(target_arch = "mips64")]
-mod arch {
- pub const ARCH: &str = "mips64";
-}
-
-#[cfg(target_arch = "powerpc")]
-mod arch {
- pub const ARCH: &str = "powerpc";
-}
-
-#[cfg(target_arch = "powerpc64")]
-mod arch {
- pub const ARCH: &str = "powerpc64";
-}
-
-#[cfg(target_arch = "s390x")]
-mod arch {
- pub const ARCH: &str = "s390x";
-}
-
-#[cfg(target_arch = "sparc64")]
-mod arch {
- pub const ARCH: &str = "sparc64";
-}
-
-#[cfg(target_arch = "le32")]
-mod arch {
- pub const ARCH: &str = "le32";
-}
-
-#[cfg(target_arch = "asmjs")]
-mod arch {
- pub const ARCH: &str = "asmjs";
-}
-
-#[cfg(target_arch = "wasm32")]
-mod arch {
- pub const ARCH: &str = "wasm32";
-}
-
-#[cfg(target_arch = "hexagon")]
-mod arch {
- pub const ARCH: &'static str = "hexagon";
-}
-
-#[cfg(target_arch = "riscv64")]
-mod arch {
- pub const ARCH: &'static str = "riscv64";
-}
-
#[cfg(test)]
mod tests {
use super::*;
}
}
+#[stable(feature = "cstr_range_from", since = "1.47.0")]
+impl ops::Index<ops::RangeFrom<usize>> for CStr {
+ type Output = CStr;
+
+ fn index(&self, index: ops::RangeFrom<usize>) -> &CStr {
+ let bytes = self.to_bytes_with_nul();
+ // we need to manually check the starting index to account for the null
+ // byte, since otherwise we could get an empty string that doesn't end
+ // in a null.
+ if index.start < bytes.len() {
+ unsafe { CStr::from_bytes_with_nul_unchecked(&bytes[index.start..]) }
+ } else {
+ panic!(
+ "index out of bounds: the len is {} but the index is {}",
+ bytes.len(),
+ index.start
+ );
+ }
+ }
+}
+
#[stable(feature = "cstring_asref", since = "1.7.0")]
impl AsRef<CStr> for CStr {
#[inline]
assert_eq!(CSTR.to_str().unwrap(), "Hello, world!");
}
+
+ #[test]
+ fn cstr_index_from() {
+ let original = b"Hello, world!\0";
+ let cstr = CStr::from_bytes_with_nul(original).unwrap();
+ let result = CStr::from_bytes_with_nul(&original[7..]).unwrap();
+
+ assert_eq!(&cstr[7..], result);
+ }
+
+ #[test]
+ #[should_panic]
+ fn cstr_index_from_empty() {
+ let original = b"Hello, world!\0";
+ let cstr = CStr::from_bytes_with_nul(original).unwrap();
+ let _ = &cstr[original.len()..];
+ }
}
//! contract. The implementation of many of these functions are subject to change over
//! time and may call fewer or more syscalls/library functions.
//!
-//! [`Read`]: trait.Read.html
-//! [`Write`]: trait.Write.html
-//! [`Seek`]: trait.Seek.html
-//! [`BufRead`]: trait.BufRead.html
-//! [`File`]: ../fs/struct.File.html
-//! [`TcpStream`]: ../net/struct.TcpStream.html
-//! [`Vec<T>`]: ../vec/struct.Vec.html
-//! [`BufReader`]: struct.BufReader.html
-//! [`BufWriter`]: struct.BufWriter.html
-//! [`Write::write`]: trait.Write.html#tymethod.write
-//! [`io::stdout`]: fn.stdout.html
-//! [`println!`]: ../macro.println.html
-//! [`Lines`]: struct.Lines.html
-//! [`io::Result`]: type.Result.html
+//! [`File`]: crate::fs::File
+//! [`TcpStream`]: crate::net::TcpStream
+//! [`Vec<T>`]: crate::vec::Vec
+//! [`io::stdout`]: stdout
+//! [`io::Result`]: crate::io::Result
//! [`?` operator]: ../../book/appendix-02-operators.html
-//! [`Read::read`]: trait.Read.html#tymethod.read
-//! [`Result`]: ../result/enum.Result.html
-//! [`.unwrap()`]: ../result/enum.Result.html#method.unwrap
-// ignore-tidy-filelength
+//! [`Result`]: crate::result::Result
+//! [`.unwrap()`]: crate::result::Result::unwrap
#![stable(feature = "rust1", since = "1.0.0")]
/// }
/// ```
///
-/// [`read()`]: trait.Read.html#tymethod.read
-/// [`std::io`]: ../../std/io/index.html
-/// [`File`]: ../fs/struct.File.html
-/// [`BufRead`]: trait.BufRead.html
-/// [`BufReader`]: struct.BufReader.html
-/// [`&str`]: ../../std/primitive.str.html
+/// [`read()`]: Read::read
+/// [`&str`]: str
+/// [`std::io`]: self
+/// [`File`]: crate::fs::File
/// [slice]: ../../std/primitive.slice.html
#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(spotlight)]
pub trait Read {
/// Pull some bytes from this source into the specified buffer, returning
/// how many bytes were read.
/// before calling `read`. Calling `read` with an uninitialized `buf` (of the kind one
/// obtains via [`MaybeUninit<T>`]) is not safe, and can lead to undefined behavior.
///
- /// [`MaybeUninit<T>`]: ../mem/union.MaybeUninit.html
+ /// [`MaybeUninit<T>`]: crate::mem::MaybeUninit
///
/// # Errors
///
///
/// [`File`]s implement `Read`:
///
- /// [`Err`]: ../../std/result/enum.Result.html#variant.Err
- /// [`Ok(n)`]: ../../std/result/enum.Result.html#variant.Ok
- /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
- /// [`File`]: ../fs/struct.File.html
+ /// [`Ok(n)`]: Ok
+ /// [`File`]: crate::fs::File
///
/// ```no_run
/// use std::io;
/// This method is unsafe because a `Read`er could otherwise return a
/// non-zeroing `Initializer` from another `Read` type without an `unsafe`
/// block.
- ///
- /// [`Initializer::nop()`]: ../../std/io/struct.Initializer.html#method.nop
- /// [`Initializer`]: ../../std/io/struct.Initializer.html
#[unstable(feature = "read_initializer", issue = "42788")]
#[inline]
unsafe fn initializer(&self) -> Initializer {
///
/// [`File`]s implement `Read`:
///
- /// [`read()`]: trait.Read.html#tymethod.read
- /// [`Ok(0)`]: ../../std/result/enum.Result.html#variant.Ok
- /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
- /// [`File`]: ../fs/struct.File.html
+ /// [`read()`]: Read::read
+ /// [`Ok(0)`]: Ok
+ /// [`File`]: crate::fs::File
///
/// ```no_run
/// use std::io;
/// (See also the [`std::fs::read`] convenience function for reading from a
/// file.)
///
- /// [`std::fs::read`]: ../fs/fn.read.html
+ /// [`std::fs::read`]: crate::fs::read
#[stable(feature = "rust1", since = "1.0.0")]
fn read_to_end(&mut self, buf: &mut Vec<u8>) -> Result<usize> {
read_to_end(self, buf)
///
/// See [`read_to_end`][readtoend] for other error semantics.
///
- /// [readtoend]: #method.read_to_end
+ /// [readtoend]: Self::read_to_end
///
/// # Examples
///
/// [`File`][file]s implement `Read`:
///
- /// [file]: ../fs/struct.File.html
+ /// [file]: crate::fs::File
///
/// ```no_run
/// use std::io;
/// (See also the [`std::fs::read_to_string`] convenience function for
/// reading from a file.)
///
- /// [`std::fs::read_to_string`]: ../fs/fn.read_to_string.html
+ /// [`std::fs::read_to_string`]: crate::fs::read_to_string
#[stable(feature = "rust1", since = "1.0.0")]
fn read_to_string(&mut self, buf: &mut String) -> Result<usize> {
// Note that we do *not* call `.read_to_end()` here. We are passing
/// No guarantees are provided about the contents of `buf` when this
/// function is called, implementations cannot rely on any property of the
/// contents of `buf` being true. It is recommended that implementations
- /// only write data to `buf` instead of reading its contents.
+ /// only write data to `buf` instead of reading its contents. The
+ /// documentation on [`read`] has a more detailed explanation on this
+ /// subject.
///
/// # Errors
///
///
/// [`File`]s implement `Read`:
///
- /// [`File`]: ../fs/struct.File.html
- /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
- /// [`ErrorKind::UnexpectedEof`]: ../../std/io/enum.ErrorKind.html#variant.UnexpectedEof
+ /// [`read`]: Read::read
+ /// [`File`]: crate::fs::File
///
/// ```no_run
/// use std::io;
///
/// [`File`][file]s implement `Read`:
///
- /// [file]: ../fs/struct.File.html
+ /// [file]: crate::fs::File
///
/// ```no_run
/// use std::io;
///
/// [`File`][file]s implement `Read`:
///
- /// [file]: ../fs/struct.File.html
- /// [`Iterator`]: ../../std/iter/trait.Iterator.html
- /// [`Result`]: ../../std/result/enum.Result.html
- /// [`io::Error`]: ../../std/io/struct.Error.html
- /// [`u8`]: ../../std/primitive.u8.html
- /// [`Ok`]: ../../std/result/enum.Result.html#variant.Ok
- /// [`Err`]: ../../std/result/enum.Result.html#variant.Err
- /// [`None`]: ../../std/option/enum.Option.html#variant.None
+ /// [file]: crate::fs::File
+ /// [`Iterator`]: crate::iter::Iterator
+ /// [`Result`]: crate::result::Result
+    /// [`io::Error`]: crate::io::Error
///
/// ```no_run
/// use std::io;
///
/// [`File`][file]s implement `Read`:
///
- /// [file]: ../fs/struct.File.html
+ /// [file]: crate::fs::File
///
/// ```no_run
/// use std::io;
///
/// [`File`]s implement `Read`:
///
- /// [`File`]: ../fs/struct.File.html
- /// [`Ok(0)`]: ../../std/result/enum.Result.html#variant.Ok
- /// [`read()`]: trait.Read.html#tymethod.read
+ /// [`File`]: crate::fs::File
+ /// [`Ok(0)`]: Ok
+ /// [`read()`]: Read::read
///
/// ```no_run
/// use std::io;
/// throughout [`std::io`] take and provide types which implement the `Write`
/// trait.
///
-/// [`write`]: #tymethod.write
-/// [`flush`]: #tymethod.flush
+/// [`write`]: Self::write
+/// [`flush`]: Self::flush
/// [`std::io`]: index.html
///
/// # Examples
/// The trait also provides convenience methods like [`write_all`], which calls
/// `write` in a loop until its entire input has been written.
///
-/// [`write_all`]: #method.write_all
+/// [`write_all`]: Self::write_all
#[stable(feature = "rust1", since = "1.0.0")]
+#[doc(spotlight)]
pub trait Write {
/// Write a buffer into this writer, returning how many bytes were written.
///
/// An error of the [`ErrorKind::Interrupted`] kind is non-fatal and the
/// write operation should be retried if there is nothing else to do.
///
- /// [`Err`]: ../../std/result/enum.Result.html#variant.Err
- /// [`Ok(n)`]: ../../std/result/enum.Result.html#variant.Ok
- /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
- ///
/// # Examples
///
/// ```no_run
/// This function will return the first error of
/// non-[`ErrorKind::Interrupted`] kind that [`write`] returns.
///
- /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
- /// [`write`]: #tymethod.write
+ /// [`write`]: Self::write
///
/// # Examples
///
///
/// If the buffer contains no data, this will never call [`write_vectored`].
///
- /// [`write_vectored`]: #method.write_vectored
- /// [`ErrorKind::Interrupted`]: ../../std/io/enum.ErrorKind.html#variant.Interrupted
+ /// [`write_vectored`]: Self::write_vectored
///
/// # Notes
///
/// encountered.
///
/// This method is primarily used to interface with the
- /// [`format_args!`][formatargs] macro, but it is rare that this should
- /// explicitly be called. The [`write!`][write] macro should be favored to
+ /// [`format_args!()`] macro, but it is rare that this should
+ /// explicitly be called. The [`write!()`] macro should be favored to
/// invoke this method instead.
///
- /// [formatargs]: ../macro.format_args.html
- /// [write]: ../macro.write.html
- ///
/// This function internally uses the [`write_all`][writeall] method on
/// this trait and hence will continuously write data so long as no errors
/// are received. This also means that partial writes are not indicated in
/// this signature.
///
- /// [writeall]: #method.write_all
+ /// [writeall]: Self::write_all
///
/// # Errors
///
///
/// [`File`][file]s implement `Seek`:
///
-/// [file]: ../fs/struct.File.html
+/// [file]: crate::fs::File
///
/// ```no_run
/// use std::io;
/// [`BufReader`] to the rescue!
///
/// [`BufReader`]: struct.BufReader.html
-/// [`File`]: ../fs/struct.File.html
-/// [`read_line`]: #method.read_line
-/// [`lines`]: #method.lines
+/// [`File`]: crate::fs::File
+/// [`read_line`]: Self::read_line
+/// [`lines`]: Self::lines
/// [`Read`]: trait.Read.html
///
/// ```no_run
/// be called with the number of bytes that are consumed from this buffer to
/// ensure that the bytes are never returned twice.
///
- /// [`consume`]: #tymethod.consume
+ /// [`consume`]: Self::consume
///
/// An empty buffer returned indicates that the stream has reached EOF.
///
/// Since `consume()` is meant to be used with [`fill_buf`],
/// that method's example includes an example of `consume()`.
///
- /// [`fill_buf`]: #tymethod.fill_buf
+ /// [`fill_buf`]: Self::fill_buf
#[stable(feature = "rust1", since = "1.0.0")]
fn consume(&mut self, amt: usize);
/// If an I/O error is encountered then all bytes read so far will be
/// present in `buf` and its length will have been adjusted appropriately.
///
- /// [`fill_buf`]: #tymethod.fill_buf
+ /// [`fill_buf`]: Self::fill_buf
/// [`ErrorKind::Interrupted`]: enum.ErrorKind.html#variant.Interrupted
///
/// # Examples
/// error is encountered then `buf` may contain some bytes already read in
/// the event that all data read so far was valid UTF-8.
///
- /// [`read_until`]: #method.read_until
+ /// [`read_until`]: Self::read_until
///
/// # Examples
///
/// This function will yield errors whenever [`read_until`] would have
/// also yielded an error.
///
- /// [`io::Result`]: type.Result.html
- /// [`Vec<u8>`]: ../vec/struct.Vec.html
- /// [`read_until`]: #method.read_until
+    /// [`io::Result`]: crate::io::Result
+ /// [`Vec<u8>`]: crate::vec::Vec
+ /// [`read_until`]: Self::read_until
///
/// # Examples
///
/// [`io::Result`]`<`[`String`]`>`. Each string returned will *not* have a newline
/// byte (the 0xA byte) or CRLF (0xD, 0xA bytes) at the end.
///
- /// [`io::Result`]: type.Result.html
- /// [`String`]: ../string/struct.String.html
+    /// [`io::Result`]: crate::io::Result
///
/// # Examples
///
/// this example, we use [`Cursor`] to iterate over all the lines in a byte
/// slice.
///
- /// [`Cursor`]: struct.Cursor.html
- ///
/// ```
/// use std::io::{self, BufRead};
///
/// This instance may reach `EOF` after reading fewer bytes than indicated by
/// this method if the underlying [`Read`] instance reaches EOF.
///
- /// [`Read`]: ../../std/io/trait.Read.html
- ///
/// # Examples
///
/// ```no_run
//
/// A value of type [`bool`] representing logical **false**.
///
-/// The documentation for this keyword is [not yet complete]. Pull requests welcome!
+/// `false` is the logical opposite of [`true`].
///
-/// [`bool`]: primitive.bool.html
-/// [not yet complete]: https://github.com/rust-lang/rust/issues/34601
+/// See the documentation for [`true`] for more information.
+///
+/// [`true`]: keyword.true.html
mod false_keyword {}
#[doc(keyword = "fn")]
/// * `for` is also used for [higher-ranked trait bounds] as in `for<'a> &'a T: PartialEq<i32>`.
///
/// for-in-loops, or to be more precise, iterator loops, are a simple syntactic sugar over a common
-/// practice within Rust, which is to loop over an iterator until that iterator returns `None` (or
-/// `break` is called).
+/// practice within Rust, which is to loop over anything that implements [`IntoIterator`] until the
+/// iterator returned by `.into_iter()` returns `None` (or the loop body uses `break`).
///
/// ```rust
/// for i in 0..5 {
//
/// Iterate over a series of values with [`for`].
///
-/// The expression immediately following `in` must implement the [`Iterator`] trait.
+/// The expression immediately following `in` must implement the [`IntoIterator`] trait.
///
/// ## Literal Examples:
///
///
/// (Read more about [range patterns])
///
-/// [`Iterator`]: ../book/ch13-04-performance.html
+/// [`IntoIterator`]: crate::iter::IntoIterator
/// [range patterns]: ../reference/patterns.html?highlight=range#range-patterns
/// [`for`]: keyword.for.html
mod in_keyword {}
#[doc(keyword = "trait")]
//
-/// A common interface for a class of types.
+/// A common interface for a group of types.
///
-/// The documentation for this keyword is [not yet complete]. Pull requests welcome!
+/// A `trait` is like an interface that data types can implement. When a type
+/// implements a trait it can be treated abstractly as that trait using generics
+/// or trait objects.
///
-/// [not yet complete]: https://github.com/rust-lang/rust/issues/34601
+/// Traits can be made up of three varieties of associated items:
+///
+/// - functions and methods
+/// - types
+/// - constants
+///
+/// Traits may also contain additional type parameters. Those type parameters
+/// or the trait itself can be constrained by other traits.
+///
+/// Traits can serve as markers or carry other logical semantics that
+/// aren't expressed through their items. When a type implements that
+/// trait it is promising to uphold its contract. [`Send`] and [`Sync`] are two
+/// such marker traits present in the standard library.
+///
+/// See the [Reference][Ref-Traits] for a lot more information on traits.
+///
+/// # Examples
+///
+/// Traits are declared using the `trait` keyword. Types can implement them
+/// using [`impl`] `Trait` [`for`] `Type`:
+///
+/// ```rust
+/// trait Zero {
+/// const ZERO: Self;
+/// fn is_zero(&self) -> bool;
+/// }
+///
+/// impl Zero for i32 {
+/// const ZERO: Self = 0;
+///
+/// fn is_zero(&self) -> bool {
+/// *self == Self::ZERO
+/// }
+/// }
+///
+/// assert_eq!(i32::ZERO, 0);
+/// assert!(i32::ZERO.is_zero());
+/// assert!(!4.is_zero());
+/// ```
+///
+/// With an associated type:
+///
+/// ```rust
+/// trait Builder {
+/// type Built;
+///
+/// fn build(&self) -> Self::Built;
+/// }
+/// ```
+///
+/// Traits can be generic, with constraints or without:
+///
+/// ```rust
+/// trait MaybeFrom<T> {
+/// fn maybe_from(value: T) -> Option<Self>
+/// where
+/// Self: Sized;
+/// }
+/// ```
+///
+/// Traits can build upon the requirements of other traits. In the example
+/// below `Iterator` is a **supertrait** and `ThreeIterator` is a **subtrait**:
+///
+/// ```rust
+/// trait ThreeIterator: std::iter::Iterator {
+/// fn next_three(&mut self) -> Option<[Self::Item; 3]>;
+/// }
+/// ```
+///
+/// Traits can be used in functions, as parameters:
+///
+/// ```rust
+/// # #![allow(dead_code)]
+/// fn debug_iter<I: Iterator>(it: I) where I::Item: std::fmt::Debug {
+/// for elem in it {
+/// println!("{:#?}", elem);
+/// }
+/// }
+///
+/// // u8_len_1, u8_len_2 and u8_len_3 are equivalent
+///
+/// fn u8_len_1(val: impl Into<Vec<u8>>) -> usize {
+/// val.into().len()
+/// }
+///
+/// fn u8_len_2<T: Into<Vec<u8>>>(val: T) -> usize {
+/// val.into().len()
+/// }
+///
+/// fn u8_len_3<T>(val: T) -> usize
+/// where
+/// T: Into<Vec<u8>>,
+/// {
+/// val.into().len()
+/// }
+/// ```
+///
+/// Or as return types:
+///
+/// ```rust
+/// # #![allow(dead_code)]
+/// fn from_zero_to(v: u8) -> impl Iterator<Item = u8> {
+/// (0..v).into_iter()
+/// }
+/// ```
+///
+/// The use of the [`impl`] keyword in this position allows the function writer
+/// to hide the concrete type as an implementation detail which can change
+/// without breaking user's code.
+///
+/// # Trait objects
+///
+/// A *trait object* is an opaque value of another type that implements a set of
+/// traits. A trait object implements all specified traits as well as their
+/// supertraits (if any).
+///
+/// The syntax is the following: `dyn BaseTrait + AutoTrait1 + ... AutoTraitN`.
+/// Only one `BaseTrait` can be used so this will not compile:
+///
+/// ```rust,compile_fail,E0225
+/// trait A {}
+/// trait B {}
+///
+/// let _: Box<dyn A + B>;
+/// ```
+///
+/// Neither will this, which is a syntax error:
+///
+/// ```rust,compile_fail
+/// trait A {}
+/// trait B {}
+///
+/// let _: Box<dyn A + dyn B>;
+/// ```
+///
+/// On the other hand, this is correct:
+///
+/// ```rust
+/// trait A {}
+///
+/// let _: Box<dyn A + Send + Sync>;
+/// ```
+///
+/// The [Reference][Ref-Trait-Objects] has more information about trait objects,
+/// their limitations and the differences between editions.
+///
+/// # Unsafe traits
+///
+/// Some traits may be unsafe to implement. Using the [`unsafe`] keyword in
+/// front of the trait's declaration is used to mark this:
+///
+/// ```rust
+/// unsafe trait UnsafeTrait {}
+///
+/// unsafe impl UnsafeTrait for i32 {}
+/// ```
+///
+/// # Differences between the 2015 and 2018 editions
+///
+/// In the 2015 edition, parameter patterns were not needed for traits:
+///
+/// ```rust,edition2015
+/// trait Tr {
+/// fn f(i32);
+/// }
+/// ```
+///
+/// This behavior is no longer valid in edition 2018.
+///
+/// [`for`]: keyword.for.html
+/// [`impl`]: keyword.impl.html
+/// [`unsafe`]: keyword.unsafe.html
+/// [`Send`]: marker/trait.Send.html
+/// [`Sync`]: marker/trait.Sync.html
+/// [Ref-Traits]: ../reference/items/traits.html
+/// [Ref-Trait-Objects]: ../reference/types/trait-object.html
mod trait_keyword {}
#[doc(keyword = "true")]
--- /dev/null
+//! Lazy values and one-time initialization of static data.
+
+use crate::{
+ cell::{Cell, UnsafeCell},
+ fmt,
+ mem::{self, MaybeUninit},
+ ops::{Deref, Drop},
+ panic::{RefUnwindSafe, UnwindSafe},
+ sync::Once,
+};
+
+#[doc(inline)]
+#[unstable(feature = "once_cell", issue = "74465")]
+pub use core::lazy::*;
+
+/// A synchronization primitive which can be written to only once.
+///
+/// This type is a thread-safe `OnceCell`.
+///
+/// # Examples
+///
+/// ```
+/// #![feature(once_cell)]
+///
+/// use std::lazy::SyncOnceCell;
+///
+/// static CELL: SyncOnceCell<String> = SyncOnceCell::new();
+/// assert!(CELL.get().is_none());
+///
+/// std::thread::spawn(|| {
+/// let value: &String = CELL.get_or_init(|| {
+/// "Hello, World!".to_string()
+/// });
+/// assert_eq!(value, "Hello, World!");
+/// }).join().unwrap();
+///
+/// let value: Option<&String> = CELL.get();
+/// assert!(value.is_some());
+/// assert_eq!(value.unwrap().as_str(), "Hello, World!");
+/// ```
+#[unstable(feature = "once_cell", issue = "74465")]
+pub struct SyncOnceCell<T> {
+ once: Once,
+ // Whether or not the value is initialized is tracked by `state_and_queue`.
+ value: UnsafeCell<MaybeUninit<T>>,
+}
+
+// Why do we need `T: Send`?
+// Thread A creates a `SyncOnceCell` and shares it with
+// scoped thread B, which fills the cell, which is
+// then destroyed by A. That is, destructor observes
+// a sent value.
+#[unstable(feature = "once_cell", issue = "74465")]
+unsafe impl<T: Sync + Send> Sync for SyncOnceCell<T> {}
+#[unstable(feature = "once_cell", issue = "74465")]
+unsafe impl<T: Send> Send for SyncOnceCell<T> {}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: RefUnwindSafe + UnwindSafe> RefUnwindSafe for SyncOnceCell<T> {}
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: UnwindSafe> UnwindSafe for SyncOnceCell<T> {}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T> Default for SyncOnceCell<T> {
+ fn default() -> SyncOnceCell<T> {
+ SyncOnceCell::new()
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: fmt::Debug> fmt::Debug for SyncOnceCell<T> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.get() {
+ Some(v) => f.debug_tuple("Once").field(v).finish(),
+ None => f.write_str("Once(Uninit)"),
+ }
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: Clone> Clone for SyncOnceCell<T> {
+ fn clone(&self) -> SyncOnceCell<T> {
+ let cell = Self::new();
+ if let Some(value) = self.get() {
+ match cell.set(value.clone()) {
+ Ok(()) => (),
+ Err(_) => unreachable!(),
+ }
+ }
+ cell
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T> From<T> for SyncOnceCell<T> {
+ fn from(value: T) -> Self {
+ let cell = Self::new();
+ match cell.set(value) {
+ Ok(()) => cell,
+ Err(_) => unreachable!(),
+ }
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: PartialEq> PartialEq for SyncOnceCell<T> {
+ fn eq(&self, other: &SyncOnceCell<T>) -> bool {
+ self.get() == other.get()
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: Eq> Eq for SyncOnceCell<T> {}
+
+impl<T> SyncOnceCell<T> {
+ /// Creates a new empty cell.
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub const fn new() -> SyncOnceCell<T> {
+ SyncOnceCell { once: Once::new(), value: UnsafeCell::new(MaybeUninit::uninit()) }
+ }
+
+ /// Gets the reference to the underlying value.
+ ///
+ /// Returns `None` if the cell is empty, or being initialized. This
+ /// method never blocks.
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn get(&self) -> Option<&T> {
+ if self.is_initialized() {
+ // Safe b/c checked is_initialized
+ Some(unsafe { self.get_unchecked() })
+ } else {
+ None
+ }
+ }
+
+ /// Gets the mutable reference to the underlying value.
+ ///
+ /// Returns `None` if the cell is empty. This method never blocks.
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn get_mut(&mut self) -> Option<&mut T> {
+ if self.is_initialized() {
+ // Safe b/c checked is_initialized and we have a unique access
+ Some(unsafe { self.get_unchecked_mut() })
+ } else {
+ None
+ }
+ }
+
+ /// Sets the contents of this cell to `value`.
+ ///
+ /// Returns `Ok(())` if the cell's value was updated.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::SyncOnceCell;
+ ///
+ /// static CELL: SyncOnceCell<i32> = SyncOnceCell::new();
+ ///
+ /// fn main() {
+ /// assert!(CELL.get().is_none());
+ ///
+ /// std::thread::spawn(|| {
+ /// assert_eq!(CELL.set(92), Ok(()));
+ /// }).join().unwrap();
+ ///
+ /// assert_eq!(CELL.set(62), Err(62));
+ /// assert_eq!(CELL.get(), Some(&92));
+ /// }
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn set(&self, value: T) -> Result<(), T> {
+ let mut value = Some(value);
+ self.get_or_init(|| value.take().unwrap());
+ match value {
+ None => Ok(()),
+ Some(value) => Err(value),
+ }
+ }
+
+ /// Gets the contents of the cell, initializing it with `f` if the cell
+ /// was empty.
+ ///
+ /// Many threads may call `get_or_init` concurrently with different
+ /// initializing functions, but it is guaranteed that only one function
+ /// will be executed.
+ ///
+ /// # Panics
+ ///
+ /// If `f` panics, the panic is propagated to the caller, and the cell
+ /// remains uninitialized.
+ ///
+ /// It is an error to reentrantly initialize the cell from `f`. The
+ /// exact outcome is unspecified. Current implementation deadlocks, but
+ /// this may be changed to a panic in the future.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::SyncOnceCell;
+ ///
+ /// let cell = SyncOnceCell::new();
+ /// let value = cell.get_or_init(|| 92);
+ /// assert_eq!(value, &92);
+ /// let value = cell.get_or_init(|| unreachable!());
+ /// assert_eq!(value, &92);
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn get_or_init<F>(&self, f: F) -> &T
+ where
+ F: FnOnce() -> T,
+ {
+ match self.get_or_try_init(|| Ok::<T, !>(f())) {
+ Ok(val) => val,
+ }
+ }
+
+ /// Gets the contents of the cell, initializing it with `f` if
+ /// the cell was empty. If the cell was empty and `f` failed, an
+ /// error is returned.
+ ///
+ /// # Panics
+ ///
+ /// If `f` panics, the panic is propagated to the caller, and
+ /// the cell remains uninitialized.
+ ///
+ /// It is an error to reentrantly initialize the cell from `f`.
+ /// The exact outcome is unspecified. Current implementation
+ /// deadlocks, but this may be changed to a panic in the future.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::SyncOnceCell;
+ ///
+ /// let cell = SyncOnceCell::new();
+ /// assert_eq!(cell.get_or_try_init(|| Err(())), Err(()));
+ /// assert!(cell.get().is_none());
+ /// let value = cell.get_or_try_init(|| -> Result<i32, ()> {
+ /// Ok(92)
+ /// });
+ /// assert_eq!(value, Ok(&92));
+ /// assert_eq!(cell.get(), Some(&92))
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn get_or_try_init<F, E>(&self, f: F) -> Result<&T, E>
+ where
+ F: FnOnce() -> Result<T, E>,
+ {
+ // Fast path check
+ // NOTE: We need to perform an acquire on the state in this method
+ // in order to correctly synchronize `SyncLazy::force`. This is
+ // currently done by calling `self.get()`, which in turn calls
+ // `self.is_initialized()`, which in turn performs the acquire.
+ if let Some(value) = self.get() {
+ return Ok(value);
+ }
+ self.initialize(f)?;
+
+ debug_assert!(self.is_initialized());
+
+ // Safety: The inner value has been initialized
+ Ok(unsafe { self.get_unchecked() })
+ }
+
+ /// Consumes the `SyncOnceCell`, returning the wrapped value. Returns
+ /// `None` if the cell was empty.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::SyncOnceCell;
+ ///
+ /// let cell: SyncOnceCell<String> = SyncOnceCell::new();
+ /// assert_eq!(cell.into_inner(), None);
+ ///
+ /// let cell = SyncOnceCell::new();
+ /// cell.set("hello".to_string()).unwrap();
+ /// assert_eq!(cell.into_inner(), Some("hello".to_string()));
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn into_inner(mut self) -> Option<T> {
+ // Safety: Safe because we immediately free `self` without dropping
+ let inner = unsafe { self.take_inner() };
+
+ // Don't drop this `SyncOnceCell`. We just moved out one of the fields, but didn't set
+ // the state to uninitialized.
+ mem::ManuallyDrop::new(self);
+ inner
+ }
+
+ /// Takes the value out of this `SyncOnceCell`, moving it back to an uninitialized state.
+ ///
+ /// Has no effect and returns `None` if the `SyncOnceCell` hasn't been initialized.
+ ///
+ /// Safety is guaranteed by requiring a mutable reference.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::SyncOnceCell;
+ ///
+ /// let mut cell: SyncOnceCell<String> = SyncOnceCell::new();
+ /// assert_eq!(cell.take(), None);
+ ///
+ /// let mut cell = SyncOnceCell::new();
+ /// cell.set("hello".to_string()).unwrap();
+ /// assert_eq!(cell.take(), Some("hello".to_string()));
+ /// assert_eq!(cell.get(), None);
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn take(&mut self) -> Option<T> {
+ mem::take(self).into_inner()
+ }
+
+ /// Takes the wrapped value out of a `SyncOnceCell`.
+ /// Afterwards the cell is no longer initialized.
+ ///
+ /// Safety: The cell must now be free'd WITHOUT dropping. No other usages of the cell
+ /// are valid. Only used by `into_inner` and `drop`.
+ unsafe fn take_inner(&mut self) -> Option<T> {
+ // The mutable reference guarantees there are no other threads that can observe us
+ // taking out the wrapped value.
+ // Right after this function `self` is supposed to be freed, so it makes little sense
+ // to atomically set the state to uninitialized.
+ if self.is_initialized() {
+ let value = mem::replace(&mut self.value, UnsafeCell::new(MaybeUninit::uninit()));
+ Some(value.into_inner().assume_init())
+ } else {
+ None
+ }
+ }
+
+ #[inline]
+ fn is_initialized(&self) -> bool {
+ self.once.is_completed()
+ }
+
+ #[cold]
+ fn initialize<F, E>(&self, f: F) -> Result<(), E>
+ where
+ F: FnOnce() -> Result<T, E>,
+ {
+ let mut res: Result<(), E> = Ok(());
+ let slot = &self.value;
+
+ // Ignore poisoning from other threads
+ // If another thread panics, then we'll be able to run our closure
+ self.once.call_once_force(|p| {
+ match f() {
+ Ok(value) => {
+ unsafe { (&mut *slot.get()).write(value) };
+ }
+ Err(e) => {
+ res = Err(e);
+
+                    // Treat the underlying `Once` as poisoned since we
+                    // failed to initialize our value.
+ p.poison();
+ }
+ }
+ });
+ res
+ }
+
+ /// Safety: The value must be initialized
+ unsafe fn get_unchecked(&self) -> &T {
+ debug_assert!(self.is_initialized());
+ (&*self.value.get()).get_ref()
+ }
+
+ /// Safety: The value must be initialized
+ unsafe fn get_unchecked_mut(&mut self) -> &mut T {
+ debug_assert!(self.is_initialized());
+ (&mut *self.value.get()).get_mut()
+ }
+}
+
+impl<T> Drop for SyncOnceCell<T> {
+ fn drop(&mut self) {
+ // Safety: The cell is being dropped, so it can't be accessed again
+ unsafe { self.take_inner() };
+ }
+}
+
+/// A value which is initialized on the first access.
+///
+/// This type is a thread-safe `Lazy`, and can be used in statics.
+///
+/// # Examples
+///
+/// ```
+/// #![feature(once_cell)]
+///
+/// use std::collections::HashMap;
+///
+/// use std::lazy::SyncLazy;
+///
+/// static HASHMAP: SyncLazy<HashMap<i32, String>> = SyncLazy::new(|| {
+/// println!("initializing");
+/// let mut m = HashMap::new();
+/// m.insert(13, "Spica".to_string());
+/// m.insert(74, "Hoyten".to_string());
+/// m
+/// });
+///
+/// fn main() {
+/// println!("ready");
+/// std::thread::spawn(|| {
+/// println!("{:?}", HASHMAP.get(&13));
+/// }).join().unwrap();
+/// println!("{:?}", HASHMAP.get(&74));
+///
+/// // Prints:
+/// // ready
+/// // initializing
+/// // Some("Spica")
+/// // Some("Hoyten")
+/// }
+/// ```
+#[unstable(feature = "once_cell", issue = "74465")]
+pub struct SyncLazy<T, F = fn() -> T> {
+ cell: SyncOnceCell<T>,
+ init: Cell<Option<F>>,
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: fmt::Debug, F> fmt::Debug for SyncLazy<T, F> {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ f.debug_struct("Lazy").field("cell", &self.cell).field("init", &"..").finish()
+ }
+}
+
+// We never create a `&F` from a `&SyncLazy<T, F>` so it is fine
+// to not impl `Sync` for `F`
+// we do create a `&mut Option<F>` in `force`, but this is
+// properly synchronized, so it only happens once
+// so it also does not contribute to this impl.
+#[unstable(feature = "once_cell", issue = "74465")]
+unsafe impl<T, F: Send> Sync for SyncLazy<T, F> where SyncOnceCell<T>: Sync {}
+// auto-derived `Send` impl is OK.
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T, F: RefUnwindSafe> RefUnwindSafe for SyncLazy<T, F> where SyncOnceCell<T>: RefUnwindSafe {}
+
+impl<T, F> SyncLazy<T, F> {
+ /// Creates a new lazy value with the given initializing
+ /// function.
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub const fn new(f: F) -> SyncLazy<T, F> {
+ SyncLazy { cell: SyncOnceCell::new(), init: Cell::new(Some(f)) }
+ }
+}
+
+impl<T, F: FnOnce() -> T> SyncLazy<T, F> {
+ /// Forces the evaluation of this lazy value and
+ /// returns a reference to result. This is equivalent
+ /// to the `Deref` impl, but is explicit.
+ ///
+ /// # Examples
+ ///
+ /// ```
+ /// #![feature(once_cell)]
+ ///
+ /// use std::lazy::SyncLazy;
+ ///
+ /// let lazy = SyncLazy::new(|| 92);
+ ///
+ /// assert_eq!(SyncLazy::force(&lazy), &92);
+ /// assert_eq!(&*lazy, &92);
+ /// ```
+ #[unstable(feature = "once_cell", issue = "74465")]
+ pub fn force(this: &SyncLazy<T, F>) -> &T {
+ this.cell.get_or_init(|| match this.init.take() {
+ Some(f) => f(),
+ None => panic!("Lazy instance has previously been poisoned"),
+ })
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T, F: FnOnce() -> T> Deref for SyncLazy<T, F> {
+ type Target = T;
+ fn deref(&self) -> &T {
+ SyncLazy::force(self)
+ }
+}
+
+#[unstable(feature = "once_cell", issue = "74465")]
+impl<T: Default> Default for SyncLazy<T> {
+ /// Creates a new lazy value using `Default` as the initializing function.
+ fn default() -> SyncLazy<T> {
+ SyncLazy::new(T::default)
+ }
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::{
+ lazy::{Lazy, SyncLazy, SyncOnceCell},
+ panic,
+ sync::{
+ atomic::{AtomicUsize, Ordering::SeqCst},
+ mpsc::channel,
+ Mutex,
+ },
+ };
+
+ #[test]
+ fn lazy_default() {
+ static CALLED: AtomicUsize = AtomicUsize::new(0);
+
+ struct Foo(u8);
+ impl Default for Foo {
+ fn default() -> Self {
+ CALLED.fetch_add(1, SeqCst);
+ Foo(42)
+ }
+ }
+
+ let lazy: Lazy<Mutex<Foo>> = <_>::default();
+
+ assert_eq!(CALLED.load(SeqCst), 0);
+
+ assert_eq!(lazy.lock().unwrap().0, 42);
+ assert_eq!(CALLED.load(SeqCst), 1);
+
+ lazy.lock().unwrap().0 = 21;
+
+ assert_eq!(lazy.lock().unwrap().0, 21);
+ assert_eq!(CALLED.load(SeqCst), 1);
+ }
+
+ #[test]
+ fn lazy_poisoning() {
+ let x: Lazy<String> = Lazy::new(|| panic!("kaboom"));
+ for _ in 0..2 {
+ let res = panic::catch_unwind(panic::AssertUnwindSafe(|| x.len()));
+ assert!(res.is_err());
+ }
+ }
+
+ // miri doesn't support threads
+ #[cfg(not(miri))]
+ fn spawn_and_wait<R: Send + 'static>(f: impl FnOnce() -> R + Send + 'static) -> R {
+ crate::thread::spawn(f).join().unwrap()
+ }
+
+ #[cfg(not(miri))]
+ fn spawn(f: impl FnOnce() + Send + 'static) {
+ let _ = crate::thread::spawn(f);
+ }
+
+    // "stub threads" for Miri: Miri has no real thread support, so run the
+    // closure synchronously on the current thread instead.
+    #[cfg(miri)]
+    fn spawn_and_wait<R: Send + 'static>(f: impl FnOnce() -> R + Send + 'static) -> R {
+        // Fix: `f` takes no arguments; `f(())` passed a spurious unit
+        // argument and would not compile under `cfg(miri)`.
+        f()
+    }
+
+    #[cfg(miri)]
+    fn spawn(f: impl FnOnce() + Send + 'static) {
+        f()
+    }
+
+ #[test]
+ fn sync_once_cell() {
+ static ONCE_CELL: SyncOnceCell<i32> = SyncOnceCell::new();
+
+ assert!(ONCE_CELL.get().is_none());
+
+ spawn_and_wait(|| {
+ ONCE_CELL.get_or_init(|| 92);
+ assert_eq!(ONCE_CELL.get(), Some(&92));
+ });
+
+ ONCE_CELL.get_or_init(|| panic!("Kabom!"));
+ assert_eq!(ONCE_CELL.get(), Some(&92));
+ }
+
+ #[test]
+ fn sync_once_cell_get_mut() {
+ let mut c = SyncOnceCell::new();
+ assert!(c.get_mut().is_none());
+ c.set(90).unwrap();
+ *c.get_mut().unwrap() += 2;
+ assert_eq!(c.get_mut(), Some(&mut 92));
+ }
+
+ #[test]
+ fn sync_once_cell_get_unchecked() {
+ let c = SyncOnceCell::new();
+ c.set(92).unwrap();
+ unsafe {
+ assert_eq!(c.get_unchecked(), &92);
+ }
+ }
+
+ #[test]
+ fn sync_once_cell_drop() {
+ static DROP_CNT: AtomicUsize = AtomicUsize::new(0);
+ struct Dropper;
+ impl Drop for Dropper {
+ fn drop(&mut self) {
+ DROP_CNT.fetch_add(1, SeqCst);
+ }
+ }
+
+ let x = SyncOnceCell::new();
+ spawn_and_wait(move || {
+ x.get_or_init(|| Dropper);
+ assert_eq!(DROP_CNT.load(SeqCst), 0);
+ drop(x);
+ });
+
+ assert_eq!(DROP_CNT.load(SeqCst), 1);
+ }
+
+ #[test]
+ fn sync_once_cell_drop_empty() {
+ let x = SyncOnceCell::<String>::new();
+ drop(x);
+ }
+
+ #[test]
+ fn clone() {
+ let s = SyncOnceCell::new();
+ let c = s.clone();
+ assert!(c.get().is_none());
+
+ s.set("hello".to_string()).unwrap();
+ let c = s.clone();
+ assert_eq!(c.get().map(String::as_str), Some("hello"));
+ }
+
+ #[test]
+ fn get_or_try_init() {
+ let cell: SyncOnceCell<String> = SyncOnceCell::new();
+ assert!(cell.get().is_none());
+
+ let res = panic::catch_unwind(|| cell.get_or_try_init(|| -> Result<_, ()> { panic!() }));
+ assert!(res.is_err());
+ assert!(!cell.is_initialized());
+ assert!(cell.get().is_none());
+
+ assert_eq!(cell.get_or_try_init(|| Err(())), Err(()));
+
+ assert_eq!(
+ cell.get_or_try_init(|| Ok::<_, ()>("hello".to_string())),
+ Ok(&"hello".to_string())
+ );
+ assert_eq!(cell.get(), Some(&"hello".to_string()));
+ }
+
+ #[test]
+ fn from_impl() {
+ assert_eq!(SyncOnceCell::from("value").get(), Some(&"value"));
+ assert_ne!(SyncOnceCell::from("foo").get(), Some(&"bar"));
+ }
+
+ #[test]
+ fn partialeq_impl() {
+ assert!(SyncOnceCell::from("value") == SyncOnceCell::from("value"));
+ assert!(SyncOnceCell::from("foo") != SyncOnceCell::from("bar"));
+
+ assert!(SyncOnceCell::<String>::new() == SyncOnceCell::new());
+ assert!(SyncOnceCell::<String>::new() != SyncOnceCell::from("value".to_owned()));
+ }
+
+ #[test]
+ fn into_inner() {
+ let cell: SyncOnceCell<String> = SyncOnceCell::new();
+ assert_eq!(cell.into_inner(), None);
+ let cell = SyncOnceCell::new();
+ cell.set("hello".to_string()).unwrap();
+ assert_eq!(cell.into_inner(), Some("hello".to_string()));
+ }
+
+ #[test]
+ fn sync_lazy_new() {
+ static CALLED: AtomicUsize = AtomicUsize::new(0);
+ static SYNC_LAZY: SyncLazy<i32> = SyncLazy::new(|| {
+ CALLED.fetch_add(1, SeqCst);
+ 92
+ });
+
+ assert_eq!(CALLED.load(SeqCst), 0);
+
+ spawn_and_wait(|| {
+ let y = *SYNC_LAZY - 30;
+ assert_eq!(y, 62);
+ assert_eq!(CALLED.load(SeqCst), 1);
+ });
+
+ let y = *SYNC_LAZY - 30;
+ assert_eq!(y, 62);
+ assert_eq!(CALLED.load(SeqCst), 1);
+ }
+
+ #[test]
+ fn sync_lazy_default() {
+ static CALLED: AtomicUsize = AtomicUsize::new(0);
+
+ struct Foo(u8);
+ impl Default for Foo {
+ fn default() -> Self {
+ CALLED.fetch_add(1, SeqCst);
+ Foo(42)
+ }
+ }
+
+ let lazy: SyncLazy<Mutex<Foo>> = <_>::default();
+
+ assert_eq!(CALLED.load(SeqCst), 0);
+
+ assert_eq!(lazy.lock().unwrap().0, 42);
+ assert_eq!(CALLED.load(SeqCst), 1);
+
+ lazy.lock().unwrap().0 = 21;
+
+ assert_eq!(lazy.lock().unwrap().0, 21);
+ assert_eq!(CALLED.load(SeqCst), 1);
+ }
+
+ #[test]
+ #[cfg_attr(miri, ignore)] // leaks memory
+ fn static_sync_lazy() {
+ static XS: SyncLazy<Vec<i32>> = SyncLazy::new(|| {
+ let mut xs = Vec::new();
+ xs.push(1);
+ xs.push(2);
+ xs.push(3);
+ xs
+ });
+
+ spawn_and_wait(|| {
+ assert_eq!(&*XS, &vec![1, 2, 3]);
+ });
+
+ assert_eq!(&*XS, &vec![1, 2, 3]);
+ }
+
+ #[test]
+ #[cfg_attr(miri, ignore)] // leaks memory
+ fn static_sync_lazy_via_fn() {
+ fn xs() -> &'static Vec<i32> {
+ static XS: SyncOnceCell<Vec<i32>> = SyncOnceCell::new();
+ XS.get_or_init(|| {
+ let mut xs = Vec::new();
+ xs.push(1);
+ xs.push(2);
+ xs.push(3);
+ xs
+ })
+ }
+ assert_eq!(xs(), &vec![1, 2, 3]);
+ }
+
+ #[test]
+ fn sync_lazy_poisoning() {
+ let x: SyncLazy<String> = SyncLazy::new(|| panic!("kaboom"));
+ for _ in 0..2 {
+ let res = panic::catch_unwind(|| x.len());
+ assert!(res.is_err());
+ }
+ }
+
+ #[test]
+ fn is_sync_send() {
+ fn assert_traits<T: Send + Sync>() {}
+ assert_traits::<SyncOnceCell<String>>();
+ assert_traits::<SyncLazy<String>>();
+ }
+
+ #[test]
+ fn eval_once_macro() {
+ macro_rules! eval_once {
+ (|| -> $ty:ty {
+ $($body:tt)*
+ }) => {{
+ static ONCE_CELL: SyncOnceCell<$ty> = SyncOnceCell::new();
+ fn init() -> $ty {
+ $($body)*
+ }
+ ONCE_CELL.get_or_init(init)
+ }};
+ }
+
+ let fib: &'static Vec<i32> = eval_once! {
+ || -> Vec<i32> {
+ let mut res = vec![1, 1];
+ for i in 0..10 {
+ let next = res[i] + res[i + 1];
+ res.push(next);
+ }
+ res
+ }
+ };
+ assert_eq!(fib[5], 8)
+ }
+
+ #[test]
+ #[cfg_attr(miri, ignore)] // deadlocks without real threads
+ fn sync_once_cell_does_not_leak_partially_constructed_boxes() {
+ static ONCE_CELL: SyncOnceCell<String> = SyncOnceCell::new();
+
+ let n_readers = 10;
+ let n_writers = 3;
+ const MSG: &str = "Hello, World";
+
+ let (tx, rx) = channel();
+
+ for _ in 0..n_readers {
+ let tx = tx.clone();
+ spawn(move || {
+ loop {
+ if let Some(msg) = ONCE_CELL.get() {
+ tx.send(msg).unwrap();
+ break;
+ }
+ }
+ });
+ }
+ for _ in 0..n_writers {
+ spawn(move || {
+ let _ = ONCE_CELL.set(MSG.to_owned());
+ });
+ }
+
+ for _ in 0..n_readers {
+ let msg = rx.recv().unwrap();
+ assert_eq!(msg, MSG);
+ }
+ }
+}
//! # Contributing changes to the documentation
//!
//! Check out the rust contribution guidelines [here](
-//! https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.md).
-//! The source for this documentation can be found on [Github](https://github.com/rust-lang).
+//! https://rustc-dev-guide.rust-lang.org/getting-started.html).
+//! The source for this documentation can be found on
+//! [GitHub](https://github.com/rust-lang/rust).
//! To contribute changes, make sure you read the guidelines first, then submit
//! pull-requests for your suggested changes.
//!
//! [primitive types]: ../book/ch03-02-data-types.html
//! [rust-discord]: https://discord.gg/rust-lang
-#![stable(feature = "rust1", since = "1.0.0")]
+#![cfg_attr(not(feature = "restricted-std"), stable(feature = "rust1", since = "1.0.0"))]
+#![cfg_attr(feature = "restricted-std", unstable(feature = "restricted_std", issue = "none"))]
#![doc(
html_root_url = "https://doc.rust-lang.org/nightly/",
html_playground_url = "https://play.rust-lang.org/",
#![feature(doc_cfg)]
#![feature(doc_keyword)]
#![feature(doc_masked)]
+#![cfg_attr(not(bootstrap), feature(doc_spotlight))]
#![feature(dropck_eyepatch)]
#![feature(duration_constants)]
#![feature(exact_size_is_empty)]
#![feature(linkage)]
#![feature(llvm_asm)]
#![feature(log_syntax)]
+#![feature(maybe_uninit_extra)]
#![feature(maybe_uninit_ref)]
#![feature(maybe_uninit_slice)]
#![feature(min_specialization)]
#![feature(negative_impls)]
#![feature(never_type)]
#![feature(nll)]
+#![feature(once_cell)]
#![feature(optin_builtin_traits)]
#![feature(or_patterns)]
#![feature(panic_info_message)]
#![feature(ptr_internals)]
#![feature(raw)]
#![feature(raw_ref_macros)]
+#![feature(ready_macro)]
#![feature(renamed_spin_loop)]
#![feature(rustc_attrs)]
#![feature(rustc_private)]
#![feature(toowned_clone_into)]
#![feature(total_cmp)]
#![feature(trace_macros)]
-#![cfg_attr(bootstrap, feature(track_caller))]
#![feature(try_reserve)]
#![feature(unboxed_closures)]
#![feature(unsafe_block_in_unsafe_fn)]
pub mod sync;
pub mod time;
+#[unstable(feature = "once_cell", issue = "74465")]
+pub mod lazy;
+
#[stable(feature = "futures_api", since = "1.36.0")]
pub mod task {
//! Types and Traits for working with asynchronous tasks.
// compiler
pub mod rt;
+#[path = "../backtrace/src/lib.rs"]
+#[allow(dead_code, unused_attributes)]
+mod backtrace_rs;
+
// Pull in the `std_detect` crate directly into libstd. The contents of
// `std_detect` are in a different repository: rust-lang/stdarch.
//
// the rustdoc documentation for the existing keywords. Using `include!`
// because rustdoc only looks for these modules at the crate level.
include!("keyword_docs.rs");
+
+// This is required to avoid an unstable error when `restricted-std` is not
+// enabled. The use of #![feature(restricted_std)] in rustc-std-workspace-std
+// is unconditional, so the unstable feature needs to be defined somewhere.
+#[cfg_attr(not(feature = "restricted-std"), unstable(feature = "restricted_std", issue = "none"))]
+mod __restricted_std_workaround {}
// If this is a double panic, make sure that we print a backtrace
// for this panic. Otherwise only print it if logging is enabled.
let backtrace_env = if panic_count::get() >= 2 {
- RustBacktrace::Print(backtrace_rs::PrintFmt::Full)
+ RustBacktrace::Print(crate::backtrace_rs::PrintFmt::Full)
} else {
backtrace::rust_backtrace_env()
};
#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn wait_timeout_wait() {
let m = Arc::new(Mutex::new(()));
let c = Arc::new(Condvar::new());
#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn wait_timeout_while_wait() {
let m = Arc::new(Mutex::new(()));
let c = Arc::new(Condvar::new());
#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn wait_timeout_while_wake() {
let pair = Arc::new((Mutex::new(false), Condvar::new()));
let pair_copy = pair.clone();
#[test]
#[cfg_attr(target_os = "emscripten", ignore)]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn wait_timeout_wake() {
let m = Arc::new(Mutex::new(()));
let c = Arc::new(Condvar::new());
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn oneshot_single_thread_recv_timeout() {
let (tx, rx) = channel();
tx.send(()).unwrap();
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn stress_recv_timeout_two_threads() {
let (tx, rx) = channel();
let stress = stress_factor() + 100;
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn recv_timeout_upgrade() {
let (tx, rx) = channel::<()>();
let timeout = Duration::from_millis(1);
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn stress_recv_timeout_shared() {
let (tx, rx) = channel();
let stress = stress_factor() + 100;
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn very_long_recv_timeout_wont_panic() {
let (tx, rx) = channel::<()>();
let join_handle = thread::spawn(move || rx.recv_timeout(Duration::from_secs(u64::MAX)));
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn shared_recv_timeout() {
let (tx, rx) = channel();
let total = 5;
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn recv_timeout() {
let (tx, rx) = sync_channel::<i32>(1);
assert_eq!(rx.recv_timeout(Duration::from_millis(1)), Err(RecvTimeoutError::Timeout));
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn stress_recv_timeout_two_threads() {
let (tx, rx) = sync_channel::<i32>(0);
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn stress_recv_timeout_shared() {
const AMT: u32 = 1000;
const NTHREADS: u32 = 8;
#[derive(Debug)]
pub struct OnceState {
poisoned: bool,
+ set_state_on_drop_to: Cell<usize>,
}
/// Initialization value for static [`Once`] values.
}
let mut f = Some(f);
- self.call_inner(true, &mut |p| f.take().unwrap()(&OnceState { poisoned: p }));
+ self.call_inner(true, &mut |p| f.take().unwrap()(p));
}
/// Returns `true` if some `call_once` call has completed
// currently no way to take an `FnOnce` and call it via virtual dispatch
// without some allocation overhead.
#[cold]
- fn call_inner(&self, ignore_poisoning: bool, init: &mut dyn FnMut(bool)) {
+ fn call_inner(&self, ignore_poisoning: bool, init: &mut dyn FnMut(&OnceState)) {
let mut state_and_queue = self.state_and_queue.load(Ordering::Acquire);
loop {
match state_and_queue {
};
// Run the initialization function, letting it know if we're
// poisoned or not.
- init(state_and_queue == POISONED);
- waiter_queue.set_state_on_drop_to = COMPLETE;
+ let init_state = OnceState {
+ poisoned: state_and_queue == POISONED,
+ set_state_on_drop_to: Cell::new(COMPLETE),
+ };
+ init(&init_state);
+ waiter_queue.set_state_on_drop_to = init_state.set_state_on_drop_to.get();
break;
}
_ => {
pub fn poisoned(&self) -> bool {
self.poisoned
}
+
+ /// Poison the associated [`Once`] without explicitly panicking.
+ ///
+ /// [`Once`]: struct.Once.html
+ // NOTE: This is currently only exposed for the `lazy` module
+ pub(crate) fn poison(&self) {
+ self.set_state_on_drop_to.set(POISONED);
+ }
}
#[cfg(all(test, not(target_os = "emscripten")))]
mod sgx;
pub use self::sgx::*;
} else {
- compile_error!("libstd doesn't compile for this platform yet");
+ mod unsupported;
+ pub use self::unsupported::*;
}
}
use crate::cmp;
-use crate::io::{Error as IoError, IoSlice, IoSliceMut, Result as IoResult};
-use crate::time::Duration;
+use crate::convert::TryFrom;
+use crate::io::{Error as IoError, ErrorKind, IoSlice, IoSliceMut, Result as IoResult};
+use crate::sys::rand::rdrand64;
+use crate::time::{Duration, Instant};
pub(crate) mod alloc;
#[macro_use]
/// Usercall `wait`. See the ABI documentation for more information.
#[unstable(feature = "sgx_platform", issue = "56975")]
-pub fn wait(event_mask: u64, timeout: u64) -> IoResult<u64> {
+pub fn wait(event_mask: u64, mut timeout: u64) -> IoResult<u64> {
+ if timeout != WAIT_NO && timeout != WAIT_INDEFINITE {
+ // We don't want people to rely on accuracy of timeouts to make
+ // security decisions in an SGX enclave. That's why we add a random
+ // amount not exceeding +/- 10% to the timeout value to discourage
+ // people from relying on accuracy of timeouts while providing a way
+ // to make things work in other cases. Note that in the SGX threat
+ // model the enclave runner which is serving the wait usercall is not
+ // trusted to ensure accurate timeouts.
+ if let Ok(timeout_signed) = i64::try_from(timeout) {
+ let tenth = timeout_signed / 10;
+ let deviation = (rdrand64() as i64).checked_rem(tenth).unwrap_or(0);
+ timeout = timeout_signed.saturating_add(deviation) as _;
+ }
+ }
unsafe { raw::wait(event_mask, timeout).from_sgx_result() }
}
+/// This function makes an effort to wait for a non-spurious event at least as
+/// long as `duration`. Note that in general there is no guarantee about accuracy
+/// of time and timeouts in SGX model. The enclave runner serving usercalls may
+/// lie about current time and/or ignore timeout values.
+///
+/// Once the event is observed, `should_wake_up` will be used to determine
+/// whether or not the event was spurious.
+#[unstable(feature = "sgx_platform", issue = "56975")]
+pub fn wait_timeout<F>(event_mask: u64, duration: Duration, should_wake_up: F)
+where
+    F: Fn() -> bool,
+{
+    // Calls the wait usercall and checks the result. Returns true if event was
+    // returned, and false if WouldBlock/TimedOut was returned.
+    // If duration is None, it will use WAIT_NO.
+    fn wait_checked(event_mask: u64, duration: Option<Duration>) -> bool {
+        // Cap the nanosecond value at `u64::MAX - 1`: `WAIT_INDEFINITE` is a
+        // sentinel, so a saturating conversion must not accidentally produce it.
+        let timeout = duration.map_or(raw::WAIT_NO, |duration| {
+            cmp::min((u64::MAX - 1) as u128, duration.as_nanos()) as u64
+        });
+        match wait(event_mask, timeout) {
+            Ok(eventset) => {
+                if event_mask == 0 {
+                    rtabort!("expected wait() to return Err, found Ok.");
+                }
+                // The runner must report a non-empty subset of the requested mask.
+                rtassert!(eventset != 0 && eventset & !event_mask == 0);
+                true
+            }
+            Err(e) => {
+                rtassert!(e.kind() == ErrorKind::TimedOut || e.kind() == ErrorKind::WouldBlock);
+                false
+            }
+        }
+    }
+
+    // Fast path: one full-length wait; bail out on timeout or a genuine wakeup.
+    match wait_checked(event_mask, Some(duration)) {
+        false => return, // timed out
+        true if should_wake_up() => return, // woken up
+        true => {} // spurious event
+    }
+
+    // Drain all cached events.
+    // Note that `event_mask != 0` is implied if we get here.
+    loop {
+        match wait_checked(event_mask, None) {
+            false => break, // no more cached events
+            true if should_wake_up() => return, // woken up
+            true => {} // spurious event
+        }
+    }
+
+    // Continue waiting, but take note of time spent waiting so we don't wait
+    // forever. We intentionally don't call `Instant::now()` before this point
+    // to avoid the cost of the `insecure_time` usercall in case there are no
+    // spurious wakeups.
+
+    let start = Instant::now();
+    let mut remaining = duration;
+    loop {
+        match wait_checked(event_mask, Some(remaining)) {
+            false => return, // timed out
+            true if should_wake_up() => return, // woken up
+            true => {} // spurious event
+        }
+        // Shrink the budget by elapsed time; `None` means it is exhausted.
+        remaining = match duration.checked_sub(start.elapsed()) {
+            Some(remaining) => remaining,
+            None => break,
+        }
+    }
+}
+
/// Usercall `send`. See the ABI documentation for more information.
#[unstable(feature = "sgx_platform", issue = "56975")]
pub fn send(event_set: u64, tcs: Option<Tcs>) -> IoResult<()> {
mutex.lock()
}
- pub unsafe fn wait_timeout(&self, _mutex: &Mutex, _dur: Duration) -> bool {
- rtabort!("timeout not supported in SGX");
+ pub unsafe fn wait_timeout(&self, mutex: &Mutex, dur: Duration) -> bool {
+ let success = WaitQueue::wait_timeout(&self.inner, dur, || mutex.unlock());
+ mutex.lock();
+ success
}
#[inline]
abort_internal();
}
-pub fn hashmap_random_keys() -> (u64, u64) {
- fn rdrand64() -> u64 {
+pub mod rand {
+ pub fn rdrand64() -> u64 {
unsafe {
let mut ret: u64 = 0;
for _ in 0..10 {
rtabort!("Failed to obtain random data");
}
}
- (rdrand64(), rdrand64())
+}
+
+pub fn hashmap_random_keys() -> (u64, u64) {
+ (self::rand::rdrand64(), self::rand::rdrand64())
}
pub use crate::sys_common::{AsInner, FromInner, IntoInner};
// FIXME: could store this pointer in TLS somewhere
}
- pub fn sleep(_dur: Duration) {
- rtabort!("can't sleep"); // FIXME
+ pub fn sleep(dur: Duration) {
+ usercalls::wait_timeout(0, dur, || true);
}
pub fn join(self) {
+//! A simple queue implementation for synchronization primitives.
+//!
+//! This queue is used to implement condition variable and mutexes.
+//!
+//! Users of this API are expected to use the `WaitVariable<T>` type. Since
+//! that type is not `Sync`, it needs to be protected by e.g., a `SpinMutex` to
+//! allow shared access.
+//!
+//! Since userspace may send spurious wake-ups, the wakeup event state is
+//! recorded in the enclave. The wakeup event state is protected by a spinlock.
+//! The queue and associated wait state are stored in a `WaitVariable`.
use crate::num::NonZeroUsize;
-/// A simple queue implementation for synchronization primitives.
-///
-/// This queue is used to implement condition variable and mutexes.
-///
-/// Users of this API are expected to use the `WaitVariable<T>` type. Since
-/// that type is not `Sync`, it needs to be protected by e.g., a `SpinMutex` to
-/// allow shared access.
-///
-/// Since userspace may send spurious wake-ups, the wakeup event state is
-/// recorded in the enclave. The wakeup event state is protected by a spinlock.
-/// The queue and associated wait state are stored in a `WaitVariable`.
use crate::ops::{Deref, DerefMut};
+use crate::time::Duration;
use super::abi::thread;
use super::abi::usercalls;
}
}
+    /// Adds the calling thread to the `WaitVariable`'s wait queue, then wait
+    /// until a wakeup event or timeout. If event was observed, returns true.
+    /// If not, it will remove the calling thread from the wait queue.
+    pub fn wait_timeout<T, F: FnOnce()>(
+        lock: &SpinMutex<WaitVariable<T>>,
+        timeout: Duration,
+        before_wait: F,
+    ) -> bool {
+        // very unsafe: check requirements of UnsafeList::push
+        unsafe {
+            let mut entry = UnsafeListEntry::new(SpinMutex::new(WaitEntry {
+                tcs: thread::current(),
+                wake: false,
+            }));
+            let entry_lock = lock.lock().queue.inner.push(&mut entry);
+            // `before_wait` (e.g. unlocking the user's mutex) runs only after our
+            // entry is queued, so a concurrent waker cannot miss this thread.
+            before_wait();
+            usercalls::wait_timeout(EV_UNPARK, timeout, || entry_lock.lock().wake);
+            // acquire the wait queue's lock first to avoid deadlock.
+            let mut guard = lock.lock();
+            let success = entry_lock.lock().wake;
+            if !success {
+                // nobody is waking us up, so remove our entry from the wait queue.
+                guard.queue.inner.remove(&mut entry);
+            }
+            success
+        }
+    }
+
/// Either find the next waiter on the wait queue, or return the mutex
/// guard unchanged.
///
Some((*first.as_ptr()).value.as_ref().unwrap())
}
}
+
+    /// Removes an entry from the list.
+    ///
+    /// # Safety
+    ///
+    /// The caller must ensure that `entry` has been pushed onto `self`
+    /// prior to this call and has not moved since then.
+    pub unsafe fn remove(&mut self, entry: &mut UnsafeListEntry<T>) {
+        rtassert!(!self.is_empty());
+        // BEFORE:
+        //     /----\ next ---> /-----\ next ---> /----\
+        // ... |prev|           |entry|           |next| ...
+        //     \----/ <--- prev \-----/ <--- prev \----/
+        //
+        // AFTER:
+        //     /----\ next ---> /----\
+        // ... |prev|           |next| ...
+        //     \----/ <--- prev \----/
+        let mut prev = entry.prev;
+        let mut next = entry.next;
+        prev.as_mut().next = next;
+        next.as_mut().prev = prev;
+        // Reset the unlinked entry's links to dangling so it can be pushed again.
+        entry.next = NonNull::dangling();
+        entry.prev = NonNull::dangling();
+    }
}
#[cfg(test)]
}
}
+    #[test]
+    fn push_remove() {
+        // Removing the sole element must leave the list empty.
+        unsafe {
+            let mut node = UnsafeListEntry::new(1234);
+            let mut list = UnsafeList::new();
+            assert_eq!(list.push(&mut node), &1234);
+            list.remove(&mut node);
+            assert_empty(&mut list);
+        }
+    }
+
+    #[test]
+    fn push_remove_pop() {
+        // Interleave `remove` of head/middle/tail entries with `pop` to check
+        // that unlinking keeps the remaining list intact and reusable.
+        unsafe {
+            let mut node1 = UnsafeListEntry::new(11);
+            let mut node2 = UnsafeListEntry::new(12);
+            let mut node3 = UnsafeListEntry::new(13);
+            let mut node4 = UnsafeListEntry::new(14);
+            let mut node5 = UnsafeListEntry::new(15);
+            let mut list = UnsafeList::new();
+            assert_eq!(list.push(&mut node1), &11);
+            assert_eq!(list.push(&mut node2), &12);
+            assert_eq!(list.push(&mut node3), &13);
+            assert_eq!(list.push(&mut node4), &14);
+            assert_eq!(list.push(&mut node5), &15);
+
+            list.remove(&mut node1);
+            assert_eq!(list.pop().unwrap(), &12);
+            list.remove(&mut node3);
+            assert_eq!(list.pop().unwrap(), &14);
+            list.remove(&mut node5);
+            assert_empty(&mut list);
+
+            // Removed entries are reusable: push one again and pop it back.
+            assert_eq!(list.push(&mut node1), &11);
+            assert_eq!(list.pop().unwrap(), &11);
+            assert_empty(&mut list);
+
+            assert_eq!(list.push(&mut node3), &13);
+            assert_eq!(list.push(&mut node4), &14);
+            list.remove(&mut node3);
+            list.remove(&mut node4);
+            assert_empty(&mut list);
+        }
+    }
+
#[test]
fn complex_pushes_pops() {
unsafe {
use super::*;
use crate::sync::Arc;
use crate::thread;
- use crate::time::{Duration, SystemTime};
+ use crate::time::Duration;
#[test]
fn sleep() {
*mutex2.lock() = 1;
});
- // "sleep" for 50ms
- // FIXME: https://github.com/fortanix/rust-sgx/issues/31
- let start = SystemTime::now();
- let max = Duration::from_millis(50);
- while start.elapsed().unwrap() < max {}
+ thread::sleep(Duration::from_millis(50));
assert_eq!(*guard, 0);
drop(guard);
#[cfg(target_arch = "aarch64")]
extern "C" {
fn objc_msgSend(obj: NsId, sel: Sel) -> NsId;
- #[cfg_attr(not(bootstrap), allow(clashing_extern_declarations))]
+ #[allow(clashing_extern_declarations)]
#[link_name = "objc_msgSend"]
fn objc_msgSend_ul(obj: NsId, sel: Sel, i: libc::c_ulong) -> NsId;
}
#[cfg(not(target_arch = "aarch64"))]
extern "C" {
fn objc_msgSend(obj: NsId, sel: Sel, ...) -> NsId;
- #[cfg_attr(not(bootstrap), allow(clashing_extern_declarations))]
+ #[allow(clashing_extern_declarations)]
#[link_name = "objc_msgSend"]
fn objc_msgSend_ul(obj: NsId, sel: Sel, ...) -> NsId;
}
}
}
+/// Converts a raw `c_int` to a type-safe `ExitStatus` by wrapping it without copying.
impl From<c_int> for ExitStatus {
fn from(a: c_int) -> ExitStatus {
ExitStatus(a as i64)
Ok(0) => return Ok((p, ours)),
Ok(8) => {
let (errno, footer) = bytes.split_at(4);
- assert!(
- combine(CLOEXEC_MSG_FOOTER) == combine(footer.try_into().unwrap()),
+ assert_eq!(
+ CLOEXEC_MSG_FOOTER, footer,
"Validation on the CLOEXEC pipe failed: {:?}",
bytes
);
- let errno = combine(errno.try_into().unwrap());
+ let errno = i32::from_be_bytes(errno.try_into().unwrap());
assert!(p.wait().is_ok(), "wait() should either return Ok or panic");
return Err(Error::from_raw_os_error(errno));
}
}
}
}
-
- fn combine(arr: [u8; 4]) -> i32 {
- i32::from_be_bytes(arr)
- }
}
pub fn exec(&mut self, default: Stdio) -> io::Error {
}
}
+/// Converts a raw `c_int` to a type-safe `ExitStatus` by wrapping it without copying.
impl From<c_int> for ExitStatus {
fn from(a: c_int) -> ExitStatus {
ExitStatus(a)
--- /dev/null
+use crate::alloc::{GlobalAlloc, Layout, System};
+
+// Allocator for the `unsupported` platform: there is no heap, so every
+// allocation request reports failure by returning a null pointer (the
+// `GlobalAlloc` contract's signal for allocation failure), and `dealloc`
+// is a no-op since nothing can ever be allocated.
+#[stable(feature = "alloc_system_type", since = "1.28.0")]
+unsafe impl GlobalAlloc for System {
+    #[inline]
+    unsafe fn alloc(&self, _layout: Layout) -> *mut u8 {
+        0 as *mut u8
+    }
+
+    #[inline]
+    unsafe fn alloc_zeroed(&self, _layout: Layout) -> *mut u8 {
+        0 as *mut u8
+    }
+
+    #[inline]
+    unsafe fn dealloc(&self, _ptr: *mut u8, _layout: Layout) {}
+
+    #[inline]
+    unsafe fn realloc(&self, _ptr: *mut u8, _layout: Layout, _new_size: usize) -> *mut u8 {
+        0 as *mut u8
+    }
+}
--- /dev/null
+use crate::ffi::OsString;
+
+// Command-line arguments on the `unsupported` platform: there are none, so
+// `Args` is a permanently-empty iterator.
+pub unsafe fn init(_argc: isize, _argv: *const *const u8) {}
+pub unsafe fn cleanup() {}
+
+pub struct Args {}
+
+pub fn args() -> Args {
+    Args {}
+}
+
+impl Args {
+    // Used by `std::env::Args`'s `Debug` impl; always an empty slice here.
+    pub fn inner_debug(&self) -> &[OsString] {
+        &[]
+    }
+}
+
+impl Iterator for Args {
+    type Item = OsString;
+    fn next(&mut self) -> Option<OsString> {
+        None
+    }
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        (0, Some(0))
+    }
+}
+
+impl ExactSizeIterator for Args {
+    fn len(&self) -> usize {
+        0
+    }
+}
+
+impl DoubleEndedIterator for Args {
+    fn next_back(&mut self) -> Option<OsString> {
+        None
+    }
+}
--- /dev/null
+// These symbols are all defined in `compiler-builtins`
+// (math routines libstd needs but `core` does not provide; no libm exists on
+// this platform, so the compiler-builtins implementations are linked instead).
+extern "C" {
+    pub fn acos(n: f64) -> f64;
+    pub fn acosf(n: f32) -> f32;
+    pub fn asin(n: f64) -> f64;
+    pub fn asinf(n: f32) -> f32;
+    pub fn atan(n: f64) -> f64;
+    pub fn atan2(a: f64, b: f64) -> f64;
+    pub fn atan2f(a: f32, b: f32) -> f32;
+    pub fn atanf(n: f32) -> f32;
+    pub fn cbrt(n: f64) -> f64;
+    pub fn cbrtf(n: f32) -> f32;
+    pub fn cosh(n: f64) -> f64;
+    pub fn coshf(n: f32) -> f32;
+    pub fn expm1(n: f64) -> f64;
+    pub fn expm1f(n: f32) -> f32;
+    pub fn fdim(a: f64, b: f64) -> f64;
+    pub fn fdimf(a: f32, b: f32) -> f32;
+    pub fn hypot(x: f64, y: f64) -> f64;
+    pub fn hypotf(x: f32, y: f32) -> f32;
+    pub fn log1p(n: f64) -> f64;
+    pub fn log1pf(n: f32) -> f32;
+    pub fn sinh(n: f64) -> f64;
+    pub fn sinhf(n: f32) -> f32;
+    pub fn tan(n: f64) -> f64;
+    pub fn tanf(n: f32) -> f32;
+    pub fn tanh(n: f64) -> f64;
+    pub fn tanhf(n: f32) -> f32;
+}
--- /dev/null
+use crate::io as std_io;
+
+pub mod memchr {
+    pub use core::slice::memchr::{memchr, memrchr};
+}
+
+pub use crate::sys_common::os_str_bytes as os_str;
+
+// This is not necessarily correct. May want to consider making it part of the
+// spec definition?
+use crate::os::raw::c_char;
+
+#[cfg(not(test))]
+pub fn init() {}
+
+// Shared helper: every unimplementable operation funnels through this to get
+// a uniform "operation not supported" error.
+pub fn unsupported<T>() -> std_io::Result<T> {
+    Err(unsupported_err())
+}
+
+pub fn unsupported_err() -> std_io::Error {
+    std_io::Error::new(std_io::ErrorKind::Other, "operation not supported on this platform")
+}
+
+// No OS error codes exist here, so every code maps to the generic kind.
+pub fn decode_error_kind(_code: i32) -> crate::io::ErrorKind {
+    crate::io::ErrorKind::Other
+}
+
+pub fn abort_internal() -> ! {
+    core::intrinsics::abort();
+}
+
+// No entropy source on this platform: the hasher keys are fixed constants,
+// so HashMap has no HashDoS resistance here.
+pub fn hashmap_random_keys() -> (u64, u64) {
+    (1, 2)
+}
+
+// This enum is used as the storage for a bunch of types which can't actually
+// exist.
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
+pub enum Void {}
+
+// Safety: caller must pass a valid, NUL-terminated C string pointer.
+pub unsafe fn strlen(mut s: *const c_char) -> usize {
+    let mut n = 0;
+    while *s != 0 {
+        n += 1;
+        s = s.offset(1);
+    }
+    return n;
+}
--- /dev/null
+use crate::sys::mutex::Mutex;
+use crate::time::Duration;
+
+// Condition variable stub: this platform has no threads, so there is never
+// another thread to deliver a notification. Notifying is a harmless no-op,
+// but actually waiting would block forever and therefore panics.
+pub struct Condvar {}
+
+impl Condvar {
+    pub const fn new() -> Condvar {
+        Condvar {}
+    }
+
+    #[inline]
+    pub unsafe fn init(&mut self) {}
+
+    #[inline]
+    pub unsafe fn notify_one(&self) {}
+
+    #[inline]
+    pub unsafe fn notify_all(&self) {}
+
+    pub unsafe fn wait(&self, _mutex: &Mutex) {
+        panic!("condvar wait not supported")
+    }
+
+    pub unsafe fn wait_timeout(&self, _mutex: &Mutex, _dur: Duration) -> bool {
+        panic!("condvar wait not supported");
+    }
+
+    #[inline]
+    pub unsafe fn destroy(&self) {}
+}
--- /dev/null
+// Values backing `std::env::consts` on the `unsupported` platform: nothing is
+// known about the target, so every constant is the empty string.
+pub mod os {
+    pub const FAMILY: &str = "";
+    pub const OS: &str = "";
+    pub const DLL_PREFIX: &str = "";
+    pub const DLL_SUFFIX: &str = "";
+    pub const DLL_EXTENSION: &str = "";
+    pub const EXE_SUFFIX: &str = "";
+    pub const EXE_EXTENSION: &str = "";
+}
--- /dev/null
+use crate::ffi::OsString;
+use crate::fmt;
+use crate::hash::{Hash, Hasher};
+use crate::io::{self, IoSlice, IoSliceMut, SeekFrom};
+use crate::path::{Path, PathBuf};
+use crate::sys::time::SystemTime;
+use crate::sys::{unsupported, Void};
+
+// Filesystem stubs: every type wraps the uninhabited `Void`, so no instance
+// can ever be constructed. Entry points (`open`, `readdir`, `stat`, ...)
+// return the shared `unsupported` error; methods on the uninhabited types use
+// `match self.0 {}`, which the compiler accepts as trivially exhaustive
+// (and unreachable).
+pub struct File(Void);
+
+pub struct FileAttr(Void);
+
+pub struct ReadDir(Void);
+
+pub struct DirEntry(Void);
+
+#[derive(Clone, Debug)]
+pub struct OpenOptions {}
+
+pub struct FilePermissions(Void);
+
+pub struct FileType(Void);
+
+#[derive(Debug)]
+pub struct DirBuilder {}
+
+impl FileAttr {
+    pub fn size(&self) -> u64 {
+        match self.0 {}
+    }
+
+    pub fn perm(&self) -> FilePermissions {
+        match self.0 {}
+    }
+
+    pub fn file_type(&self) -> FileType {
+        match self.0 {}
+    }
+
+    pub fn modified(&self) -> io::Result<SystemTime> {
+        match self.0 {}
+    }
+
+    pub fn accessed(&self) -> io::Result<SystemTime> {
+        match self.0 {}
+    }
+
+    pub fn created(&self) -> io::Result<SystemTime> {
+        match self.0 {}
+    }
+}
+
+impl Clone for FileAttr {
+    fn clone(&self) -> FileAttr {
+        match self.0 {}
+    }
+}
+
+impl FilePermissions {
+    pub fn readonly(&self) -> bool {
+        match self.0 {}
+    }
+
+    pub fn set_readonly(&mut self, _readonly: bool) {
+        match self.0 {}
+    }
+}
+
+impl Clone for FilePermissions {
+    fn clone(&self) -> FilePermissions {
+        match self.0 {}
+    }
+}
+
+impl PartialEq for FilePermissions {
+    fn eq(&self, _other: &FilePermissions) -> bool {
+        match self.0 {}
+    }
+}
+
+impl Eq for FilePermissions {}
+
+impl fmt::Debug for FilePermissions {
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.0 {}
+    }
+}
+
+impl FileType {
+    pub fn is_dir(&self) -> bool {
+        match self.0 {}
+    }
+
+    pub fn is_file(&self) -> bool {
+        match self.0 {}
+    }
+
+    pub fn is_symlink(&self) -> bool {
+        match self.0 {}
+    }
+}
+
+impl Clone for FileType {
+    fn clone(&self) -> FileType {
+        match self.0 {}
+    }
+}
+
+impl Copy for FileType {}
+
+impl PartialEq for FileType {
+    fn eq(&self, _other: &FileType) -> bool {
+        match self.0 {}
+    }
+}
+
+impl Eq for FileType {}
+
+impl Hash for FileType {
+    fn hash<H: Hasher>(&self, _h: &mut H) {
+        match self.0 {}
+    }
+}
+
+impl fmt::Debug for FileType {
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.0 {}
+    }
+}
+
+impl fmt::Debug for ReadDir {
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.0 {}
+    }
+}
+
+impl Iterator for ReadDir {
+    type Item = io::Result<DirEntry>;
+
+    fn next(&mut self) -> Option<io::Result<DirEntry>> {
+        match self.0 {}
+    }
+}
+
+impl DirEntry {
+    pub fn path(&self) -> PathBuf {
+        match self.0 {}
+    }
+
+    pub fn file_name(&self) -> OsString {
+        match self.0 {}
+    }
+
+    pub fn metadata(&self) -> io::Result<FileAttr> {
+        match self.0 {}
+    }
+
+    pub fn file_type(&self) -> io::Result<FileType> {
+        match self.0 {}
+    }
+}
+
+impl OpenOptions {
+    pub fn new() -> OpenOptions {
+        OpenOptions {}
+    }
+
+    // Option setters are accepted but ignored: opening always fails anyway.
+    pub fn read(&mut self, _read: bool) {}
+    pub fn write(&mut self, _write: bool) {}
+    pub fn append(&mut self, _append: bool) {}
+    pub fn truncate(&mut self, _truncate: bool) {}
+    pub fn create(&mut self, _create: bool) {}
+    pub fn create_new(&mut self, _create_new: bool) {}
+}
+
+impl File {
+    pub fn open(_path: &Path, _opts: &OpenOptions) -> io::Result<File> {
+        unsupported()
+    }
+
+    pub fn file_attr(&self) -> io::Result<FileAttr> {
+        match self.0 {}
+    }
+
+    pub fn fsync(&self) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn datasync(&self) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn truncate(&self, _size: u64) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn read(&self, _buf: &mut [u8]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn read_vectored(&self, _bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn is_read_vectored(&self) -> bool {
+        match self.0 {}
+    }
+
+    pub fn write(&self, _buf: &[u8]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn write_vectored(&self, _bufs: &[IoSlice<'_>]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn is_write_vectored(&self) -> bool {
+        match self.0 {}
+    }
+
+    pub fn flush(&self) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn seek(&self, _pos: SeekFrom) -> io::Result<u64> {
+        match self.0 {}
+    }
+
+    pub fn duplicate(&self) -> io::Result<File> {
+        match self.0 {}
+    }
+
+    pub fn set_permissions(&self, _perm: FilePermissions) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn diverge(&self) -> ! {
+        match self.0 {}
+    }
+}
+
+impl DirBuilder {
+    pub fn new() -> DirBuilder {
+        DirBuilder {}
+    }
+
+    pub fn mkdir(&self, _p: &Path) -> io::Result<()> {
+        unsupported()
+    }
+}
+
+impl fmt::Debug for File {
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.0 {}
+    }
+}
+
+pub fn readdir(_p: &Path) -> io::Result<ReadDir> {
+    unsupported()
+}
+
+pub fn unlink(_p: &Path) -> io::Result<()> {
+    unsupported()
+}
+
+pub fn rename(_old: &Path, _new: &Path) -> io::Result<()> {
+    unsupported()
+}
+
+pub fn set_perm(_p: &Path, perm: FilePermissions) -> io::Result<()> {
+    match perm.0 {}
+}
+
+pub fn rmdir(_p: &Path) -> io::Result<()> {
+    unsupported()
+}
+
+pub fn remove_dir_all(_path: &Path) -> io::Result<()> {
+    unsupported()
+}
+
+pub fn readlink(_p: &Path) -> io::Result<PathBuf> {
+    unsupported()
+}
+
+pub fn symlink(_src: &Path, _dst: &Path) -> io::Result<()> {
+    unsupported()
+}
+
+pub fn link(_src: &Path, _dst: &Path) -> io::Result<()> {
+    unsupported()
+}
+
+pub fn stat(_p: &Path) -> io::Result<FileAttr> {
+    unsupported()
+}
+
+pub fn lstat(_p: &Path) -> io::Result<FileAttr> {
+    unsupported()
+}
+
+pub fn canonicalize(_p: &Path) -> io::Result<PathBuf> {
+    unsupported()
+}
+
+pub fn copy(_from: &Path, _to: &Path) -> io::Result<u64> {
+    unsupported()
+}
--- /dev/null
+use crate::mem;
+
+// Minimal vectored-I/O buffer wrappers: plain slice wrappers with `advance`
+// and accessor methods, since no OS-level iovec type exists on this platform.
+#[derive(Copy, Clone)]
+pub struct IoSlice<'a>(&'a [u8]);
+
+impl<'a> IoSlice<'a> {
+    #[inline]
+    pub fn new(buf: &'a [u8]) -> IoSlice<'a> {
+        IoSlice(buf)
+    }
+
+    #[inline]
+    pub fn advance(&mut self, n: usize) {
+        self.0 = &self.0[n..]
+    }
+
+    #[inline]
+    pub fn as_slice(&self) -> &[u8] {
+        self.0
+    }
+}
+
+pub struct IoSliceMut<'a>(&'a mut [u8]);
+
+impl<'a> IoSliceMut<'a> {
+    #[inline]
+    pub fn new(buf: &'a mut [u8]) -> IoSliceMut<'a> {
+        IoSliceMut(buf)
+    }
+
+    #[inline]
+    pub fn advance(&mut self, n: usize) {
+        // Temporarily swap in an empty slice so the mutable borrow can be
+        // split without copying the data.
+        let slice = mem::replace(&mut self.0, &mut []);
+        let (_, remaining) = slice.split_at_mut(n);
+        self.0 = remaining;
+    }
+
+    #[inline]
+    pub fn as_slice(&self) -> &[u8] {
+        self.0
+    }
+
+    #[inline]
+    pub fn as_mut_slice(&mut self) -> &mut [u8] {
+        self.0
+    }
+}
--- /dev/null
+// Module listing for the `unsupported` platform backend: each submodule is a
+// stub implementation of the corresponding `sys` interface.
+pub mod alloc;
+pub mod args;
+pub mod cmath;
+pub mod condvar;
+pub mod env;
+pub mod fs;
+pub mod io;
+pub mod mutex;
+pub mod net;
+pub mod os;
+pub mod path;
+pub mod pipe;
+pub mod process;
+pub mod rwlock;
+pub mod stack_overflow;
+pub mod stdio;
+pub mod thread;
+#[cfg(target_thread_local)]
+pub mod thread_local_dtor;
+pub mod thread_local_key;
+pub mod time;
+
+mod common;
+pub use common::*;
--- /dev/null
+use crate::cell::UnsafeCell;
+
+// Single-threaded mutex: just a boolean "locked" flag in an `UnsafeCell`.
+// There is no contention to handle, so locking only guards against
+// (unsupported) recursive acquisition.
+pub struct Mutex {
+    locked: UnsafeCell<bool>,
+}
+
+unsafe impl Send for Mutex {}
+unsafe impl Sync for Mutex {} // no threads on this platform
+
+impl Mutex {
+    #[rustc_const_stable(feature = "const_sys_mutex_new", since = "1.0.0")]
+    pub const fn new() -> Mutex {
+        Mutex { locked: UnsafeCell::new(false) }
+    }
+
+    #[inline]
+    pub unsafe fn init(&mut self) {}
+
+    #[inline]
+    pub unsafe fn lock(&self) {
+        let locked = self.locked.get();
+        // With a single thread, a second `lock` without `unlock` can only be
+        // recursion, which would deadlock on a real mutex — abort loudly instead.
+        assert!(!*locked, "cannot recursively acquire mutex");
+        *locked = true;
+    }
+
+    #[inline]
+    pub unsafe fn unlock(&self) {
+        *self.locked.get() = false;
+    }
+
+    #[inline]
+    pub unsafe fn try_lock(&self) -> bool {
+        let locked = self.locked.get();
+        if *locked {
+            false
+        } else {
+            *locked = true;
+            true
+        }
+    }
+
+    #[inline]
+    pub unsafe fn destroy(&self) {}
+}
+
+// All empty stubs because this platform does not yet support threads, so lock
+// acquisition always succeeds.
+pub struct ReentrantMutex {}
+
+impl ReentrantMutex {
+    pub const unsafe fn uninitialized() -> ReentrantMutex {
+        ReentrantMutex {}
+    }
+
+    pub unsafe fn init(&self) {}
+
+    pub unsafe fn lock(&self) {}
+
+    #[inline]
+    pub unsafe fn try_lock(&self) -> bool {
+        true
+    }
+
+    pub unsafe fn unlock(&self) {}
+
+    pub unsafe fn destroy(&self) {}
+}
--- /dev/null
+use crate::convert::TryFrom;
+use crate::fmt;
+use crate::io::{self, IoSlice, IoSliceMut};
+use crate::net::{Ipv4Addr, Ipv6Addr, Shutdown, SocketAddr};
+use crate::sys::{unsupported, Void};
+use crate::time::Duration;
+
+// Networking stubs: constructors (`connect`, `bind`, lookup conversions)
+// return the shared `unsupported` error; all other methods operate on types
+// wrapping the uninhabited `Void` and are therefore unreachable
+// (`match self.0 {}`).
+pub struct TcpStream(Void);
+
+impl TcpStream {
+    pub fn connect(_: io::Result<&SocketAddr>) -> io::Result<TcpStream> {
+        unsupported()
+    }
+
+    pub fn connect_timeout(_: &SocketAddr, _: Duration) -> io::Result<TcpStream> {
+        unsupported()
+    }
+
+    pub fn set_read_timeout(&self, _: Option<Duration>) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn set_write_timeout(&self, _: Option<Duration>) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
+        match self.0 {}
+    }
+
+    pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
+        match self.0 {}
+    }
+
+    pub fn peek(&self, _: &mut [u8]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn read(&self, _: &mut [u8]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn read_vectored(&self, _: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn is_read_vectored(&self) -> bool {
+        match self.0 {}
+    }
+
+    pub fn write(&self, _: &[u8]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn write_vectored(&self, _: &[IoSlice<'_>]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn is_write_vectored(&self) -> bool {
+        match self.0 {}
+    }
+
+    pub fn peer_addr(&self) -> io::Result<SocketAddr> {
+        match self.0 {}
+    }
+
+    pub fn socket_addr(&self) -> io::Result<SocketAddr> {
+        match self.0 {}
+    }
+
+    pub fn shutdown(&self, _: Shutdown) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn duplicate(&self) -> io::Result<TcpStream> {
+        match self.0 {}
+    }
+
+    pub fn set_nodelay(&self, _: bool) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn nodelay(&self) -> io::Result<bool> {
+        match self.0 {}
+    }
+
+    pub fn set_ttl(&self, _: u32) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn ttl(&self) -> io::Result<u32> {
+        match self.0 {}
+    }
+
+    pub fn take_error(&self) -> io::Result<Option<io::Error>> {
+        match self.0 {}
+    }
+
+    pub fn set_nonblocking(&self, _: bool) -> io::Result<()> {
+        match self.0 {}
+    }
+}
+
+impl fmt::Debug for TcpStream {
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.0 {}
+    }
+}
+
+pub struct TcpListener(Void);
+
+impl TcpListener {
+    pub fn bind(_: io::Result<&SocketAddr>) -> io::Result<TcpListener> {
+        unsupported()
+    }
+
+    pub fn socket_addr(&self) -> io::Result<SocketAddr> {
+        match self.0 {}
+    }
+
+    pub fn accept(&self) -> io::Result<(TcpStream, SocketAddr)> {
+        match self.0 {}
+    }
+
+    pub fn duplicate(&self) -> io::Result<TcpListener> {
+        match self.0 {}
+    }
+
+    pub fn set_ttl(&self, _: u32) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn ttl(&self) -> io::Result<u32> {
+        match self.0 {}
+    }
+
+    pub fn set_only_v6(&self, _: bool) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn only_v6(&self) -> io::Result<bool> {
+        match self.0 {}
+    }
+
+    pub fn take_error(&self) -> io::Result<Option<io::Error>> {
+        match self.0 {}
+    }
+
+    pub fn set_nonblocking(&self, _: bool) -> io::Result<()> {
+        match self.0 {}
+    }
+}
+
+impl fmt::Debug for TcpListener {
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.0 {}
+    }
+}
+
+pub struct UdpSocket(Void);
+
+impl UdpSocket {
+    pub fn bind(_: io::Result<&SocketAddr>) -> io::Result<UdpSocket> {
+        unsupported()
+    }
+
+    pub fn peer_addr(&self) -> io::Result<SocketAddr> {
+        match self.0 {}
+    }
+
+    pub fn socket_addr(&self) -> io::Result<SocketAddr> {
+        match self.0 {}
+    }
+
+    pub fn recv_from(&self, _: &mut [u8]) -> io::Result<(usize, SocketAddr)> {
+        match self.0 {}
+    }
+
+    pub fn peek_from(&self, _: &mut [u8]) -> io::Result<(usize, SocketAddr)> {
+        match self.0 {}
+    }
+
+    pub fn send_to(&self, _: &[u8], _: &SocketAddr) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn duplicate(&self) -> io::Result<UdpSocket> {
+        match self.0 {}
+    }
+
+    pub fn set_read_timeout(&self, _: Option<Duration>) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn set_write_timeout(&self, _: Option<Duration>) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
+        match self.0 {}
+    }
+
+    pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
+        match self.0 {}
+    }
+
+    pub fn set_broadcast(&self, _: bool) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn broadcast(&self) -> io::Result<bool> {
+        match self.0 {}
+    }
+
+    pub fn set_multicast_loop_v4(&self, _: bool) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn multicast_loop_v4(&self) -> io::Result<bool> {
+        match self.0 {}
+    }
+
+    pub fn set_multicast_ttl_v4(&self, _: u32) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn multicast_ttl_v4(&self) -> io::Result<u32> {
+        match self.0 {}
+    }
+
+    pub fn set_multicast_loop_v6(&self, _: bool) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn multicast_loop_v6(&self) -> io::Result<bool> {
+        match self.0 {}
+    }
+
+    pub fn join_multicast_v4(&self, _: &Ipv4Addr, _: &Ipv4Addr) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn join_multicast_v6(&self, _: &Ipv6Addr, _: u32) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn leave_multicast_v4(&self, _: &Ipv4Addr, _: &Ipv4Addr) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn leave_multicast_v6(&self, _: &Ipv6Addr, _: u32) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn set_ttl(&self, _: u32) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn ttl(&self) -> io::Result<u32> {
+        match self.0 {}
+    }
+
+    pub fn take_error(&self) -> io::Result<Option<io::Error>> {
+        match self.0 {}
+    }
+
+    pub fn set_nonblocking(&self, _: bool) -> io::Result<()> {
+        match self.0 {}
+    }
+
+    pub fn recv(&self, _: &mut [u8]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn peek(&self, _: &mut [u8]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn send(&self, _: &[u8]) -> io::Result<usize> {
+        match self.0 {}
+    }
+
+    pub fn connect(&self, _: io::Result<&SocketAddr>) -> io::Result<()> {
+        match self.0 {}
+    }
+}
+
+impl fmt::Debug for UdpSocket {
+    fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+        match self.0 {}
+    }
+}
+
+pub struct LookupHost(Void);
+
+impl LookupHost {
+    pub fn port(&self) -> u16 {
+        match self.0 {}
+    }
+}
+
+impl Iterator for LookupHost {
+    type Item = SocketAddr;
+    fn next(&mut self) -> Option<SocketAddr> {
+        match self.0 {}
+    }
+}
+
+impl TryFrom<&str> for LookupHost {
+    type Error = io::Error;
+
+    fn try_from(_v: &str) -> io::Result<LookupHost> {
+        unsupported()
+    }
+}
+
+impl<'a> TryFrom<(&'a str, u16)> for LookupHost {
+    type Error = io::Error;
+
+    fn try_from(_v: (&'a str, u16)) -> io::Result<LookupHost> {
+        unsupported()
+    }
+}
+
+// Minimal libc-style sockaddr definitions required by `sys_common::net`.
+#[allow(nonstandard_style)]
+pub mod netc {
+    pub const AF_INET: u8 = 0;
+    pub const AF_INET6: u8 = 1;
+    pub type sa_family_t = u8;
+
+    #[derive(Copy, Clone)]
+    pub struct in_addr {
+        pub s_addr: u32,
+    }
+
+    #[derive(Copy, Clone)]
+    pub struct sockaddr_in {
+        pub sin_family: sa_family_t,
+        pub sin_port: u16,
+        pub sin_addr: in_addr,
+    }
+
+    #[derive(Copy, Clone)]
+    pub struct in6_addr {
+        pub s6_addr: [u8; 16],
+    }
+
+    #[derive(Copy, Clone)]
+    pub struct sockaddr_in6 {
+        pub sin6_family: sa_family_t,
+        pub sin6_port: u16,
+        pub sin6_addr: in6_addr,
+        pub sin6_flowinfo: u32,
+        pub sin6_scope_id: u32,
+    }
+
+    #[derive(Copy, Clone)]
+    pub struct sockaddr {}
+
+    pub type socklen_t = usize;
+}
--- /dev/null
+use super::{unsupported, Void};
+use crate::error::Error as StdError;
+use crate::ffi::{OsStr, OsString};
+use crate::fmt;
+use crate::io;
+use crate::path::{self, PathBuf};
+
+pub fn errno() -> i32 {
+ 0
+}
+
+pub fn error_string(_errno: i32) -> String {
+ "operation successful".to_string()
+}
+
+pub fn getcwd() -> io::Result<PathBuf> {
+ unsupported()
+}
+
+pub fn chdir(_: &path::Path) -> io::Result<()> {
+ unsupported()
+}
+
+pub struct SplitPaths<'a>(&'a Void);
+
+pub fn split_paths(_unparsed: &OsStr) -> SplitPaths<'_> {
+ panic!("unsupported")
+}
+
+impl<'a> Iterator for SplitPaths<'a> {
+ type Item = PathBuf;
+ fn next(&mut self) -> Option<PathBuf> {
+ match *self.0 {}
+ }
+}
+
+#[derive(Debug)]
+pub struct JoinPathsError;
+
+pub fn join_paths<I, T>(_paths: I) -> Result<OsString, JoinPathsError>
+where
+ I: Iterator<Item = T>,
+ T: AsRef<OsStr>,
+{
+ Err(JoinPathsError)
+}
+
+impl fmt::Display for JoinPathsError {
+ fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ "not supported on this platform yet".fmt(f)
+ }
+}
+
+impl StdError for JoinPathsError {
+ #[allow(deprecated)]
+ fn description(&self) -> &str {
+ "not supported on this platform yet"
+ }
+}
+
+pub fn current_exe() -> io::Result<PathBuf> {
+ unsupported()
+}
+
+pub struct Env(Void);
+
+impl Iterator for Env {
+ type Item = (OsString, OsString);
+ fn next(&mut self) -> Option<(OsString, OsString)> {
+ match self.0 {}
+ }
+}
+
+pub fn env() -> Env {
+ panic!("not supported on this platform")
+}
+
+pub fn getenv(_: &OsStr) -> io::Result<Option<OsString>> {
+ Ok(None)
+}
+
+pub fn setenv(_: &OsStr, _: &OsStr) -> io::Result<()> {
+ Err(io::Error::new(io::ErrorKind::Other, "cannot set env vars on this platform"))
+}
+
+pub fn unsetenv(_: &OsStr) -> io::Result<()> {
+ Err(io::Error::new(io::ErrorKind::Other, "cannot unset env vars on this platform"))
+}
+
+pub fn temp_dir() -> PathBuf {
+ panic!("no filesystem on this platform")
+}
+
+pub fn home_dir() -> Option<PathBuf> {
+ None
+}
+
+pub fn exit(_code: i32) -> ! {
+ crate::intrinsics::abort()
+}
+
+pub fn getpid() -> u32 {
+ panic!("no pids on this platform")
+}
--- /dev/null
+use crate::ffi::OsStr;
+use crate::path::Prefix;
+
+#[inline]
+pub fn is_sep_byte(b: u8) -> bool {
+ b == b'/'
+}
+
+#[inline]
+pub fn is_verbatim_sep(b: u8) -> bool {
+ b == b'/'
+}
+
+pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {
+ None
+}
+
+pub const MAIN_SEP_STR: &str = "/";
+pub const MAIN_SEP: char = '/';
--- /dev/null
+use crate::io::{self, IoSlice, IoSliceMut};
+use crate::sys::Void;
+
+pub struct AnonPipe(Void);
+
+impl AnonPipe {
+ pub fn read(&self, _buf: &mut [u8]) -> io::Result<usize> {
+ match self.0 {}
+ }
+
+ pub fn read_vectored(&self, _bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
+ match self.0 {}
+ }
+
+ pub fn is_read_vectored(&self) -> bool {
+ match self.0 {}
+ }
+
+ pub fn write(&self, _buf: &[u8]) -> io::Result<usize> {
+ match self.0 {}
+ }
+
+ pub fn write_vectored(&self, _bufs: &[IoSlice<'_>]) -> io::Result<usize> {
+ match self.0 {}
+ }
+
+ pub fn is_write_vectored(&self) -> bool {
+ match self.0 {}
+ }
+
+ pub fn diverge(&self) -> ! {
+ match self.0 {}
+ }
+}
+
+pub fn read2(p1: AnonPipe, _v1: &mut Vec<u8>, _p2: AnonPipe, _v2: &mut Vec<u8>) -> io::Result<()> {
+ match p1.0 {}
+}
--- /dev/null
+use crate::ffi::OsStr;
+use crate::fmt;
+use crate::io;
+use crate::sys::fs::File;
+use crate::sys::pipe::AnonPipe;
+use crate::sys::{unsupported, Void};
+use crate::sys_common::process::CommandEnv;
+
+pub use crate::ffi::OsString as EnvKey;
+
+////////////////////////////////////////////////////////////////////////////////
+// Command
+////////////////////////////////////////////////////////////////////////////////
+
+pub struct Command {
+ env: CommandEnv,
+}
+
+// passed back to std::process with the pipes connected to the child, if any
+// were requested
+pub struct StdioPipes {
+ pub stdin: Option<AnonPipe>,
+ pub stdout: Option<AnonPipe>,
+ pub stderr: Option<AnonPipe>,
+}
+
+pub enum Stdio {
+ Inherit,
+ Null,
+ MakePipe,
+}
+
+impl Command {
+ pub fn new(_program: &OsStr) -> Command {
+ Command { env: Default::default() }
+ }
+
+ pub fn arg(&mut self, _arg: &OsStr) {}
+
+ pub fn env_mut(&mut self) -> &mut CommandEnv {
+ &mut self.env
+ }
+
+ pub fn cwd(&mut self, _dir: &OsStr) {}
+
+ pub fn stdin(&mut self, _stdin: Stdio) {}
+
+ pub fn stdout(&mut self, _stdout: Stdio) {}
+
+ pub fn stderr(&mut self, _stderr: Stdio) {}
+
+ pub fn spawn(
+ &mut self,
+ _default: Stdio,
+ _needs_stdin: bool,
+ ) -> io::Result<(Process, StdioPipes)> {
+ unsupported()
+ }
+}
+
+impl From<AnonPipe> for Stdio {
+ fn from(pipe: AnonPipe) -> Stdio {
+ pipe.diverge()
+ }
+}
+
+impl From<File> for Stdio {
+ fn from(file: File) -> Stdio {
+ file.diverge()
+ }
+}
+
+impl fmt::Debug for Command {
+ fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ Ok(())
+ }
+}
+
+pub struct ExitStatus(Void);
+
+impl ExitStatus {
+ pub fn success(&self) -> bool {
+ match self.0 {}
+ }
+
+ pub fn code(&self) -> Option<i32> {
+ match self.0 {}
+ }
+}
+
+impl Clone for ExitStatus {
+ fn clone(&self) -> ExitStatus {
+ match self.0 {}
+ }
+}
+
+impl Copy for ExitStatus {}
+
+impl PartialEq for ExitStatus {
+ fn eq(&self, _other: &ExitStatus) -> bool {
+ match self.0 {}
+ }
+}
+
+impl Eq for ExitStatus {}
+
+impl fmt::Debug for ExitStatus {
+ fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.0 {}
+ }
+}
+
+impl fmt::Display for ExitStatus {
+ fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
+ match self.0 {}
+ }
+}
+
+#[derive(PartialEq, Eq, Clone, Copy, Debug)]
+pub struct ExitCode(bool);
+
+impl ExitCode {
+ pub const SUCCESS: ExitCode = ExitCode(false);
+ pub const FAILURE: ExitCode = ExitCode(true);
+
+ pub fn as_i32(&self) -> i32 {
+ self.0 as i32
+ }
+}
+
+pub struct Process(Void);
+
+impl Process {
+ pub fn id(&self) -> u32 {
+ match self.0 {}
+ }
+
+ pub fn kill(&mut self) -> io::Result<()> {
+ match self.0 {}
+ }
+
+ pub fn wait(&mut self) -> io::Result<ExitStatus> {
+ match self.0 {}
+ }
+
+ pub fn try_wait(&mut self) -> io::Result<Option<ExitStatus>> {
+ match self.0 {}
+ }
+}
--- /dev/null
+use crate::cell::UnsafeCell;
+
+pub struct RWLock {
+ mode: UnsafeCell<isize>,
+}
+
+unsafe impl Send for RWLock {}
+unsafe impl Sync for RWLock {} // no threads on this platform
+
+impl RWLock {
+ pub const fn new() -> RWLock {
+ RWLock { mode: UnsafeCell::new(0) }
+ }
+
+ #[inline]
+ pub unsafe fn read(&self) {
+ let mode = self.mode.get();
+ if *mode >= 0 {
+ *mode += 1;
+ } else {
+ rtabort!("rwlock locked for writing");
+ }
+ }
+
+ #[inline]
+ pub unsafe fn try_read(&self) -> bool {
+ let mode = self.mode.get();
+ if *mode >= 0 {
+ *mode += 1;
+ true
+ } else {
+ false
+ }
+ }
+
+ #[inline]
+ pub unsafe fn write(&self) {
+ let mode = self.mode.get();
+ if *mode == 0 {
+ *mode = -1;
+ } else {
+ rtabort!("rwlock locked for reading")
+ }
+ }
+
+ #[inline]
+ pub unsafe fn try_write(&self) -> bool {
+ let mode = self.mode.get();
+ if *mode == 0 {
+ *mode = -1;
+ true
+ } else {
+ false
+ }
+ }
+
+ #[inline]
+ pub unsafe fn read_unlock(&self) {
+ *self.mode.get() -= 1;
+ }
+
+ #[inline]
+ pub unsafe fn write_unlock(&self) {
+ *self.mode.get() += 1;
+ }
+
+ #[inline]
+ pub unsafe fn destroy(&self) {}
+}
--- /dev/null
+pub unsafe fn init() {}
+
+pub unsafe fn cleanup() {}
--- /dev/null
+use crate::io;
+
+pub struct Stdin;
+pub struct Stdout;
+pub struct Stderr;
+
+impl Stdin {
+ pub fn new() -> io::Result<Stdin> {
+ Ok(Stdin)
+ }
+}
+
+impl io::Read for Stdin {
+ fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> {
+ Ok(0)
+ }
+}
+
+impl Stdout {
+ pub fn new() -> io::Result<Stdout> {
+ Ok(Stdout)
+ }
+}
+
+impl io::Write for Stdout {
+ fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+ Ok(buf.len())
+ }
+
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+impl Stderr {
+ pub fn new() -> io::Result<Stderr> {
+ Ok(Stderr)
+ }
+}
+
+impl io::Write for Stderr {
+ fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
+ Ok(buf.len())
+ }
+
+ fn flush(&mut self) -> io::Result<()> {
+ Ok(())
+ }
+}
+
+pub const STDIN_BUF_SIZE: usize = 0;
+
+pub fn is_ebadf(_err: &io::Error) -> bool {
+ true
+}
+
+pub fn panic_output() -> Option<Vec<u8>> {
+ None
+}
--- /dev/null
+use super::{unsupported, Void};
+use crate::ffi::CStr;
+use crate::io;
+use crate::time::Duration;
+
+pub struct Thread(Void);
+
+pub const DEFAULT_MIN_STACK_SIZE: usize = 4096;
+
+impl Thread {
+ // unsafe: see thread::Builder::spawn_unchecked for safety requirements
+ pub unsafe fn new(_stack: usize, _p: Box<dyn FnOnce()>) -> io::Result<Thread> {
+ unsupported()
+ }
+
+ pub fn yield_now() {
+ // do nothing
+ }
+
+ pub fn set_name(_name: &CStr) {
+ // nope
+ }
+
+ pub fn sleep(_dur: Duration) {
+ panic!("can't sleep");
+ }
+
+ pub fn join(self) {
+ match self.0 {}
+ }
+}
+
+pub mod guard {
+ pub type Guard = !;
+ pub unsafe fn current() -> Option<Guard> {
+ None
+ }
+ pub unsafe fn init() -> Option<Guard> {
+ None
+ }
+}
--- /dev/null
+#![unstable(feature = "thread_local_internals", issue = "none")]
+
+pub unsafe fn register_dtor(_t: *mut u8, _dtor: unsafe extern "C" fn(*mut u8)) {
+ // FIXME: right now there is no concept of "thread exit", but this is likely
+ // going to show up at some point in the form of an exported symbol that the
+ // wasm runtime is going to be expected to call. For now we basically just
+ // ignore the arguments, but if such a function starts to exist it will
+ // likely look like the OSX implementation in `unix/fast_thread_local.rs`
+}
--- /dev/null
+pub type Key = usize;
+
+#[inline]
+pub unsafe fn create(_dtor: Option<unsafe extern "C" fn(*mut u8)>) -> Key {
+ panic!("should not be used on this target");
+}
+
+#[inline]
+pub unsafe fn set(_key: Key, _value: *mut u8) {
+ panic!("should not be used on this target");
+}
+
+#[inline]
+pub unsafe fn get(_key: Key) -> *mut u8 {
+ panic!("should not be used on this target");
+}
+
+#[inline]
+pub unsafe fn destroy(_key: Key) {
+ panic!("should not be used on this target");
+}
+
+#[inline]
+pub fn requires_synchronized_create() -> bool {
+ panic!("should not be used on this target");
+}
--- /dev/null
+use crate::time::Duration;
+
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
+pub struct Instant(Duration);
+
+#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
+pub struct SystemTime(Duration);
+
+pub const UNIX_EPOCH: SystemTime = SystemTime(Duration::from_secs(0));
+
+impl Instant {
+ pub fn now() -> Instant {
+ panic!("time not implemented on this platform")
+ }
+
+ pub const fn zero() -> Instant {
+ Instant(Duration::from_secs(0))
+ }
+
+ pub fn actually_monotonic() -> bool {
+ false
+ }
+
+ pub fn checked_sub_instant(&self, other: &Instant) -> Option<Duration> {
+ self.0.checked_sub(other.0)
+ }
+
+ pub fn checked_add_duration(&self, other: &Duration) -> Option<Instant> {
+ Some(Instant(self.0.checked_add(*other)?))
+ }
+
+ pub fn checked_sub_duration(&self, other: &Duration) -> Option<Instant> {
+ Some(Instant(self.0.checked_sub(*other)?))
+ }
+}
+
+impl SystemTime {
+ pub fn now() -> SystemTime {
+ panic!("time not implemented on this platform")
+ }
+
+ pub fn sub_time(&self, other: &SystemTime) -> Result<Duration, Duration> {
+ self.0.checked_sub(other.0).ok_or_else(|| other.0 - self.0)
+ }
+
+ pub fn checked_add_duration(&self, other: &Duration) -> Option<SystemTime> {
+ Some(SystemTime(self.0.checked_add(*other)?))
+ }
+
+ pub fn checked_sub_duration(&self, other: &Duration) -> Option<SystemTime> {
+ Some(SystemTime(self.0.checked_sub(*other)?))
+ }
+}
}
}
+/// Converts a raw `c_int` process exit code into the type-safe `ExitStatus` wrapper.
impl From<c_int> for ExitStatus {
fn from(a: c_int) -> ExitStatus {
ExitStatus(a)
use crate::io as std_io;
use crate::mem;
-use crate::os::raw::c_char;
pub mod alloc;
pub mod args;
-#[path = "../wasm/cmath.rs"]
+#[path = "../unsupported/cmath.rs"]
pub mod cmath;
-#[path = "../wasm/condvar.rs"]
+#[path = "../unsupported/condvar.rs"]
pub mod condvar;
pub mod env;
pub mod fd;
pub mod fs;
pub mod io;
-#[path = "../wasm/memchr.rs"]
-pub mod memchr;
-#[path = "../wasm/mutex.rs"]
+#[path = "../unsupported/mutex.rs"]
pub mod mutex;
pub mod net;
pub mod os;
pub mod path;
pub mod pipe;
pub mod process;
-#[path = "../wasm/rwlock.rs"]
+#[path = "../unsupported/rwlock.rs"]
pub mod rwlock;
-#[path = "../wasm/stack_overflow.rs"]
+#[path = "../unsupported/stack_overflow.rs"]
pub mod stack_overflow;
pub mod stdio;
pub mod thread;
-#[path = "../wasm/thread_local_dtor.rs"]
+#[path = "../unsupported/thread_local_dtor.rs"]
pub mod thread_local_dtor;
-#[path = "../wasm/thread_local_key.rs"]
+#[path = "../unsupported/thread_local_key.rs"]
pub mod thread_local_key;
pub mod time;
-#[cfg(not(test))]
-pub fn init() {}
-
-pub fn unsupported<T>() -> std_io::Result<T> {
- Err(unsupported_err())
-}
-
-pub fn unsupported_err() -> std_io::Error {
- std_io::Error::new(std_io::ErrorKind::Other, "operation not supported on wasm yet")
-}
+#[path = "../unsupported/common.rs"]
+#[allow(unused)]
+mod common;
+pub use common::*;
pub fn decode_error_kind(errno: i32) -> std_io::ErrorKind {
use std_io::ErrorKind::*;
}
}
-// This enum is used as the storage for a bunch of types which can't actually
-// exist.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
-pub enum Void {}
-
-pub unsafe fn strlen(mut s: *const c_char) -> usize {
- let mut n = 0;
- while *s != 0 {
- n += 1;
- s = s.offset(1);
- }
- return n;
-}
-
pub fn abort_internal() -> ! {
unsafe { libc::abort() }
}
+++ /dev/null
-// These symbols are all defined in `compiler-builtins`
-extern "C" {
- pub fn acos(n: f64) -> f64;
- pub fn acosf(n: f32) -> f32;
- pub fn asin(n: f64) -> f64;
- pub fn asinf(n: f32) -> f32;
- pub fn atan(n: f64) -> f64;
- pub fn atan2(a: f64, b: f64) -> f64;
- pub fn atan2f(a: f32, b: f32) -> f32;
- pub fn atanf(n: f32) -> f32;
- pub fn cbrt(n: f64) -> f64;
- pub fn cbrtf(n: f32) -> f32;
- pub fn cosh(n: f64) -> f64;
- pub fn coshf(n: f32) -> f32;
- pub fn expm1(n: f64) -> f64;
- pub fn expm1f(n: f32) -> f32;
- pub fn fdim(a: f64, b: f64) -> f64;
- pub fn fdimf(a: f32, b: f32) -> f32;
- pub fn hypot(x: f64, y: f64) -> f64;
- pub fn hypotf(x: f32, y: f32) -> f32;
- pub fn log1p(n: f64) -> f64;
- pub fn log1pf(n: f32) -> f32;
- pub fn sinh(n: f64) -> f64;
- pub fn sinhf(n: f32) -> f32;
- pub fn tan(n: f64) -> f64;
- pub fn tanf(n: f32) -> f32;
- pub fn tanh(n: f64) -> f64;
- pub fn tanhf(n: f32) -> f32;
-}
+++ /dev/null
-use crate::sys::mutex::Mutex;
-use crate::time::Duration;
-
-pub struct Condvar {}
-
-impl Condvar {
- pub const fn new() -> Condvar {
- Condvar {}
- }
-
- #[inline]
- pub unsafe fn init(&mut self) {}
-
- #[inline]
- pub unsafe fn notify_one(&self) {}
-
- #[inline]
- pub unsafe fn notify_all(&self) {}
-
- pub unsafe fn wait(&self, _mutex: &Mutex) {
- panic!("can't block with web assembly")
- }
-
- pub unsafe fn wait_timeout(&self, _mutex: &Mutex, _dur: Duration) -> bool {
- panic!("can't block with web assembly");
- }
-
- #[inline]
- pub unsafe fn destroy(&self) {}
-}
+++ /dev/null
-use crate::ffi::OsString;
-use crate::fmt;
-use crate::hash::{Hash, Hasher};
-use crate::io::{self, IoSlice, IoSliceMut, SeekFrom};
-use crate::path::{Path, PathBuf};
-use crate::sys::time::SystemTime;
-use crate::sys::{unsupported, Void};
-
-pub struct File(Void);
-
-pub struct FileAttr(Void);
-
-pub struct ReadDir(Void);
-
-pub struct DirEntry(Void);
-
-#[derive(Clone, Debug)]
-pub struct OpenOptions {}
-
-pub struct FilePermissions(Void);
-
-pub struct FileType(Void);
-
-#[derive(Debug)]
-pub struct DirBuilder {}
-
-impl FileAttr {
- pub fn size(&self) -> u64 {
- match self.0 {}
- }
-
- pub fn perm(&self) -> FilePermissions {
- match self.0 {}
- }
-
- pub fn file_type(&self) -> FileType {
- match self.0 {}
- }
-
- pub fn modified(&self) -> io::Result<SystemTime> {
- match self.0 {}
- }
-
- pub fn accessed(&self) -> io::Result<SystemTime> {
- match self.0 {}
- }
-
- pub fn created(&self) -> io::Result<SystemTime> {
- match self.0 {}
- }
-}
-
-impl Clone for FileAttr {
- fn clone(&self) -> FileAttr {
- match self.0 {}
- }
-}
-
-impl FilePermissions {
- pub fn readonly(&self) -> bool {
- match self.0 {}
- }
-
- pub fn set_readonly(&mut self, _readonly: bool) {
- match self.0 {}
- }
-}
-
-impl Clone for FilePermissions {
- fn clone(&self) -> FilePermissions {
- match self.0 {}
- }
-}
-
-impl PartialEq for FilePermissions {
- fn eq(&self, _other: &FilePermissions) -> bool {
- match self.0 {}
- }
-}
-
-impl Eq for FilePermissions {}
-
-impl fmt::Debug for FilePermissions {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self.0 {}
- }
-}
-
-impl FileType {
- pub fn is_dir(&self) -> bool {
- match self.0 {}
- }
-
- pub fn is_file(&self) -> bool {
- match self.0 {}
- }
-
- pub fn is_symlink(&self) -> bool {
- match self.0 {}
- }
-}
-
-impl Clone for FileType {
- fn clone(&self) -> FileType {
- match self.0 {}
- }
-}
-
-impl Copy for FileType {}
-
-impl PartialEq for FileType {
- fn eq(&self, _other: &FileType) -> bool {
- match self.0 {}
- }
-}
-
-impl Eq for FileType {}
-
-impl Hash for FileType {
- fn hash<H: Hasher>(&self, _h: &mut H) {
- match self.0 {}
- }
-}
-
-impl fmt::Debug for FileType {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self.0 {}
- }
-}
-
-impl fmt::Debug for ReadDir {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self.0 {}
- }
-}
-
-impl Iterator for ReadDir {
- type Item = io::Result<DirEntry>;
-
- fn next(&mut self) -> Option<io::Result<DirEntry>> {
- match self.0 {}
- }
-}
-
-impl DirEntry {
- pub fn path(&self) -> PathBuf {
- match self.0 {}
- }
-
- pub fn file_name(&self) -> OsString {
- match self.0 {}
- }
-
- pub fn metadata(&self) -> io::Result<FileAttr> {
- match self.0 {}
- }
-
- pub fn file_type(&self) -> io::Result<FileType> {
- match self.0 {}
- }
-}
-
-impl OpenOptions {
- pub fn new() -> OpenOptions {
- OpenOptions {}
- }
-
- pub fn read(&mut self, _read: bool) {}
- pub fn write(&mut self, _write: bool) {}
- pub fn append(&mut self, _append: bool) {}
- pub fn truncate(&mut self, _truncate: bool) {}
- pub fn create(&mut self, _create: bool) {}
- pub fn create_new(&mut self, _create_new: bool) {}
-}
-
-impl File {
- pub fn open(_path: &Path, _opts: &OpenOptions) -> io::Result<File> {
- unsupported()
- }
-
- pub fn file_attr(&self) -> io::Result<FileAttr> {
- match self.0 {}
- }
-
- pub fn fsync(&self) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn datasync(&self) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn truncate(&self, _size: u64) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn read(&self, _buf: &mut [u8]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn read_vectored(&self, _bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn is_read_vectored(&self) -> bool {
- match self.0 {}
- }
-
- pub fn write(&self, _buf: &[u8]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn write_vectored(&self, _bufs: &[IoSlice<'_>]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn is_write_vectored(&self) -> bool {
- match self.0 {}
- }
-
- pub fn flush(&self) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn seek(&self, _pos: SeekFrom) -> io::Result<u64> {
- match self.0 {}
- }
-
- pub fn duplicate(&self) -> io::Result<File> {
- match self.0 {}
- }
-
- pub fn set_permissions(&self, _perm: FilePermissions) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn diverge(&self) -> ! {
- match self.0 {}
- }
-}
-
-impl DirBuilder {
- pub fn new() -> DirBuilder {
- DirBuilder {}
- }
-
- pub fn mkdir(&self, _p: &Path) -> io::Result<()> {
- unsupported()
- }
-}
-
-impl fmt::Debug for File {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self.0 {}
- }
-}
-
-pub fn readdir(_p: &Path) -> io::Result<ReadDir> {
- unsupported()
-}
-
-pub fn unlink(_p: &Path) -> io::Result<()> {
- unsupported()
-}
-
-pub fn rename(_old: &Path, _new: &Path) -> io::Result<()> {
- unsupported()
-}
-
-pub fn set_perm(_p: &Path, perm: FilePermissions) -> io::Result<()> {
- match perm.0 {}
-}
-
-pub fn rmdir(_p: &Path) -> io::Result<()> {
- unsupported()
-}
-
-pub fn remove_dir_all(_path: &Path) -> io::Result<()> {
- unsupported()
-}
-
-pub fn readlink(_p: &Path) -> io::Result<PathBuf> {
- unsupported()
-}
-
-pub fn symlink(_src: &Path, _dst: &Path) -> io::Result<()> {
- unsupported()
-}
-
-pub fn link(_src: &Path, _dst: &Path) -> io::Result<()> {
- unsupported()
-}
-
-pub fn stat(_p: &Path) -> io::Result<FileAttr> {
- unsupported()
-}
-
-pub fn lstat(_p: &Path) -> io::Result<FileAttr> {
- unsupported()
-}
-
-pub fn canonicalize(_p: &Path) -> io::Result<PathBuf> {
- unsupported()
-}
-
-pub fn copy(_from: &Path, _to: &Path) -> io::Result<u64> {
- unsupported()
-}
+++ /dev/null
-use crate::mem;
-
-#[derive(Copy, Clone)]
-pub struct IoSlice<'a>(&'a [u8]);
-
-impl<'a> IoSlice<'a> {
- #[inline]
- pub fn new(buf: &'a [u8]) -> IoSlice<'a> {
- IoSlice(buf)
- }
-
- #[inline]
- pub fn advance(&mut self, n: usize) {
- self.0 = &self.0[n..]
- }
-
- #[inline]
- pub fn as_slice(&self) -> &[u8] {
- self.0
- }
-}
-
-pub struct IoSliceMut<'a>(&'a mut [u8]);
-
-impl<'a> IoSliceMut<'a> {
- #[inline]
- pub fn new(buf: &'a mut [u8]) -> IoSliceMut<'a> {
- IoSliceMut(buf)
- }
-
- #[inline]
- pub fn advance(&mut self, n: usize) {
- let slice = mem::replace(&mut self.0, &mut []);
- let (_, remaining) = slice.split_at_mut(n);
- self.0 = remaining;
- }
-
- #[inline]
- pub fn as_slice(&self) -> &[u8] {
- self.0
- }
-
- #[inline]
- pub fn as_mut_slice(&mut self) -> &mut [u8] {
- self.0
- }
-}
+++ /dev/null
-pub use core::slice::memchr::{memchr, memrchr};
//! compiling for wasm. That way it's a compile time error for something that's
//! guaranteed to be a runtime error!
-use crate::os::raw::c_char;
-
pub mod alloc;
pub mod args;
+#[path = "../unsupported/cmath.rs"]
pub mod cmath;
pub mod env;
+#[path = "../unsupported/fs.rs"]
pub mod fs;
+#[path = "../unsupported/io.rs"]
pub mod io;
-pub mod memchr;
+#[path = "../unsupported/net.rs"]
pub mod net;
+#[path = "../unsupported/os.rs"]
pub mod os;
+#[path = "../unsupported/path.rs"]
pub mod path;
+#[path = "../unsupported/pipe.rs"]
pub mod pipe;
+#[path = "../unsupported/process.rs"]
pub mod process;
+#[path = "../unsupported/stack_overflow.rs"]
pub mod stack_overflow;
+#[path = "../unsupported/stdio.rs"]
pub mod stdio;
pub mod thread;
+#[path = "../unsupported/thread_local_dtor.rs"]
pub mod thread_local_dtor;
+#[path = "../unsupported/thread_local_key.rs"]
pub mod thread_local_key;
+#[path = "../unsupported/time.rs"]
pub mod time;
pub use crate::sys_common::os_str_bytes as os_str;
#[path = "rwlock_atomics.rs"]
pub mod rwlock;
} else {
+ #[path = "../unsupported/condvar.rs"]
pub mod condvar;
+ #[path = "../unsupported/mutex.rs"]
pub mod mutex;
+ #[path = "../unsupported/rwlock.rs"]
pub mod rwlock;
}
}
-#[cfg(not(test))]
-pub fn init() {}
-
-pub fn unsupported<T>() -> crate::io::Result<T> {
- Err(unsupported_err())
-}
-
-pub fn unsupported_err() -> crate::io::Error {
- crate::io::Error::new(crate::io::ErrorKind::Other, "operation not supported on wasm yet")
-}
-
-pub fn decode_error_kind(_code: i32) -> crate::io::ErrorKind {
- crate::io::ErrorKind::Other
-}
-
-// This enum is used as the storage for a bunch of types which can't actually
-// exist.
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
-pub enum Void {}
-
-pub unsafe fn strlen(mut s: *const c_char) -> usize {
- let mut n = 0;
- while *s != 0 {
- n += 1;
- s = s.offset(1);
- }
- return n;
-}
-
-pub fn abort_internal() -> ! {
- unsafe { crate::arch::wasm32::unreachable() }
-}
-
-// We don't have randomness yet, but I totally used a random number generator to
-// generate these numbers.
-//
-// More seriously though this is just for DOS protection in hash maps. It's ok
-// if we don't do that on wasm just yet.
-pub fn hashmap_random_keys() -> (u64, u64) {
- (1, 2)
-}
+#[path = "../unsupported/common.rs"]
+mod common;
+pub use common::*;
+++ /dev/null
-use crate::cell::UnsafeCell;
-
-pub struct Mutex {
- locked: UnsafeCell<bool>,
-}
-
-unsafe impl Send for Mutex {}
-unsafe impl Sync for Mutex {} // no threads on wasm
-
-impl Mutex {
- pub const fn new() -> Mutex {
- Mutex { locked: UnsafeCell::new(false) }
- }
-
- #[inline]
- pub unsafe fn init(&mut self) {}
-
- #[inline]
- pub unsafe fn lock(&self) {
- let locked = self.locked.get();
- assert!(!*locked, "cannot recursively acquire mutex");
- *locked = true;
- }
-
- #[inline]
- pub unsafe fn unlock(&self) {
- *self.locked.get() = false;
- }
-
- #[inline]
- pub unsafe fn try_lock(&self) -> bool {
- let locked = self.locked.get();
- if *locked {
- false
- } else {
- *locked = true;
- true
- }
- }
-
- #[inline]
- pub unsafe fn destroy(&self) {}
-}
-
-// All empty stubs because wasm has no threads yet, so lock acquisition always
-// succeeds.
-pub struct ReentrantMutex {}
-
-impl ReentrantMutex {
- pub const unsafe fn uninitialized() -> ReentrantMutex {
- ReentrantMutex {}
- }
-
- pub unsafe fn init(&self) {}
-
- pub unsafe fn lock(&self) {}
-
- #[inline]
- pub unsafe fn try_lock(&self) -> bool {
- true
- }
-
- pub unsafe fn unlock(&self) {}
-
- pub unsafe fn destroy(&self) {}
-}
+++ /dev/null
-use crate::convert::TryFrom;
-use crate::fmt;
-use crate::io::{self, IoSlice, IoSliceMut};
-use crate::net::{Ipv4Addr, Ipv6Addr, Shutdown, SocketAddr};
-use crate::sys::{unsupported, Void};
-use crate::time::Duration;
-
-pub struct TcpStream(Void);
-
-impl TcpStream {
- pub fn connect(_: io::Result<&SocketAddr>) -> io::Result<TcpStream> {
- unsupported()
- }
-
- pub fn connect_timeout(_: &SocketAddr, _: Duration) -> io::Result<TcpStream> {
- unsupported()
- }
-
- pub fn set_read_timeout(&self, _: Option<Duration>) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn set_write_timeout(&self, _: Option<Duration>) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
- match self.0 {}
- }
-
- pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
- match self.0 {}
- }
-
- pub fn peek(&self, _: &mut [u8]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn read(&self, _: &mut [u8]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn read_vectored(&self, _: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn is_read_vectored(&self) -> bool {
- match self.0 {}
- }
-
- pub fn write(&self, _: &[u8]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn write_vectored(&self, _: &[IoSlice<'_>]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn is_write_vectored(&self) -> bool {
- match self.0 {}
- }
-
- pub fn peer_addr(&self) -> io::Result<SocketAddr> {
- match self.0 {}
- }
-
- pub fn socket_addr(&self) -> io::Result<SocketAddr> {
- match self.0 {}
- }
-
- pub fn shutdown(&self, _: Shutdown) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn duplicate(&self) -> io::Result<TcpStream> {
- match self.0 {}
- }
-
- pub fn set_nodelay(&self, _: bool) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn nodelay(&self) -> io::Result<bool> {
- match self.0 {}
- }
-
- pub fn set_ttl(&self, _: u32) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn ttl(&self) -> io::Result<u32> {
- match self.0 {}
- }
-
- pub fn take_error(&self) -> io::Result<Option<io::Error>> {
- match self.0 {}
- }
-
- pub fn set_nonblocking(&self, _: bool) -> io::Result<()> {
- match self.0 {}
- }
-}
-
-impl fmt::Debug for TcpStream {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self.0 {}
- }
-}
-
-pub struct TcpListener(Void);
-
-impl TcpListener {
- pub fn bind(_: io::Result<&SocketAddr>) -> io::Result<TcpListener> {
- unsupported()
- }
-
- pub fn socket_addr(&self) -> io::Result<SocketAddr> {
- match self.0 {}
- }
-
- pub fn accept(&self) -> io::Result<(TcpStream, SocketAddr)> {
- match self.0 {}
- }
-
- pub fn duplicate(&self) -> io::Result<TcpListener> {
- match self.0 {}
- }
-
- pub fn set_ttl(&self, _: u32) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn ttl(&self) -> io::Result<u32> {
- match self.0 {}
- }
-
- pub fn set_only_v6(&self, _: bool) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn only_v6(&self) -> io::Result<bool> {
- match self.0 {}
- }
-
- pub fn take_error(&self) -> io::Result<Option<io::Error>> {
- match self.0 {}
- }
-
- pub fn set_nonblocking(&self, _: bool) -> io::Result<()> {
- match self.0 {}
- }
-}
-
-impl fmt::Debug for TcpListener {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self.0 {}
- }
-}
-
-pub struct UdpSocket(Void);
-
-impl UdpSocket {
- pub fn bind(_: io::Result<&SocketAddr>) -> io::Result<UdpSocket> {
- unsupported()
- }
-
- pub fn peer_addr(&self) -> io::Result<SocketAddr> {
- match self.0 {}
- }
-
- pub fn socket_addr(&self) -> io::Result<SocketAddr> {
- match self.0 {}
- }
-
- pub fn recv_from(&self, _: &mut [u8]) -> io::Result<(usize, SocketAddr)> {
- match self.0 {}
- }
-
- pub fn peek_from(&self, _: &mut [u8]) -> io::Result<(usize, SocketAddr)> {
- match self.0 {}
- }
-
- pub fn send_to(&self, _: &[u8], _: &SocketAddr) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn duplicate(&self) -> io::Result<UdpSocket> {
- match self.0 {}
- }
-
- pub fn set_read_timeout(&self, _: Option<Duration>) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn set_write_timeout(&self, _: Option<Duration>) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn read_timeout(&self) -> io::Result<Option<Duration>> {
- match self.0 {}
- }
-
- pub fn write_timeout(&self) -> io::Result<Option<Duration>> {
- match self.0 {}
- }
-
- pub fn set_broadcast(&self, _: bool) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn broadcast(&self) -> io::Result<bool> {
- match self.0 {}
- }
-
- pub fn set_multicast_loop_v4(&self, _: bool) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn multicast_loop_v4(&self) -> io::Result<bool> {
- match self.0 {}
- }
-
- pub fn set_multicast_ttl_v4(&self, _: u32) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn multicast_ttl_v4(&self) -> io::Result<u32> {
- match self.0 {}
- }
-
- pub fn set_multicast_loop_v6(&self, _: bool) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn multicast_loop_v6(&self) -> io::Result<bool> {
- match self.0 {}
- }
-
- pub fn join_multicast_v4(&self, _: &Ipv4Addr, _: &Ipv4Addr) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn join_multicast_v6(&self, _: &Ipv6Addr, _: u32) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn leave_multicast_v4(&self, _: &Ipv4Addr, _: &Ipv4Addr) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn leave_multicast_v6(&self, _: &Ipv6Addr, _: u32) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn set_ttl(&self, _: u32) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn ttl(&self) -> io::Result<u32> {
- match self.0 {}
- }
-
- pub fn take_error(&self) -> io::Result<Option<io::Error>> {
- match self.0 {}
- }
-
- pub fn set_nonblocking(&self, _: bool) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn recv(&self, _: &mut [u8]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn peek(&self, _: &mut [u8]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn send(&self, _: &[u8]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn connect(&self, _: io::Result<&SocketAddr>) -> io::Result<()> {
- match self.0 {}
- }
-}
-
-impl fmt::Debug for UdpSocket {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self.0 {}
- }
-}
-
-pub struct LookupHost(Void);
-
-impl LookupHost {
- pub fn port(&self) -> u16 {
- match self.0 {}
- }
-}
-
-impl Iterator for LookupHost {
- type Item = SocketAddr;
- fn next(&mut self) -> Option<SocketAddr> {
- match self.0 {}
- }
-}
-
-impl TryFrom<&str> for LookupHost {
- type Error = io::Error;
-
- fn try_from(_v: &str) -> io::Result<LookupHost> {
- unsupported()
- }
-}
-
-impl<'a> TryFrom<(&'a str, u16)> for LookupHost {
- type Error = io::Error;
-
- fn try_from(_v: (&'a str, u16)) -> io::Result<LookupHost> {
- unsupported()
- }
-}
-
-#[allow(nonstandard_style)]
-pub mod netc {
- pub const AF_INET: u8 = 0;
- pub const AF_INET6: u8 = 1;
- pub type sa_family_t = u8;
-
- #[derive(Copy, Clone)]
- pub struct in_addr {
- pub s_addr: u32,
- }
-
- #[derive(Copy, Clone)]
- pub struct sockaddr_in {
- pub sin_family: sa_family_t,
- pub sin_port: u16,
- pub sin_addr: in_addr,
- }
-
- #[derive(Copy, Clone)]
- pub struct in6_addr {
- pub s6_addr: [u8; 16],
- }
-
- #[derive(Copy, Clone)]
- pub struct sockaddr_in6 {
- pub sin6_family: sa_family_t,
- pub sin6_port: u16,
- pub sin6_addr: in6_addr,
- pub sin6_flowinfo: u32,
- pub sin6_scope_id: u32,
- }
-
- #[derive(Copy, Clone)]
- pub struct sockaddr {}
-
- pub type socklen_t = usize;
-}
+++ /dev/null
-use crate::error::Error as StdError;
-use crate::ffi::{OsStr, OsString};
-use crate::fmt;
-use crate::io;
-use crate::path::{self, PathBuf};
-use crate::str;
-use crate::sys::{unsupported, Void};
-
-pub fn errno() -> i32 {
- 0
-}
-
-pub fn error_string(_errno: i32) -> String {
- "operation successful".to_string()
-}
-
-pub fn getcwd() -> io::Result<PathBuf> {
- unsupported()
-}
-
-pub fn chdir(_: &path::Path) -> io::Result<()> {
- unsupported()
-}
-
-pub struct SplitPaths<'a>(&'a Void);
-
-pub fn split_paths(_unparsed: &OsStr) -> SplitPaths<'_> {
- panic!("unsupported")
-}
-
-impl<'a> Iterator for SplitPaths<'a> {
- type Item = PathBuf;
- fn next(&mut self) -> Option<PathBuf> {
- match *self.0 {}
- }
-}
-
-#[derive(Debug)]
-pub struct JoinPathsError;
-
-pub fn join_paths<I, T>(_paths: I) -> Result<OsString, JoinPathsError>
-where
- I: Iterator<Item = T>,
- T: AsRef<OsStr>,
-{
- Err(JoinPathsError)
-}
-
-impl fmt::Display for JoinPathsError {
- fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
- "not supported on wasm yet".fmt(f)
- }
-}
-
-impl StdError for JoinPathsError {
- #[allow(deprecated)]
- fn description(&self) -> &str {
- "not supported on wasm yet"
- }
-}
-
-pub fn current_exe() -> io::Result<PathBuf> {
- unsupported()
-}
-
-pub struct Env(Void);
-
-impl Iterator for Env {
- type Item = (OsString, OsString);
- fn next(&mut self) -> Option<(OsString, OsString)> {
- match self.0 {}
- }
-}
-
-pub fn env() -> Env {
- panic!("not supported on web assembly")
-}
-
-pub fn getenv(_: &OsStr) -> io::Result<Option<OsString>> {
- Ok(None)
-}
-
-pub fn setenv(_: &OsStr, _: &OsStr) -> io::Result<()> {
- Err(io::Error::new(io::ErrorKind::Other, "cannot set env vars on wasm32-unknown-unknown"))
-}
-
-pub fn unsetenv(_: &OsStr) -> io::Result<()> {
- Err(io::Error::new(io::ErrorKind::Other, "cannot unset env vars on wasm32-unknown-unknown"))
-}
-
-pub fn temp_dir() -> PathBuf {
- panic!("no filesystem on wasm")
-}
-
-pub fn home_dir() -> Option<PathBuf> {
- None
-}
-
-pub fn exit(_code: i32) -> ! {
- unsafe {
- crate::arch::wasm32::unreachable();
- }
-}
-
-pub fn getpid() -> u32 {
- panic!("no pids on wasm")
-}
+++ /dev/null
-use crate::ffi::OsStr;
-use crate::path::Prefix;
-
-#[inline]
-pub fn is_sep_byte(b: u8) -> bool {
- b == b'/'
-}
-
-#[inline]
-pub fn is_verbatim_sep(b: u8) -> bool {
- b == b'/'
-}
-
-pub fn parse_prefix(_: &OsStr) -> Option<Prefix<'_>> {
- None
-}
-
-pub const MAIN_SEP_STR: &str = "/";
-pub const MAIN_SEP: char = '/';
+++ /dev/null
-use crate::io::{self, IoSlice, IoSliceMut};
-use crate::sys::Void;
-
-pub struct AnonPipe(Void);
-
-impl AnonPipe {
- pub fn read(&self, _buf: &mut [u8]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn read_vectored(&self, _bufs: &mut [IoSliceMut<'_>]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn is_read_vectored(&self) -> bool {
- match self.0 {}
- }
-
- pub fn write(&self, _buf: &[u8]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn write_vectored(&self, _bufs: &[IoSlice<'_>]) -> io::Result<usize> {
- match self.0 {}
- }
-
- pub fn is_write_vectored(&self) -> bool {
- match self.0 {}
- }
-
- pub fn diverge(&self) -> ! {
- match self.0 {}
- }
-}
-
-pub fn read2(p1: AnonPipe, _v1: &mut Vec<u8>, _p2: AnonPipe, _v2: &mut Vec<u8>) -> io::Result<()> {
- match p1.0 {}
-}
+++ /dev/null
-use crate::ffi::OsStr;
-use crate::fmt;
-use crate::io;
-use crate::sys::fs::File;
-use crate::sys::pipe::AnonPipe;
-use crate::sys::{unsupported, Void};
-use crate::sys_common::process::CommandEnv;
-
-pub use crate::ffi::OsString as EnvKey;
-
-////////////////////////////////////////////////////////////////////////////////
-// Command
-////////////////////////////////////////////////////////////////////////////////
-
-pub struct Command {
- env: CommandEnv,
-}
-
-// passed back to std::process with the pipes connected to the child, if any
-// were requested
-pub struct StdioPipes {
- pub stdin: Option<AnonPipe>,
- pub stdout: Option<AnonPipe>,
- pub stderr: Option<AnonPipe>,
-}
-
-pub enum Stdio {
- Inherit,
- Null,
- MakePipe,
-}
-
-impl Command {
- pub fn new(_program: &OsStr) -> Command {
- Command { env: Default::default() }
- }
-
- pub fn arg(&mut self, _arg: &OsStr) {}
-
- pub fn env_mut(&mut self) -> &mut CommandEnv {
- &mut self.env
- }
-
- pub fn cwd(&mut self, _dir: &OsStr) {}
-
- pub fn stdin(&mut self, _stdin: Stdio) {}
-
- pub fn stdout(&mut self, _stdout: Stdio) {}
-
- pub fn stderr(&mut self, _stderr: Stdio) {}
-
- pub fn spawn(
- &mut self,
- _default: Stdio,
- _needs_stdin: bool,
- ) -> io::Result<(Process, StdioPipes)> {
- unsupported()
- }
-}
-
-impl From<AnonPipe> for Stdio {
- fn from(pipe: AnonPipe) -> Stdio {
- pipe.diverge()
- }
-}
-
-impl From<File> for Stdio {
- fn from(file: File) -> Stdio {
- file.diverge()
- }
-}
-
-impl fmt::Debug for Command {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- Ok(())
- }
-}
-
-pub struct ExitStatus(Void);
-
-impl ExitStatus {
- pub fn success(&self) -> bool {
- match self.0 {}
- }
-
- pub fn code(&self) -> Option<i32> {
- match self.0 {}
- }
-}
-
-impl Clone for ExitStatus {
- fn clone(&self) -> ExitStatus {
- match self.0 {}
- }
-}
-
-impl Copy for ExitStatus {}
-
-impl PartialEq for ExitStatus {
- fn eq(&self, _other: &ExitStatus) -> bool {
- match self.0 {}
- }
-}
-
-impl Eq for ExitStatus {}
-
-impl fmt::Debug for ExitStatus {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self.0 {}
- }
-}
-
-impl fmt::Display for ExitStatus {
- fn fmt(&self, _f: &mut fmt::Formatter<'_>) -> fmt::Result {
- match self.0 {}
- }
-}
-
-#[derive(PartialEq, Eq, Clone, Copy, Debug)]
-pub struct ExitCode(bool);
-
-impl ExitCode {
- pub const SUCCESS: ExitCode = ExitCode(false);
- pub const FAILURE: ExitCode = ExitCode(true);
-
- pub fn as_i32(&self) -> i32 {
- self.0 as i32
- }
-}
-
-pub struct Process(Void);
-
-impl Process {
- pub fn id(&self) -> u32 {
- match self.0 {}
- }
-
- pub fn kill(&mut self) -> io::Result<()> {
- match self.0 {}
- }
-
- pub fn wait(&mut self) -> io::Result<ExitStatus> {
- match self.0 {}
- }
-
- pub fn try_wait(&mut self) -> io::Result<Option<ExitStatus>> {
- match self.0 {}
- }
-}
+++ /dev/null
-use crate::cell::UnsafeCell;
-
-pub struct RWLock {
- mode: UnsafeCell<isize>,
-}
-
-unsafe impl Send for RWLock {}
-unsafe impl Sync for RWLock {} // no threads on wasm
-
-impl RWLock {
- pub const fn new() -> RWLock {
- RWLock { mode: UnsafeCell::new(0) }
- }
-
- #[inline]
- pub unsafe fn read(&self) {
- let mode = self.mode.get();
- if *mode >= 0 {
- *mode += 1;
- } else {
- rtabort!("rwlock locked for writing");
- }
- }
-
- #[inline]
- pub unsafe fn try_read(&self) -> bool {
- let mode = self.mode.get();
- if *mode >= 0 {
- *mode += 1;
- true
- } else {
- false
- }
- }
-
- #[inline]
- pub unsafe fn write(&self) {
- let mode = self.mode.get();
- if *mode == 0 {
- *mode = -1;
- } else {
- rtabort!("rwlock locked for reading")
- }
- }
-
- #[inline]
- pub unsafe fn try_write(&self) -> bool {
- let mode = self.mode.get();
- if *mode == 0 {
- *mode = -1;
- true
- } else {
- false
- }
- }
-
- #[inline]
- pub unsafe fn read_unlock(&self) {
- *self.mode.get() -= 1;
- }
-
- #[inline]
- pub unsafe fn write_unlock(&self) {
- *self.mode.get() += 1;
- }
-
- #[inline]
- pub unsafe fn destroy(&self) {}
-}
+++ /dev/null
-pub unsafe fn init() {}
-
-pub unsafe fn cleanup() {}
+++ /dev/null
-use crate::io;
-
-pub struct Stdin;
-pub struct Stdout;
-pub struct Stderr;
-
-impl Stdin {
- pub fn new() -> io::Result<Stdin> {
- Ok(Stdin)
- }
-}
-
-impl io::Read for Stdin {
- fn read(&mut self, _buf: &mut [u8]) -> io::Result<usize> {
- Ok(0)
- }
-}
-
-impl Stdout {
- pub fn new() -> io::Result<Stdout> {
- Ok(Stdout)
- }
-}
-
-impl io::Write for Stdout {
- fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
- Ok(buf.len())
- }
-
- fn flush(&mut self) -> io::Result<()> {
- Ok(())
- }
-}
-
-impl Stderr {
- pub fn new() -> io::Result<Stderr> {
- Ok(Stderr)
- }
-}
-
-impl io::Write for Stderr {
- fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
- Ok(buf.len())
- }
-
- fn flush(&mut self) -> io::Result<()> {
- Ok(())
- }
-}
-
-pub const STDIN_BUF_SIZE: usize = 0;
-
-pub fn is_ebadf(_err: &io::Error) -> bool {
- true
-}
-
-pub fn panic_output() -> Option<Vec<u8>> {
- None
-}
+++ /dev/null
-#![unstable(feature = "thread_local_internals", issue = "none")]
-
-pub unsafe fn register_dtor(_t: *mut u8, _dtor: unsafe extern "C" fn(*mut u8)) {
- // FIXME: right now there is no concept of "thread exit", but this is likely
- // going to show up at some point in the form of an exported symbol that the
- // wasm runtime is going to be expected to call. For now we basically just
- // ignore the arguments, but if such a function starts to exist it will
- // likely look like the OSX implementation in `unix/fast_thread_local.rs`
-}
+++ /dev/null
-pub type Key = usize;
-
-#[inline]
-pub unsafe fn create(_dtor: Option<unsafe extern "C" fn(*mut u8)>) -> Key {
- panic!("should not be used on the wasm target");
-}
-
-#[inline]
-pub unsafe fn set(_key: Key, _value: *mut u8) {
- panic!("should not be used on the wasm target");
-}
-
-#[inline]
-pub unsafe fn get(_key: Key) -> *mut u8 {
- panic!("should not be used on the wasm target");
-}
-
-#[inline]
-pub unsafe fn destroy(_key: Key) {
- panic!("should not be used on the wasm target");
-}
-
-#[inline]
-pub fn requires_synchronized_create() -> bool {
- panic!("should not be used on the wasm target");
-}
+++ /dev/null
-use crate::time::Duration;
-
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
-pub struct Instant(Duration);
-
-#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Debug, Hash)]
-pub struct SystemTime(Duration);
-
-pub const UNIX_EPOCH: SystemTime = SystemTime(Duration::from_secs(0));
-
-impl Instant {
- pub fn now() -> Instant {
- panic!("time not implemented on wasm32-unknown-unknown")
- }
-
- pub const fn zero() -> Instant {
- Instant(Duration::from_secs(0))
- }
-
- pub fn actually_monotonic() -> bool {
- false
- }
-
- pub fn checked_sub_instant(&self, other: &Instant) -> Option<Duration> {
- self.0.checked_sub(other.0)
- }
-
- pub fn checked_add_duration(&self, other: &Duration) -> Option<Instant> {
- Some(Instant(self.0.checked_add(*other)?))
- }
-
- pub fn checked_sub_duration(&self, other: &Duration) -> Option<Instant> {
- Some(Instant(self.0.checked_sub(*other)?))
- }
-}
-
-impl SystemTime {
- pub fn now() -> SystemTime {
- panic!("time not implemented on wasm32-unknown-unknown")
- }
-
- pub fn sub_time(&self, other: &SystemTime) -> Result<Duration, Duration> {
- self.0.checked_sub(other.0).ok_or_else(|| other.0 - self.0)
- }
-
- pub fn checked_add_duration(&self, other: &Duration) -> Option<SystemTime> {
- Some(SystemTime(self.0.checked_add(*other)?))
- }
-
- pub fn checked_sub_duration(&self, other: &Duration) -> Option<SystemTime> {
- Some(SystemTime(self.0.checked_sub(*other)?))
- }
-}
}
}
+/// Converts a raw `c::DWORD` to a type-safe `ExitStatus` by wrapping it without copying.
impl From<c::DWORD> for ExitStatus {
fn from(u: c::DWORD) -> ExitStatus {
ExitStatus(u)
+use crate::backtrace_rs::{self, BacktraceFmt, BytesOrWideString, PrintFmt};
use crate::borrow::Cow;
/// Common code for printing the backtrace in the same way across the different
/// supported platforms.
use crate::sync::atomic::{self, Ordering};
use crate::sys::mutex::Mutex;
-use backtrace_rs::{BacktraceFmt, BytesOrWideString, PrintFmt};
-
/// Max number of frames to print.
const MAX_NB_FRAMES: usize = 100;
pub mod fs;
pub mod io;
pub mod mutex;
-#[cfg(any(doc, // see `mod os`, docs are generated for multiple platforms
- unix,
- target_os = "redox",
- target_os = "cloudabi",
- target_os = "hermit",
- target_arch = "wasm32",
- all(target_vendor = "fortanix", target_env = "sgx")))]
+// `doc` is required because `sys/mod.rs` imports `unix/ext/mod.rs` on Windows
+// when generating documentation.
+#[cfg(any(doc, not(windows)))]
pub mod os_str_bytes;
pub mod poison;
pub mod process;
if #[cfg(any(target_os = "cloudabi",
target_os = "l4re",
target_os = "hermit",
+ feature = "restricted-std",
all(target_arch = "wasm32", not(target_os = "emscripten")),
all(target_vendor = "fortanix", target_env = "sgx")))] {
pub use crate::sys::net;
/// Also, until `init` is called, behavior is undefined if this
/// mutex is ever used reentrantly, i.e., `raw_lock` or `try_lock`
/// are called by the thread currently holding the lock.
+ #[rustc_const_stable(feature = "const_sys_mutex_new", since = "1.0.0")]
pub const fn new() -> Mutex {
Mutex(imp::Mutex::new())
}
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn test_park_timeout_unpark_not_called() {
for _ in 0..10 {
thread::park_timeout(Duration::from_millis(10));
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn test_park_timeout_unpark_called_other_thread() {
for _ in 0..10 {
let th = thread::current();
}
#[test]
- #[cfg_attr(target_env = "sgx", ignore)] // FIXME: https://github.com/fortanix/rust-sgx/issues/31
fn sleep_ms_smoke() {
thread::sleep(Duration::from_millis(2));
}
crate-type = ["dylib", "rlib"]
[dependencies]
+cfg-if = { version = "0.1.8", features = ['rustc-dep-of-std'] }
getopts = { version = "0.2.21", features = ['rustc-dep-of-std'] }
term = { path = "../libterm" }
std = { path = "../libstd" }
# Forward features to the `std` crate as necessary
[features]
+default = ["std_detect_file_io", "std_detect_dlsym_getauxval", "panic-unwind"]
backtrace = ["std/backtrace"]
compiler-builtins-c = ["std/compiler-builtins-c"]
llvm-libunwind = ["std/llvm-libunwind"]
panic-unwind = ["std/panic_unwind"]
panic_immediate_abort = ["std/panic_immediate_abort"]
profiler = ["std/profiler"]
+std_detect_file_io = ["std/std_detect_file_io"]
+std_detect_dlsym_getauxval = ["std/std_detect_dlsym_getauxval"]
}
Err(..) => num_cpus(),
};
+}
- #[cfg(windows)]
- #[allow(nonstandard_style)]
- fn num_cpus() -> usize {
- #[repr(C)]
- struct SYSTEM_INFO {
- wProcessorArchitecture: u16,
- wReserved: u16,
- dwPageSize: u32,
- lpMinimumApplicationAddress: *mut u8,
- lpMaximumApplicationAddress: *mut u8,
- dwActiveProcessorMask: *mut u8,
- dwNumberOfProcessors: u32,
- dwProcessorType: u32,
- dwAllocationGranularity: u32,
- wProcessorLevel: u16,
- wProcessorRevision: u16,
- }
- extern "system" {
- fn GetSystemInfo(info: *mut SYSTEM_INFO) -> i32;
- }
- unsafe {
- let mut sysinfo = std::mem::zeroed();
- GetSystemInfo(&mut sysinfo);
- sysinfo.dwNumberOfProcessors as usize
+cfg_if::cfg_if! {
+ if #[cfg(windows)] {
+ #[allow(nonstandard_style)]
+ fn num_cpus() -> usize {
+ #[repr(C)]
+ struct SYSTEM_INFO {
+ wProcessorArchitecture: u16,
+ wReserved: u16,
+ dwPageSize: u32,
+ lpMinimumApplicationAddress: *mut u8,
+ lpMaximumApplicationAddress: *mut u8,
+ dwActiveProcessorMask: *mut u8,
+ dwNumberOfProcessors: u32,
+ dwProcessorType: u32,
+ dwAllocationGranularity: u32,
+ wProcessorLevel: u16,
+ wProcessorRevision: u16,
+ }
+ extern "system" {
+ fn GetSystemInfo(info: *mut SYSTEM_INFO) -> i32;
+ }
+ unsafe {
+ let mut sysinfo = std::mem::zeroed();
+ GetSystemInfo(&mut sysinfo);
+ sysinfo.dwNumberOfProcessors as usize
+ }
}
- }
-
- #[cfg(target_os = "vxworks")]
- fn num_cpus() -> usize {
- // FIXME: Implement num_cpus on vxWorks
- 1
- }
-
- #[cfg(target_os = "redox")]
- fn num_cpus() -> usize {
- // FIXME: Implement num_cpus on Redox
- 1
- }
-
- #[cfg(target_os = "hermit")]
- fn num_cpus() -> usize {
- // FIXME: Implement num_cpus on HermitCore
- 1
- }
-
- #[cfg(any(
- all(target_arch = "wasm32", not(target_os = "emscripten")),
- all(target_vendor = "fortanix", target_env = "sgx")
- ))]
- fn num_cpus() -> usize {
- 1
- }
-
- #[cfg(any(
+ } else if #[cfg(any(
target_os = "android",
target_os = "cloudabi",
target_os = "emscripten",
target_os = "macos",
target_os = "solaris",
target_os = "illumos",
- ))]
- fn num_cpus() -> usize {
- unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) as usize }
- }
-
- #[cfg(any(target_os = "freebsd", target_os = "dragonfly", target_os = "netbsd"))]
- fn num_cpus() -> usize {
- use std::ptr;
+ ))] {
+ fn num_cpus() -> usize {
+ unsafe { libc::sysconf(libc::_SC_NPROCESSORS_ONLN) as usize }
+ }
+ } else if #[cfg(any(target_os = "freebsd", target_os = "dragonfly", target_os = "netbsd"))] {
+ fn num_cpus() -> usize {
+ use std::ptr;
- let mut cpus: libc::c_uint = 0;
- let mut cpus_size = std::mem::size_of_val(&cpus);
+ let mut cpus: libc::c_uint = 0;
+ let mut cpus_size = std::mem::size_of_val(&cpus);
- unsafe {
- cpus = libc::sysconf(libc::_SC_NPROCESSORS_ONLN) as libc::c_uint;
+ unsafe {
+ cpus = libc::sysconf(libc::_SC_NPROCESSORS_ONLN) as libc::c_uint;
+ }
+ if cpus < 1 {
+ let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0];
+ unsafe {
+ libc::sysctl(
+ mib.as_mut_ptr(),
+ 2,
+ &mut cpus as *mut _ as *mut _,
+ &mut cpus_size as *mut _ as *mut _,
+ ptr::null_mut(),
+ 0,
+ );
+ }
+ if cpus < 1 {
+ cpus = 1;
+ }
+ }
+ cpus as usize
}
- if cpus < 1 {
+ } else if #[cfg(target_os = "openbsd")] {
+ fn num_cpus() -> usize {
+ use std::ptr;
+
+ let mut cpus: libc::c_uint = 0;
+ let mut cpus_size = std::mem::size_of_val(&cpus);
let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0];
+
unsafe {
libc::sysctl(
mib.as_mut_ptr(),
if cpus < 1 {
cpus = 1;
}
+ cpus as usize
}
- cpus as usize
- }
-
- #[cfg(target_os = "openbsd")]
- fn num_cpus() -> usize {
- use std::ptr;
-
- let mut cpus: libc::c_uint = 0;
- let mut cpus_size = std::mem::size_of_val(&cpus);
- let mut mib = [libc::CTL_HW, libc::HW_NCPU, 0, 0];
-
- unsafe {
- libc::sysctl(
- mib.as_mut_ptr(),
- 2,
- &mut cpus as *mut _ as *mut _,
- &mut cpus_size as *mut _ as *mut _,
- ptr::null_mut(),
- 0,
- );
- }
- if cpus < 1 {
- cpus = 1;
+ } else {
+ // FIXME: implement on vxWorks, Redox, HermitCore, Haiku, l4re
+ fn num_cpus() -> usize {
+ 1
}
- cpus as usize
- }
-
- #[cfg(target_os = "haiku")]
- fn num_cpus() -> usize {
- // FIXME: implement
- 1
- }
-
- #[cfg(target_os = "l4re")]
- fn num_cpus() -> usize {
- // FIXME: implement
- 1
}
}
//! Helper module which provides a function to test
//! if stdout is a tty.
-#[cfg(any(
- target_os = "cloudabi",
- target_os = "hermit",
- all(target_arch = "wasm32", not(target_os = "emscripten")),
- all(target_vendor = "fortanix", target_env = "sgx")
-))]
-pub fn stdout_isatty() -> bool {
- // FIXME: Implement isatty on SGX
- false
-}
-#[cfg(unix)]
-pub fn stdout_isatty() -> bool {
- unsafe { libc::isatty(libc::STDOUT_FILENO) != 0 }
-}
-#[cfg(windows)]
-pub fn stdout_isatty() -> bool {
- type DWORD = u32;
- type BOOL = i32;
- type HANDLE = *mut u8;
- type LPDWORD = *mut u32;
- const STD_OUTPUT_HANDLE: DWORD = -11i32 as DWORD;
- extern "system" {
- fn GetStdHandle(which: DWORD) -> HANDLE;
- fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: LPDWORD) -> BOOL;
- }
- unsafe {
- let handle = GetStdHandle(STD_OUTPUT_HANDLE);
- let mut out = 0;
- GetConsoleMode(handle, &mut out) != 0
+cfg_if::cfg_if! {
+ if #[cfg(unix)] {
+ pub fn stdout_isatty() -> bool {
+ unsafe { libc::isatty(libc::STDOUT_FILENO) != 0 }
+ }
+ } else if #[cfg(windows)] {
+ pub fn stdout_isatty() -> bool {
+ type DWORD = u32;
+ type BOOL = i32;
+ type HANDLE = *mut u8;
+ type LPDWORD = *mut u32;
+ const STD_OUTPUT_HANDLE: DWORD = -11i32 as DWORD;
+ extern "system" {
+ fn GetStdHandle(which: DWORD) -> HANDLE;
+ fn GetConsoleMode(hConsoleHandle: HANDLE, lpMode: LPDWORD) -> BOOL;
+ }
+ unsafe {
+ let handle = GetStdHandle(STD_OUTPUT_HANDLE);
+ let mut out = 0;
+ GetConsoleMode(handle, &mut out) != 0
+ }
+ }
+ } else {
+ // FIXME: Implement isatty on SGX
+ pub fn stdout_isatty() -> bool {
+ false
+ }
}
}
cfg_if::cfg_if! {
if #[cfg(target_env = "msvc")] {
- // no extra unwinder support needed
- } else if #[cfg(all(target_arch = "wasm32", not(target_os = "emscripten")))] {
- // no unwinder on the system!
- } else {
+ // Windows MSVC no extra unwinder support needed
+ } else if #[cfg(any(
+ target_os = "l4re",
+ target_os = "none",
+ ))] {
+ // These "unix" family members do not have unwinder.
+ // Note this also matches x86_64-linux-kernel.
+ } else if #[cfg(any(
+ unix,
+ windows,
+ target_os = "cloudabi",
+ all(target_vendor = "fortanix", target_env = "sgx"),
+ ))] {
mod libunwind;
pub use libunwind::*;
+ } else {
+ // no unwinder on the system!
+ // - wasm32 (not emscripten, which is "unix" family)
+ // - os=none ("bare metal" targets)
+ // - os=hermit
+ // - os=uefi
+ // - os=cuda
+ // - nvptx64-nvidia-cuda
+ // - mipsel-sony-psp
+ // - Any new targets not listed above.
}
}
--- /dev/null
+#include "rustllvm.h"
+#include "llvm/ProfileData/Coverage/CoverageMapping.h"
+#include "llvm/ProfileData/Coverage/CoverageMappingWriter.h"
+#include "llvm/ProfileData/InstrProf.h"
+#include "llvm/ADT/ArrayRef.h"
+
+#include <iostream>
+
+using namespace llvm;
+
+extern "C" SmallVectorTemplateBase<coverage::CounterExpression>
+ *LLVMRustCoverageSmallVectorCounterExpressionCreate() {
+ return new SmallVector<coverage::CounterExpression, 32>();
+}
+
+extern "C" void LLVMRustCoverageSmallVectorCounterExpressionDispose(
+ SmallVectorTemplateBase<coverage::CounterExpression> *Vector) {
+ delete Vector;
+}
+
+extern "C" void LLVMRustCoverageSmallVectorCounterExpressionAdd(
+ SmallVectorTemplateBase<coverage::CounterExpression> *Expressions,
+ coverage::CounterExpression::ExprKind Kind,
+ unsigned LeftIndex,
+ unsigned RightIndex) {
+ auto LHS = coverage::Counter::getCounter(LeftIndex);
+ auto RHS = coverage::Counter::getCounter(RightIndex);
+ Expressions->push_back(coverage::CounterExpression { Kind, LHS, RHS });
+}
+
+extern "C" SmallVectorTemplateBase<coverage::CounterMappingRegion>
+ *LLVMRustCoverageSmallVectorCounterMappingRegionCreate() {
+ return new SmallVector<coverage::CounterMappingRegion, 32>();
+}
+
+extern "C" void LLVMRustCoverageSmallVectorCounterMappingRegionDispose(
+ SmallVectorTemplateBase<coverage::CounterMappingRegion> *Vector) {
+ delete Vector;
+}
+
+extern "C" void LLVMRustCoverageSmallVectorCounterMappingRegionAdd(
+ SmallVectorTemplateBase<coverage::CounterMappingRegion> *MappingRegions,
+ unsigned Index,
+ unsigned FileID,
+ unsigned LineStart,
+ unsigned ColumnStart,
+ unsigned LineEnd,
+ unsigned ColumnEnd) {
+ auto Counter = coverage::Counter::getCounter(Index);
+ MappingRegions->push_back(coverage::CounterMappingRegion::makeRegion(
+ Counter, FileID, LineStart,
+ ColumnStart, LineEnd, ColumnEnd));
+
+ // FIXME(richkadel): As applicable, implement additional CounterMappingRegion types using the
+ // static method alternatives to `coverage::CounterMappingRegion::makeRegion`:
+ //
+ // makeExpansion(unsigned FileID, unsigned ExpandedFileID, unsigned LineStart,
+ // unsigned ColumnStart, unsigned LineEnd, unsigned ColumnEnd) {
+ // makeSkipped(unsigned FileID, unsigned LineStart, unsigned ColumnStart,
+ // unsigned LineEnd, unsigned ColumnEnd) {
+ // makeGapRegion(Counter Count, unsigned FileID, unsigned LineStart,
+ // unsigned ColumnStart, unsigned LineEnd, unsigned ColumnEnd) {
+}
+
+extern "C" void LLVMRustCoverageWriteFilenamesSectionToBuffer(
+ const char* const Filenames[],
+ size_t FilenamesLen,
+ RustStringRef BufferOut) {
+ SmallVector<StringRef,32> FilenameRefs;
+ for (size_t i = 0; i < FilenamesLen; i++) {
+ FilenameRefs.push_back(StringRef(Filenames[i]));
+ }
+ auto FilenamesWriter = coverage::CoverageFilenamesSectionWriter(
+ makeArrayRef(FilenameRefs));
+ RawRustStringOstream OS(BufferOut);
+ FilenamesWriter.write(OS);
+}
+
+extern "C" void LLVMRustCoverageWriteMappingToBuffer(
+ const unsigned *VirtualFileMappingIDs,
+ unsigned NumVirtualFileMappingIDs,
+ const SmallVectorImpl<coverage::CounterExpression> *Expressions,
+ SmallVectorImpl<coverage::CounterMappingRegion> *MappingRegions,
+ RustStringRef BufferOut) {
+ auto CoverageMappingWriter = coverage::CoverageMappingWriter(
+ makeArrayRef(VirtualFileMappingIDs, NumVirtualFileMappingIDs),
+ makeArrayRef(*Expressions),
+ MutableArrayRef<coverage::CounterMappingRegion> { *MappingRegions });
+ RawRustStringOstream OS(BufferOut);
+ CoverageMappingWriter.write(OS);
+}
+
+extern "C" uint64_t LLVMRustCoverageComputeHash(const char *Name) {
+ StringRef NameRef(Name);
+ return IndexedInstrProf::ComputeHash(NameRef);
+}
+
+extern "C" void LLVMRustCoverageWriteSectionNameToString(LLVMModuleRef M,
+ RustStringRef Str) {
+ Triple TargetTriple(unwrap(M)->getTargetTriple());
+ auto name = getInstrProfSectionName(IPSK_covmap,
+ TargetTriple.getObjectFormat());
+ RawRustStringOstream OS(Str);
+ OS << name;
+}
+
+extern "C" void LLVMRustCoverageWriteMappingVarNameToString(RustStringRef Str) {
+ auto name = getCoverageMappingVarName();
+ RawRustStringOstream OS(Str);
+ OS << name;
+}
+
+extern "C" uint32_t LLVMRustCoverageMappingVersion() {
+ return coverage::CovMapVersion::CurrentVersion;
+}
FTy, Callee, makeArrayRef(unwrap(Args), NumArgs), Bundles));
}
-extern "C" LLVMValueRef LLVMRustGetInstrprofIncrementIntrinsic(LLVMModuleRef M) {
+extern "C" LLVMValueRef LLVMRustGetInstrProfIncrementIntrinsic(LLVMModuleRef M) {
return wrap(llvm::Intrinsic::getDeclaration(unwrap(M),
(llvm::Intrinsic::ID)llvm::Intrinsic::instrprof_increment));
}
#include "llvm-c/Object.h"
#include "llvm/ADT/ArrayRef.h"
#include "llvm/ADT/DenseSet.h"
+#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Triple.h"
#include "llvm/Analysis/Lint.h"
#include "llvm/Analysis/Passes.h"
# source tarball for a stable release you'll likely see `1.x.0` for rustc and
# `0.(x+1).0` for Cargo where they were released on `date`.
-date: 2020-06-16
+date: 2020-07-16
rustc: beta
cargo: beta
# bootstrapping issues with use of new syntax in this repo. If you're looking at
# the beta/stable branch, this key should be omitted, as we don't want to depend
# on rustfmt from nightly there.
-rustfmt: nightly-2020-04-22
+rustfmt: nightly-2020-07-12
# When making a stable release the process currently looks like:
#
--- /dev/null
+// Code generation of atomic operations.
+//
+// compile-flags: -O
+#![crate_type = "lib"]
+
+use std::sync::atomic::{AtomicI32, Ordering::*};
+
+// CHECK-LABEL: @compare_exchange
+#[no_mangle]
+pub fn compare_exchange(a: &AtomicI32) {
+ // CHECK: cmpxchg i32* %{{.*}}, i32 0, i32 10 monotonic monotonic
+ let _ = a.compare_exchange(0, 10, Relaxed, Relaxed);
+
+ // CHECK: cmpxchg i32* %{{.*}}, i32 0, i32 20 release monotonic
+ let _ = a.compare_exchange(0, 20, Release, Relaxed);
+
+ // CHECK: cmpxchg i32* %{{.*}}, i32 0, i32 30 acquire monotonic
+ // CHECK: cmpxchg i32* %{{.*}}, i32 0, i32 31 acquire acquire
+ let _ = a.compare_exchange(0, 30, Acquire, Relaxed);
+ let _ = a.compare_exchange(0, 31, Acquire, Acquire);
+
+ // CHECK: cmpxchg i32* %{{.*}}, i32 0, i32 40 acq_rel monotonic
+ // CHECK: cmpxchg i32* %{{.*}}, i32 0, i32 41 acq_rel acquire
+ let _ = a.compare_exchange(0, 40, AcqRel, Relaxed);
+ let _ = a.compare_exchange(0, 41, AcqRel, Acquire);
+
+ // CHECK: cmpxchg i32* %{{.*}}, i32 0, i32 50 seq_cst monotonic
+ // CHECK: cmpxchg i32* %{{.*}}, i32 0, i32 51 seq_cst acquire
+ // CHECK: cmpxchg i32* %{{.*}}, i32 0, i32 52 seq_cst seq_cst
+ let _ = a.compare_exchange(0, 50, SeqCst, Relaxed);
+ let _ = a.compare_exchange(0, 51, SeqCst, Acquire);
+ let _ = a.compare_exchange(0, 52, SeqCst, SeqCst);
+}
+
+// CHECK-LABEL: @compare_exchange_weak
+#[no_mangle]
+pub fn compare_exchange_weak(w: &AtomicI32) {
+ // CHECK: cmpxchg weak i32* %{{.*}}, i32 1, i32 10 monotonic monotonic
+ let _ = w.compare_exchange_weak(1, 10, Relaxed, Relaxed);
+
+ // CHECK: cmpxchg weak i32* %{{.*}}, i32 1, i32 20 release monotonic
+ let _ = w.compare_exchange_weak(1, 20, Release, Relaxed);
+
+ // CHECK: cmpxchg weak i32* %{{.*}}, i32 1, i32 30 acquire monotonic
+ // CHECK: cmpxchg weak i32* %{{.*}}, i32 1, i32 31 acquire acquire
+ let _ = w.compare_exchange_weak(1, 30, Acquire, Relaxed);
+ let _ = w.compare_exchange_weak(1, 31, Acquire, Acquire);
+
+ // CHECK: cmpxchg weak i32* %{{.*}}, i32 1, i32 40 acq_rel monotonic
+ // CHECK: cmpxchg weak i32* %{{.*}}, i32 1, i32 41 acq_rel acquire
+ let _ = w.compare_exchange_weak(1, 40, AcqRel, Relaxed);
+ let _ = w.compare_exchange_weak(1, 41, AcqRel, Acquire);
+
+ // CHECK: cmpxchg weak i32* %{{.*}}, i32 1, i32 50 seq_cst monotonic
+ // CHECK: cmpxchg weak i32* %{{.*}}, i32 1, i32 51 seq_cst acquire
+ // CHECK: cmpxchg weak i32* %{{.*}}, i32 1, i32 52 seq_cst seq_cst
+ let _ = w.compare_exchange_weak(1, 50, SeqCst, Relaxed);
+ let _ = w.compare_exchange_weak(1, 51, SeqCst, Acquire);
+ let _ = w.compare_exchange_weak(1, 52, SeqCst, SeqCst);
+}
#![crate_type = "rlib"]
#![feature(thread_local)]
-// CHECK: @STATIC_VAR_1 = thread_local local_unnamed_addr global <{ [32 x i8] }> zeroinitializer, section "__DATA,__thread_bss", align 4
+// local_unnamed_addr does not appear when std is built with debug assertions.
+// CHECK: @STATIC_VAR_1 = thread_local {{(local_unnamed_addr )?}}global <{ [32 x i8] }> zeroinitializer, section "__DATA,__thread_bss", align 4
#[no_mangle]
#[thread_local]
static mut STATIC_VAR_1: [u32; 8] = [0; 8];
-// CHECK: @STATIC_VAR_2 = thread_local local_unnamed_addr global <{ [32 x i8] }> <{{[^>]*}}>, section "__DATA,__thread_data", align 4
+// CHECK: @STATIC_VAR_2 = thread_local {{(local_unnamed_addr )?}}global <{ [32 x i8] }> <{{[^>]*}}>, section "__DATA,__thread_data", align 4
#[no_mangle]
#[thread_local]
static mut STATIC_VAR_2: [u32; 8] = [4; 8];
// min-system-llvm-version: 9.0
// ignore-arm
+// ignore-aarch64
// ignore-mips
// ignore-mips64
// ignore-powerpc
-// WONTFIX(#20184) Needs landing pads (not present in stage1) or the compiler hangs.
-// ignore-stage1
// compile-flags: -C codegen-units=2
// ignore-emscripten
// compile-flags: --extern std=
-// error-pattern: can't find crate for `std`
+// error-pattern: extern location for std does not exist
fn main() {}
pub mod fn_calls_methods_in_same_impl {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let x = Point { x: 2.0, y: 2.0 };
x.distance_from_origin();
pub mod fn_calls_free_fn {
use point::{self, Point};
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let x = Point { x: 2.0, y: 2.0 };
point::distance_squared(&x);
pub mod fn_make_struct {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn make_origin() -> Point {
Point { x: 2.0, y: 2.0 }
}
pub mod fn_read_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn get_x(p: Point) -> f32 {
p.x
}
pub mod fn_write_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn inc_x(p: &mut Point) {
p.x += 1.0;
}
extern crate a;
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
pub fn call_function0() {
a::function0(77);
}
-#[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass2")]
pub fn call_function1() {
a::function1(77);
}
pub mod fn_with_type_in_sig {
use point::Point;
- #[rustc_dirty(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_dirty(label="typeck", cfg="cfail2")]
pub fn boop(p: Option<&Point>) -> f32 {
p.map(|p| p.total()).unwrap_or(0.0)
}
pub mod call_fn_with_type_in_sig {
use fn_with_type_in_sig;
- #[rustc_dirty(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_dirty(label="typeck", cfg="cfail2")]
pub fn bip() -> f32 {
fn_with_type_in_sig::boop(None)
}
pub mod fn_with_type_in_body {
use point::Point;
- #[rustc_dirty(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_dirty(label="typeck", cfg="cfail2")]
pub fn boop() -> f32 {
Point::origin().total()
}
pub mod call_fn_with_type_in_body {
use fn_with_type_in_body;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn bip() -> f32 {
fn_with_type_in_body::boop()
}
pub mod fn_make_struct {
use point::Point;
- #[rustc_dirty(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_dirty(label="typeck", cfg="cfail2")]
pub fn make_origin(p: Point) -> Point {
Point { ..p }
}
pub mod fn_read_field {
use point::Point;
- #[rustc_dirty(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_dirty(label="typeck", cfg="cfail2")]
pub fn get_x(p: Point) -> f32 {
p.x
}
pub mod fn_write_field {
use point::Point;
- #[rustc_dirty(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_dirty(label="typeck", cfg="cfail2")]
pub fn inc_x(p: &mut Point) {
p.x += 1.0;
}
use a::A;
use b::B;
-//? #[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+//? #[rustc_clean(label="typeck", cfg="rpass2")]
pub fn main() {
A + B;
}
pub mod fn_calls_methods_in_same_impl {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let x = Point { x: 2.0, y: 2.0 };
x.distance_from_origin();
pub mod fn_calls_methods_in_another_impl {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let mut x = Point { x: 2.0, y: 2.0 };
x.translate(3.0, 3.0);
pub mod fn_make_struct {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn make_origin() -> Point {
Point { x: 2.0, y: 2.0 }
}
pub mod fn_read_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn get_x(p: Point) -> f32 {
p.x
}
pub mod fn_write_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn inc_x(p: &mut Point) {
p.x += 1.0;
}
pub mod fn_calls_methods_in_same_impl {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let x = Point { x: 2.0, y: 2.0 };
x.distance_from_origin();
pub mod fn_calls_methods_in_another_impl {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let mut x = Point { x: 2.0, y: 2.0 };
x.translate(3.0, 3.0);
pub mod fn_make_struct {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn make_origin() -> Point {
Point { x: 2.0, y: 2.0 }
}
pub mod fn_read_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn get_x(p: Point) -> f32 {
p.x
}
pub mod fn_write_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn inc_x(p: &mut Point) {
p.x += 1.0;
}
pub mod fn_calls_methods_in_same_impl {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let x = Point { x: 2.0, y: 2.0 };
x.distance_from_origin();
pub mod fn_calls_methods_in_another_impl {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let mut x = Point { x: 2.0, y: 2.0 };
x.translate(3.0, 3.0);
pub mod fn_make_struct {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn make_origin() -> Point {
Point { x: 2.0, y: 2.0 }
}
pub mod fn_read_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn get_x(p: Point) -> f32 {
p.x
}
pub mod fn_write_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn inc_x(p: &mut Point) {
p.x += 1.0;
}
pub mod fn_calls_methods_in_same_impl {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let x = Point { x: 2.0, y: 2.0 };
x.distance_from_origin();
pub mod fn_calls_methods_in_another_impl {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn dirty() {
let mut x = Point { x: 2.0, y: 2.0 };
x.translate(3.0, 3.0);
pub mod fn_make_struct {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn make_origin() -> Point {
Point { x: 2.0, y: 2.0 }
}
pub mod fn_read_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn get_x(p: Point) -> f32 {
p.x
}
pub mod fn_write_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn inc_x(p: &mut Point) {
p.x += 1.0;
}
pub mod fn_calls_changed_method {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let p = Point { x: 2.0, y: 2.0 };
p.distance_from_origin();
pub mod fn_calls_another_method {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let p = Point { x: 2.0, y: 2.0 };
p.x();
pub mod fn_make_struct {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn make_origin() -> Point {
Point { x: 2.0, y: 2.0 }
}
pub mod fn_read_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn get_x(p: Point) -> f32 {
p.x
}
pub mod fn_write_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn inc_x(p: &mut Point) {
p.x += 1.0;
}
pub mod fn_calls_changed_method {
use point::Point;
- #[rustc_dirty(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_dirty(label="typeck", cfg="cfail2")]
pub fn check() {
let p = Point { x: 2.0, y: 2.0 };
p.distance_from_point(None);
pub mod fn_calls_another_method {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn check() {
let p = Point { x: 2.0, y: 2.0 };
p.x();
pub mod fn_make_struct {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn make_origin() -> Point {
Point { x: 2.0, y: 2.0 }
}
pub mod fn_read_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn get_x(p: Point) -> f32 {
p.x
}
pub mod fn_write_field {
use point::Point;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn inc_x(p: &mut Point) {
p.x += 1.0;
}
mod y {
use x;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
pub fn y() {
- //[cfail2]~^ ERROR `typeck_tables_of(y::y)` should be clean but is not
+ //[cfail2]~^ ERROR `typeck(y::y)` should be clean but is not
x::x();
}
}
mod z {
- #[rustc_dirty(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_dirty(label="typeck", cfg="cfail2")]
pub fn z() {
- //[cfail2]~^ ERROR `typeck_tables_of(z::z)` should be dirty but is not
+ //[cfail2]~^ ERROR `typeck(z::z)` should be dirty but is not
}
}
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_callee_function() {
callee2(1, 2)
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_callee_method() {
let s = Struct;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_ufcs_callee_method() {
let s = Struct;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
// One might think this would be expanded in the hir_owner_nodes/Mir, but it actually
// results in slightly different hir_owner/Mir.
#[cfg(not(cfail1))]
use super::Struct2 as Struct;
- #[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+ #[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_parameter() {
let x = 0u32;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_parameter_pattern() {
let _ = |(x,): (u32,)| x;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg = "cfail2", except = "hir_owner_nodes, typeck_tables_of")]
+#[rustc_clean(cfg = "cfail2", except = "hir_owner_nodes, typeck")]
#[rustc_clean(cfg = "cfail3")]
pub fn add_type_ascription_to_parameter() {
let closure = |x: u32| x + 1u32;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_parameter_type() {
let closure = |x: u16| (x as u64) + 1;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,typeck")]
#[rustc_clean(cfg="cfail3")]
// FIXME(michaelwoerister):Interesting. I would have thought that that changes the MIR. And it
// would if it were not all constants
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_constructor_path_struct_like() {
let _ = Enum2::Struct {
#[rustc_clean(
cfg="cfail2",
except="fn_sig,hir_owner,hir_owner_nodes,optimized_mir,\
- typeck_tables_of"
+ typeck"
)]
#[rustc_clean(cfg="cfail3")]
pub fn function() -> TheEnum {
#[cfg(not(cfail1))]
#[rustc_clean(
cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,typeck_tables_of"
+ except="hir_owner_nodes,optimized_mir,typeck"
)]
#[rustc_clean(cfg="cfail3")]
pub fn change_constructor_path_tuple_like() {
#[cfg(not(cfail1))]
#[rustc_clean(
cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,typeck_tables_of"
+ except="hir_owner_nodes,optimized_mir,typeck"
)]
#[rustc_clean(cfg="cfail3")]
pub fn change_constructor_variant_tuple_like() {
#[rustc_clean(
cfg="cfail2",
except="fn_sig,hir_owner,hir_owner_nodes,optimized_mir,\
- typeck_tables_of"
+ typeck"
)]
#[rustc_clean(cfg="cfail3")]
pub fn function() -> TheEnum {
#[cfg(not(cfail1))]
use super::Enum2::Tuple2 as Variant;
- #[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+ #[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn function() -> Enum2 {
Variant(0, 1, 2)
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_constructor_path_c_like() {
let _x = Clike2::B;
#[rustc_clean(
cfg="cfail2",
except="fn_sig,hir_owner,hir_owner_nodes,optimized_mir,\
- typeck_tables_of"
+ typeck"
)]
#[rustc_clean(cfg="cfail3")]
pub fn function() -> TheEnum {
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_iteration_variable_pattern() {
let mut _x = 0;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_break() {
let mut _x = 0;
#[cfg(not(cfail1))]
#[rustc_clean(
cfg = "cfail2",
- except = "hir_owner, hir_owner_nodes, optimized_mir, typeck_tables_of, fn_sig"
+ except = "hir_owner, hir_owner_nodes, optimized_mir, typeck, fn_sig"
)]
#[rustc_clean(cfg = "cfail3")]
pub fn add_parameter(p: i32) {}
#[cfg(not(cfail1))]
#[rustc_clean(
cfg = "cfail2",
- except = "hir_owner, hir_owner_nodes, optimized_mir, typeck_tables_of, fn_sig"
+ except = "hir_owner, hir_owner_nodes, optimized_mir, typeck, fn_sig"
)]
#[rustc_clean(cfg = "cfail3")]
pub fn type_of_parameter(p: i64) {}
#[cfg(not(cfail1))]
#[rustc_clean(
cfg = "cfail2",
- except = "hir_owner, hir_owner_nodes, optimized_mir, typeck_tables_of, fn_sig"
+ except = "hir_owner, hir_owner_nodes, optimized_mir, typeck, fn_sig"
)]
#[rustc_clean(cfg = "cfail3")]
pub fn type_of_parameter_ref(p: &mut i32) {}
#[cfg(not(cfail1))]
#[rustc_clean(
cfg = "cfail2",
- except = "hir_owner, hir_owner_nodes, optimized_mir, typeck_tables_of, fn_sig"
+ except = "hir_owner, hir_owner_nodes, optimized_mir, typeck, fn_sig"
)]
#[rustc_clean(cfg = "cfail3")]
pub fn order_of_parameters(p2: i64, p1: i32) {}
#[cfg(not(cfail1))]
#[rustc_clean(
cfg = "cfail2",
- except = "hir_owner, hir_owner_nodes, optimized_mir, typeck_tables_of, fn_sig"
+ except = "hir_owner, hir_owner_nodes, optimized_mir, typeck, fn_sig"
)]
#[rustc_clean(cfg = "cfail3")]
pub unsafe fn make_unsafe() {}
pub fn make_extern() {}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg = "cfail2", except = "hir_owner, hir_owner_nodes, typeck_tables_of, fn_sig")]
+#[rustc_clean(cfg = "cfail2", except = "hir_owner, hir_owner_nodes, typeck, fn_sig")]
#[rustc_clean(cfg = "cfail3")]
pub extern "C" fn make_extern() {}
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg = "cfail2", except = "hir_owner, hir_owner_nodes, typeck_tables_of, fn_sig")]
+#[rustc_clean(cfg = "cfail2", except = "hir_owner, hir_owner_nodes, typeck, fn_sig")]
#[rustc_clean(cfg = "cfail3")]
pub fn return_impl_trait() -> impl Clone {
0
#[rustc_clean(
cfg = "cfail2",
- except = "hir_owner, hir_owner_nodes, optimized_mir, typeck_tables_of, fn_sig"
+ except = "hir_owner, hir_owner_nodes, optimized_mir, typeck, fn_sig"
)]
#[rustc_clean(cfg = "cfail3")]
pub fn indirect_return_type() -> ReturnType {
#[rustc_clean(
cfg = "cfail2",
- except = "hir_owner, hir_owner_nodes, optimized_mir, typeck_tables_of, fn_sig"
+ except = "hir_owner, hir_owner_nodes, optimized_mir, typeck, fn_sig"
)]
#[rustc_clean(cfg = "cfail3")]
pub fn indirect_parameter_type(p: ParameterType) {}
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_condition(x: bool) -> u32 {
if !x {
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_condition_if_let(x: Option<u32>) -> u32 {
if let Some(_) = x {
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_then_branch_if_let(x: Option<u32>) -> u32 {
if let Some(x) = x {
impl Foo {
#[rustc_clean(
cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,promoted_mir,typeck_tables_of"
+ except="hir_owner_nodes,optimized_mir,promoted_mir,typeck"
)]
#[rustc_clean(cfg="cfail3")]
pub fn method_body() {
impl Foo {
#[rustc_clean(
cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,promoted_mir,typeck_tables_of"
+ except="hir_owner_nodes,optimized_mir,promoted_mir,typeck"
)]
#[rustc_clean(cfg="cfail3")]
#[inline]
impl Foo {
#[rustc_clean(
cfg="cfail2",
- except="hir_owner,hir_owner_nodes,fn_sig,typeck_tables_of,optimized_mir"
+ except="hir_owner,hir_owner_nodes,fn_sig,typeck,optimized_mir"
)]
#[rustc_clean(cfg="cfail3")]
pub fn method_selfmutness(&mut self) { }
impl Foo {
#[rustc_clean(
cfg="cfail2",
- except="hir_owner,hir_owner_nodes,fn_sig,typeck_tables_of,optimized_mir"
+ except="hir_owner,hir_owner_nodes,fn_sig,typeck,optimized_mir"
)]
#[rustc_clean(cfg="cfail3")]
pub fn add_method_parameter(&self, _: i32) { }
impl Foo {
#[rustc_clean(
cfg="cfail2",
- except="hir_owner,hir_owner_nodes,fn_sig,optimized_mir,typeck_tables_of")]
+ except="hir_owner,hir_owner_nodes,fn_sig,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_method_return_type(&self) -> u8 { 0 }
}
impl Foo {
#[rustc_clean(
cfg="cfail2",
- except="hir_owner,hir_owner_nodes,fn_sig,typeck_tables_of,optimized_mir"
+ except="hir_owner,hir_owner_nodes,fn_sig,typeck,optimized_mir"
)]
#[rustc_clean(cfg="cfail3")]
pub unsafe fn make_method_unsafe(&self) { }
#[rustc_clean(cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
impl Foo {
- #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,typeck_tables_of")]
+ #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,typeck")]
#[rustc_clean(cfg="cfail3")]
pub extern fn make_method_extern(&self) { }
}
#[rustc_clean(cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
impl Foo {
- #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,typeck_tables_of")]
+ #[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,fn_sig,typeck")]
#[rustc_clean(cfg="cfail3")]
pub extern "system" fn change_method_calling_convention(&self) { }
}
#[rustc_clean(cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
impl Foo {
- // Warning: Note that `typeck_tables_of` are coming up clean here.
+ // Warning: Note that `typeck` is coming up clean here.
// The addition or removal of lifetime parameters that don't
// appear in the arguments or fn body in any way does not, in
- // fact, affect the `typeck_tables_of` in any semantic way (at least
+ // fact, affect the `typeck` in any semantic way (at least
// as of this writing). **However,** altering the order of
- // lowering **can** cause it appear to affect the `typeck_tables_of`:
+ // lowering **can** cause it to appear to affect the `typeck`:
// if we lower generics before the body, then the `HirId` for
// things in the body will be affected. So if you start to see
- // `typeck_tables_of` appear dirty, that might be the cause. -nmatsakis
+ // `typeck` appear dirty, that might be the cause. -nmatsakis
#[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes")]
#[rustc_clean(cfg="cfail3")]
pub fn add_lifetime_parameter_to_method<'a>(&self) { }
#[rustc_clean(cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
impl Foo {
- // Warning: Note that `typeck_tables_of` are coming up clean here.
+ // Warning: Note that `typeck` is coming up clean here.
// The addition or removal of type parameters that don't appear in
// the arguments or fn body in any way does not, in fact, affect
- // the `typeck_tables_of` in any semantic way (at least as of this
+ // the `typeck` in any semantic way (at least as of this
// writing). **However,** altering the order of lowering **can**
- // cause it appear to affect the `typeck_tables_of`: if we lower
+ // cause it to appear to affect the `typeck`: if we lower
// generics before the body, then the `HirId` for things in the
- // body will be affected. So if you start to see `typeck_tables_of`
+ // body will be affected. So if you start to see `typeck`
// appear dirty, that might be the cause. -nmatsakis
#[rustc_clean(
cfg="cfail2",
#[rustc_clean(cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
impl Foo {
- // Warning: Note that `typeck_tables_of` are coming up clean here.
+ // Warning: Note that `typeck` is coming up clean here.
// The addition or removal of bounds that don't appear in the
// arguments or fn body in any way does not, in fact, affect the
- // `typeck_tables_of` in any semantic way (at least as of this
+ // `typeck` in any semantic way (at least as of this
// writing). **However,** altering the order of lowering **can**
- // cause it appear to affect the `typeck_tables_of`: if we lower
+ // cause it to appear to affect the `typeck`: if we lower
// generics before the body, then the `HirId` for things in the
- // body will be affected. So if you start to see `typeck_tables_of`
+ // body will be affected. So if you start to see `typeck`
// appear dirty, that might be the cause. -nmatsakis
#[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,generics_of,predicates_of,\
type_of")]
#[rustc_clean(cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
impl Foo {
- // Warning: Note that `typeck_tables_of` are coming up clean here.
+ // Warning: Note that `typeck` is coming up clean here.
// The addition or removal of bounds that don't appear in the
// arguments or fn body in any way does not, in fact, affect the
- // `typeck_tables_of` in any semantic way (at least as of this
+ // `typeck` in any semantic way (at least as of this
// writing). **However,** altering the order of lowering **can**
- // cause it appear to affect the `typeck_tables_of`: if we lower
+ // cause it to appear to affect the `typeck`: if we lower
// generics before the body, then the `HirId` for things in the
- // body will be affected. So if you start to see `typeck_tables_of`
+ // body will be affected. So if you start to see `typeck`
// appear dirty, that might be the cause. -nmatsakis
#[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes,predicates_of")]
#[rustc_clean(cfg="cfail3")]
impl<T> Bar<T> {
#[rustc_clean(
cfg="cfail2",
- except="generics_of,fn_sig,typeck_tables_of,type_of,optimized_mir"
+ except="generics_of,fn_sig,typeck,type_of,optimized_mir"
)]
#[rustc_clean(cfg="cfail3")]
pub fn add_type_parameter_to_impl(&self) { }
#[rustc_clean(cfg="cfail2", except="hir_owner,hir_owner_nodes")]
#[rustc_clean(cfg="cfail3")]
impl Bar<u64> {
- #[rustc_clean(cfg="cfail2", except="fn_sig,optimized_mir,typeck_tables_of")]
+ #[rustc_clean(cfg="cfail2", except="fn_sig,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_impl_self_type(&self) { }
}
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,typeck_tables_of")]
+ except="hir_owner_nodes,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_type() {
let _x: u32 = 2u32;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,typeck_tables_of,optimized_mir")]
+ except="hir_owner_nodes,typeck,optimized_mir")]
#[rustc_clean(cfg="cfail3")]
pub fn change_type() {
let _x: u8 = 2;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,typeck_tables_of,optimized_mir")]
+ except="hir_owner_nodes,typeck,optimized_mir")]
#[rustc_clean(cfg="cfail3")]
pub fn change_mutability_of_reference_type() {
let _x: &mut u64;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,typeck_tables_of,optimized_mir")]
+ except="hir_owner_nodes,typeck,optimized_mir")]
#[rustc_clean(cfg="cfail3")]
pub fn change_mutability_of_slot() {
let _x: u64 = 0;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,typeck_tables_of,optimized_mir")]
+ except="hir_owner_nodes,typeck,optimized_mir")]
#[rustc_clean(cfg="cfail3")]
pub fn change_simple_binding_to_pattern() {
let (_a, _b) = (0u8, 'x');
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,typeck_tables_of,optimized_mir")]
+ except="hir_owner_nodes,typeck,optimized_mir")]
#[rustc_clean(cfg="cfail3")]
pub fn add_ref_in_pattern() {
let (ref _a, _b) = (1u8, 'y');
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,typeck_tables_of,optimized_mir")]
+ except="hir_owner_nodes,typeck,optimized_mir")]
#[rustc_clean(cfg="cfail3")]
pub fn add_amp_in_pattern() {
let (&_a, _b) = (&1u8, 'y');
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,typeck_tables_of,optimized_mir")]
+ except="hir_owner_nodes,typeck,optimized_mir")]
#[rustc_clean(cfg="cfail3")]
pub fn change_mutability_of_binding_in_pattern() {
let (mut _a, _b) = (99u8, 'q');
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,typeck_tables_of,optimized_mir")]
+ except="hir_owner_nodes,typeck,optimized_mir")]
#[rustc_clean(cfg="cfail3")]
pub fn add_initializer() {
let _x: i16 = 3i16;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_break() {
let mut _x = 0;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_break_label() {
let mut _x = 0;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_continue_label() {
let mut _x = 0;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_continue_to_break() {
let mut _x = 0;
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+ except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_arm(x: u32) -> u32 {
match x {
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+ except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_guard_clause(x: u32, y: bool) -> u32 {
match x {
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+ except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_guard_clause(x: u32, y: bool) -> u32 {
match x {
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+ except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_at_binding(x: u32) -> u32 {
match x {
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+ except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_simple_name_to_pattern(x: u32) -> u32 {
match (x, x & 1) {
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+ except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_mutability_of_binding_in_pattern(x: u32) -> u32 {
match (x, x & 1) {
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+ except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_ref_to_binding_in_pattern(x: u32) -> u32 {
match (x, x & 1) {
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
-except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_amp_to_binding_in_pattern(x: u32) -> u32 {
match (&x, x & 1) {
#[cfg(not(cfail1))]
#[rustc_clean(cfg="cfail2",
- except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+ except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_alternative_to_arm(x: u32) -> u32 {
match x {
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_field_order_regular_struct() -> RegularStruct {
RegularStruct {
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_field_regular_struct() -> RegularStruct {
let struct1 = RegularStruct {
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,optimized_mir,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_field_label_regular_struct() -> RegularStruct {
let struct1 = RegularStruct {
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_constructor_path_regular_struct() {
let _ = RegularStruct2 {
#[rustc_clean(
cfg="cfail2",
- except="fn_sig,hir_owner,hir_owner_nodes,optimized_mir,typeck_tables_of"
+ except="fn_sig,hir_owner,hir_owner_nodes,optimized_mir,typeck"
)]
#[rustc_clean(cfg="cfail3")]
pub fn function() -> Struct {
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes,typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn change_constructor_path_tuple_struct() {
let _ = TupleStruct2(0, 1, 2);
#[rustc_clean(
cfg="cfail2",
- except="fn_sig,hir_owner,hir_owner_nodes,optimized_mir,typeck_tables_of"
+ except="fn_sig,hir_owner,hir_owner_nodes,optimized_mir,typeck"
)]
#[rustc_clean(cfg="cfail3")]
pub fn function() -> Struct {
}
#[cfg(not(cfail1))]
-#[rustc_clean(except="hir_owner_nodes,optimized_mir,typeck_tables_of", cfg="cfail2")]
+#[rustc_clean(except="hir_owner_nodes,optimized_mir,typeck", cfg="cfail2")]
#[rustc_clean(cfg="cfail3")]
pub fn type_cast(a: u8) -> u64 {
let b = a as u32;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_break() {
let mut _x = 0;
}
#[cfg(not(cfail1))]
-#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck_tables_of")]
+#[rustc_clean(cfg="cfail2", except="hir_owner_nodes, optimized_mir, typeck")]
#[rustc_clean(cfg="cfail3")]
pub fn add_break() {
let mut _x = 0;
mod y {
use x;
- #[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+ #[rustc_clean(label="typeck", cfg="rpass2")]
pub fn yyyy() {
x::xxxx();
}
mod z {
use y;
- #[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+ #[rustc_clean(label="typeck", cfg="rpass2")]
pub fn z() {
y::yyyy();
}
#[rustc_clean(label="hir_owner", cfg="rpass2")]
#[rustc_clean(label="hir_owner_nodes", cfg="rpass2")]
- #[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
+ #[rustc_dirty(label="typeck", cfg="rpass2")]
fn bar() {
().method();
}
#[rustc_clean(label="hir_owner", cfg="rpass2")]
#[rustc_clean(label="hir_owner_nodes", cfg="rpass2")]
- #[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+ #[rustc_clean(label="typeck", cfg="rpass2")]
fn baz() {
22; // no method call, traits in scope don't matter
}
extern crate a;
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
-#[rustc_clean(label="typeck_tables_of", cfg="rpass3")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass3")]
pub fn use_X() -> u32 {
let x: a::X = 22;
x as u32
}
-#[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
-#[rustc_clean(label="typeck_tables_of", cfg="rpass3")]
+#[rustc_clean(label="typeck", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass3")]
pub fn use_Y() {
let x: a::Y = 'c';
}
pub mod y {
use x;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
#[rustc_clean(label="optimized_mir", cfg="cfail2")]
pub fn y() {
x::x();
pub mod z {
use y;
- #[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+ #[rustc_clean(label="typeck", cfg="cfail2")]
#[rustc_clean(label="optimized_mir", cfg="cfail2")]
pub fn z() {
y::y();
pub y: char
}
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
pub fn use_X(x: X) -> u32 {
x.x as u32
}
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
pub fn use_EmbedX(embed: EmbedX) -> u32 {
embed.x.x as u32
}
-#[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass2")]
pub fn use_Y() {
let x: Y = Y { y: 'c' };
}
pub y: char
}
-#[rustc_dirty(label="typeck_tables_of", cfg="cfail2")]
+#[rustc_dirty(label="typeck", cfg="cfail2")]
pub fn use_X() -> u32 {
let x: X = X { x: 22 };
//[cfail2]~^ ERROR struct `X` has no field named `x`
//[cfail2]~^ ERROR no field `x` on type `X`
}
-#[rustc_dirty(label="typeck_tables_of", cfg="cfail2")]
+#[rustc_dirty(label="typeck", cfg="cfail2")]
pub fn use_EmbedX(embed: EmbedX) -> u32 {
embed.x.x as u32
//[cfail2]~^ ERROR no field `x` on type `X`
}
-#[rustc_clean(label="typeck_tables_of", cfg="cfail2")]
+#[rustc_clean(label="typeck", cfg="cfail2")]
pub fn use_Y() {
let x: Y = Y { y: 'c' };
}
pub y: char
}
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
pub fn use_X() -> u32 {
let x: X = X { x: 22 };
x.x as u32
}
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
pub fn use_EmbedX(x: EmbedX) -> u32 {
let x: X = X { x: 22 };
x.x as u32
}
-#[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass2")]
pub fn use_Y() {
let x: Y = Y { y: 'c' };
}
use a::*;
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
pub fn use_X() -> u32 {
let x: X = X { x: 22 };
x.x as u32
}
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
pub fn use_EmbedX(embed: EmbedX) -> u32 {
embed.x.x as u32
}
-#[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass2")]
pub fn use_Y() {
let x: Y = Y { y: 'c' };
}
pub y: char
}
-#[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass2")]
pub fn use_X() -> u32 {
let x: X = X { x: 22 };
x.x as u32
}
-#[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass2")]
pub fn use_EmbedX(x: EmbedX) -> u32 {
let x: X = X { x: 22 };
x.x as u32
}
-#[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass2")]
pub fn use_Y() {
let x: Y = Y { y: 'c' };
}
pub y: char
}
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
pub fn use_X(x: X) -> u32 {
x.x as u32
}
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
pub fn use_EmbedX(embed: EmbedX) -> u32 {
embed.x.x as u32
}
-#[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass2")]
pub fn use_Y() {
let x: Y = Y { y: 'c' };
}
extern crate a;
-#[rustc_dirty(label="typeck_tables_of", cfg="rpass2")]
-#[rustc_clean(label="typeck_tables_of", cfg="rpass3")]
+#[rustc_dirty(label="typeck", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass3")]
pub fn use_X() -> u32 {
let x: a::X = 22;
x as u32
}
-#[rustc_clean(label="typeck_tables_of", cfg="rpass2")]
-#[rustc_clean(label="typeck_tables_of", cfg="rpass3")]
+#[rustc_clean(label="typeck", cfg="rpass2")]
+#[rustc_clean(label="typeck", cfg="rpass3")]
pub fn use_Y() {
let x: a::Y = 'c';
}
fn bar() -> bool {
let mut _0: bool; // return place in scope 0 at $DIR/instrument_coverage.rs:18:13: 18:17
-+ let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2
++ let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:18:18: 18:18
bb0: {
-+ StorageLive(_1); // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2
-+ _1 = const std::intrinsics::count_code_region(const 0_u32, const 484_u32, const 513_u32) -> bb2; // scope 0 at $DIR/instrument_coverage.rs:18:1: 20:2
++ StorageLive(_1); // scope 0 at $DIR/instrument_coverage.rs:18:18: 18:18
++ _1 = const std::intrinsics::count_code_region(const 10208505205182607101_u64, const 0_u32, const 501_u32, const 513_u32) -> bb2; // scope 0 at $DIR/instrument_coverage.rs:18:18: 18:18
+ // ty::Const
-+ // + ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}
++ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}
+ // + val: Value(Scalar(<ZST>))
+ // mir::Constant
-+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
-+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
++ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
++ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
++ // ty::Const
++ // + ty: u64
++ // + val: Value(Scalar(0x8dabe565aaa2aefd))
++ // mir::Constant
++ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
++ // + literal: Const { ty: u64, val: Value(Scalar(0x8dabe565aaa2aefd)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000000))
+ // mir::Constant
-+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
++ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) }
+ // ty::Const
+ // + ty: u32
-+ // + val: Value(Scalar(0x000001e4))
++ // + val: Value(Scalar(0x000001f5))
+ // mir::Constant
-+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
-+ // + literal: Const { ty: u32, val: Value(Scalar(0x000001e4)) }
++ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
++ // + literal: Const { ty: u32, val: Value(Scalar(0x000001f5)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000201))
+ // mir::Constant
-+ // + span: $DIR/instrument_coverage.rs:18:1: 18:1
++ // + span: $DIR/instrument_coverage.rs:18:18: 18:18
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000201)) }
+ }
+
let mut _1: (); // in scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
let mut _2: bool; // in scope 0 at $DIR/instrument_coverage.rs:11:12: 11:17
let mut _3: !; // in scope 0 at $DIR/instrument_coverage.rs:11:18: 13:10
-+ let mut _4: (); // in scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
++ let mut _4: (); // in scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11
bb0: {
- falseUnwind -> [real: bb1, cleanup: bb2]; // scope 0 at $DIR/instrument_coverage.rs:10:5: 14:6
-+ StorageLive(_4); // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
-+ _4 = const std::intrinsics::count_code_region(const 0_u32, const 387_u32, const 465_u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:9:1: 15:2
++ StorageLive(_4); // scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11
++ _4 = const std::intrinsics::count_code_region(const 16004455475339839479_u64, const 0_u32, const 397_u32, const 465_u32) -> bb7; // scope 0 at $DIR/instrument_coverage.rs:9:11: 9:11
+ // ty::Const
-+ // + ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}
++ // + ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}
+ // + val: Value(Scalar(<ZST>))
+ // mir::Constant
-+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
-+ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
++ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
++ // + literal: Const { ty: unsafe extern "rust-intrinsic" fn(u64, u32, u32, u32) {std::intrinsics::count_code_region}, val: Value(Scalar(<ZST>)) }
++ // ty::Const
++ // + ty: u64
++ // + val: Value(Scalar(0xde1b3f75a72fc7f7))
++ // mir::Constant
++ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
++ // + literal: Const { ty: u64, val: Value(Scalar(0xde1b3f75a72fc7f7)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x00000000))
+ // mir::Constant
-+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
++ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000000)) }
+ // ty::Const
+ // + ty: u32
-+ // + val: Value(Scalar(0x00000183))
++ // + val: Value(Scalar(0x0000018d))
+ // mir::Constant
-+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
-+ // + literal: Const { ty: u32, val: Value(Scalar(0x00000183)) }
++ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
++ // + literal: Const { ty: u32, val: Value(Scalar(0x0000018d)) }
+ // ty::Const
+ // + ty: u32
+ // + val: Value(Scalar(0x000001d1))
+ // mir::Constant
-+ // + span: $DIR/instrument_coverage.rs:9:1: 9:1
++ // + span: $DIR/instrument_coverage.rs:9:11: 9:11
+ // + literal: Const { ty: u32, val: Value(Scalar(0x000001d1)) }
}
}
bb10: {
- _4 = const (); // scope 0 at $DIR/issue-49232.rs:10:25: 10:30
- // ty::Const
- // + ty: ()
- // + val: Value(Scalar(<ZST>))
- // mir::Constant
- // + span: $DIR/issue-49232.rs:10:25: 10:30
- // + literal: Const { ty: (), val: Value(Scalar(<ZST>)) }
unreachable; // scope 0 at $DIR/issue-49232.rs:10:25: 10:30
}
let mut _24: &[&str; 3]; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
let _25: &[&str; 3]; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
let _26: [&str; 3]; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _27: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _28: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _29: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _30: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _31: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _32: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _33: &[std::fmt::ArgumentV1]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _34: &[std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let _35: &[std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let _36: [std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _37: (&&i32, &&i32); // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _38: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _39: &i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _40: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _41: &i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _44: std::fmt::ArgumentV1; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _45: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _46: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _47: std::fmt::ArgumentV1; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _48: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _49: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _27: &[std::fmt::ArgumentV1]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _28: &[std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let _29: &[std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let _30: [std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _31: (&&i32, &&i32); // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _32: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let _33: &i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _34: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let _35: &i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _38: std::fmt::ArgumentV1; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _39: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _40: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _41: std::fmt::ArgumentV1; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _42: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _43: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
scope 1 {
debug split => _1; // in scope 1 at $DIR/issue-73223.rs:2:9: 2:14
let _6: std::option::Option<i32>; // in scope 1 at $DIR/issue-73223.rs:7:9: 7:14
debug _prev => _6; // in scope 3 at $DIR/issue-73223.rs:7:9: 7:14
let _13: &i32; // in scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
let _14: &i32; // in scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _51: &i32; // in scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _45: &i32; // in scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
scope 4 {
debug left_val => _13; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
debug right_val => _14; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _42: &&i32; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _43: &&i32; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _50: &[&str; 3]; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let _36: &&i32; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let _37: &&i32; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _44: &[&str; 3]; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
scope 5 {
- debug arg0 => _42; // in scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- debug arg1 => _43; // in scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ debug arg0 => _36; // in scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ debug arg1 => _37; // in scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
scope 6 {
- debug x => _45; // in scope 6 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- debug f => _46; // in scope 6 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- let mut _52: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _53: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _54: &core::fmt::Opaque; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _55: &&i32; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
+ debug x => _39; // in scope 6 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ debug f => _40; // in scope 6 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ let mut _46: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _47: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _48: &core::fmt::Opaque; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _49: &&i32; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
}
scope 8 {
- debug x => _48; // in scope 8 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- debug f => _49; // in scope 8 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- let mut _56: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _57: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _58: &core::fmt::Opaque; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _59: &&i32; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
+ debug x => _42; // in scope 8 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ debug f => _43; // in scope 8 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ let mut _50: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _51: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _52: &core::fmt::Opaque; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _53: &&i32; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
}
}
scope 10 {
debug pieces => _23; // in scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- debug args => _33; // in scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- let mut _60: &[&str]; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _61: std::option::Option<&[std::fmt::rt::v1::Argument]>; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _62: &[std::fmt::ArgumentV1]; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
+ debug args => _27; // in scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ let mut _54: &[&str]; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _55: std::option::Option<&[std::fmt::rt::v1::Argument]>; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _56: &[std::fmt::ArgumentV1]; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
}
}
}
StorageLive(_10); // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
_10 = &_1; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageLive(_11); // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _51 = const main::promoted[1]; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _45 = const main::promoted[1]; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
// ty::Const
// + ty: &i32
// + val: Unevaluated(WithOptConstParam { did: DefId(0:3 ~ issue_73223[317d]::main[0]), const_param_did: None }, [], Some(promoted[1]))
// mir::Constant
// + span: $SRC_DIR/libcore/macros/mod.rs:LL:COL
// + literal: Const { ty: &i32, val: Unevaluated(WithOptConstParam { did: DefId(0:3 ~ issue_73223[317d]::main[0]), const_param_did: None }, [], Some(promoted[1])) }
- _11 = _51; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _11 = _45; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
(_9.0: &i32) = move _10; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
(_9.1: &i32) = move _11; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageDead(_11); // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageLive(_23); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageLive(_24); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageLive(_25); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _50 = const main::promoted[0]; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _44 = const main::promoted[0]; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
// ty::Const
// + ty: &[&str; 3]
// + val: Unevaluated(WithOptConstParam { did: DefId(0:3 ~ issue_73223[317d]::main[0]), const_param_did: None }, [], Some(promoted[0]))
// mir::Constant
// + span: $SRC_DIR/libcore/macros/mod.rs:LL:COL
// + literal: Const { ty: &[&str; 3], val: Unevaluated(WithOptConstParam { did: DefId(0:3 ~ issue_73223[317d]::main[0]), const_param_did: None }, [], Some(promoted[0])) }
- _25 = _50; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _25 = _44; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
_24 = _25; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
_23 = move _24 as &[&str] (Pointer(Unsize)); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageDead(_24); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_33); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_34); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_35); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_36); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_37); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_38); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_39); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _39 = _13; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _38 = &_39; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_40); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_41); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _41 = _14; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _40 = &_41; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- (_37.0: &&i32) = move _38; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- (_37.1: &&i32) = move _40; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_40); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_38); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_42); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _42 = (_37.0: &&i32); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_43); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _43 = (_37.1: &&i32); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_44); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_45); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _45 = _42; // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_46); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _46 = const <&i32 as std::fmt::Debug>::fmt as for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> (Pointer(ReifyFnPointer)); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_27); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_28); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_29); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_30); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_31); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_32); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_33); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _33 = _13; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _32 = &_33; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_34); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_35); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _35 = _14; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _34 = &_35; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ (_31.0: &&i32) = move _32; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ (_31.1: &&i32) = move _34; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_34); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_32); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_36); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _36 = (_31.0: &&i32); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_37); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _37 = (_31.1: &&i32); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_38); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_39); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _39 = _36; // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_40); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _40 = const <&i32 as std::fmt::Debug>::fmt as for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> (Pointer(ReifyFnPointer)); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
// ty::Const
// + ty: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {<&i32 as std::fmt::Debug>::fmt}
// + val: Value(Scalar(<ZST>))
// mir::Constant
// + span: $SRC_DIR/libcore/macros/mod.rs:LL:COL
// + literal: Const { ty: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {<&i32 as std::fmt::Debug>::fmt}, val: Value(Scalar(<ZST>)) }
- StorageLive(_52); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_53); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _53 = _46; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _52 = const std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>(move _53) -> bb6; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_46); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_47); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _47 = _40; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _46 = const std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>(move _47) -> bb6; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
// ty::Const
// + ty: unsafe extern "rust-intrinsic" fn(for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) -> for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>}
// + val: Value(Scalar(<ZST>))
}
bb6: {
- StorageDead(_53); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_54); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_55); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _55 = _45; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _54 = const std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>(move _55) -> bb7; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_47); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_48); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_49); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _49 = _39; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _48 = const std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>(move _49) -> bb7; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
// ty::Const
// + ty: unsafe extern "rust-intrinsic" fn(&&i32) -> &core::fmt::Opaque {std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>}
// + val: Value(Scalar(<ZST>))
}
bb7: {
- StorageDead(_55); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_44.0: &core::fmt::Opaque) = move _54; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_44.1: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) = move _52; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_54); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_52); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_46); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_45); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_47); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_48); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _48 = _43; // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_49); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _49 = const <&i32 as std::fmt::Debug>::fmt as for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> (Pointer(ReifyFnPointer)); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageDead(_49); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_38.0: &core::fmt::Opaque) = move _48; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_38.1: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) = move _46; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_48); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_46); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_40); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_39); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_41); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_42); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _42 = _37; // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_43); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _43 = const <&i32 as std::fmt::Debug>::fmt as for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> (Pointer(ReifyFnPointer)); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
// ty::Const
// + ty: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {<&i32 as std::fmt::Debug>::fmt}
// + val: Value(Scalar(<ZST>))
// mir::Constant
// + span: $SRC_DIR/libcore/macros/mod.rs:LL:COL
// + literal: Const { ty: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {<&i32 as std::fmt::Debug>::fmt}, val: Value(Scalar(<ZST>)) }
- StorageLive(_56); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_57); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _57 = _49; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _56 = const std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>(move _57) -> bb8; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_50); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_51); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _51 = _43; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _50 = const std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>(move _51) -> bb8; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
// ty::Const
// + ty: unsafe extern "rust-intrinsic" fn(for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) -> for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>}
// + val: Value(Scalar(<ZST>))
}
bb8: {
- StorageDead(_57); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_58); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_59); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _59 = _48; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _58 = const std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>(move _59) -> bb9; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_51); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_52); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_53); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _53 = _42; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _52 = const std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>(move _53) -> bb9; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
// ty::Const
// + ty: unsafe extern "rust-intrinsic" fn(&&i32) -> &core::fmt::Opaque {std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>}
// + val: Value(Scalar(<ZST>))
}
bb9: {
- StorageDead(_59); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_47.0: &core::fmt::Opaque) = move _58; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_47.1: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) = move _56; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_58); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_56); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_49); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_48); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- _36 = [move _44, move _47]; // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_47); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_44); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_43); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_42); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- _35 = &_36; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- _34 = _35; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- _33 = move _34 as &[std::fmt::ArgumentV1] (Pointer(Unsize)); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_34); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_60); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _60 = _23; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_61); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- discriminant(_61) = 0; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_62); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _62 = _33; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_22.0: &[&str]) = move _60; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_22.1: std::option::Option<&[std::fmt::rt::v1::Argument]>) = move _61; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_22.2: &[std::fmt::ArgumentV1]) = move _62; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_62); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_61); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_60); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_33); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_53); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_41.0: &core::fmt::Opaque) = move _52; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_41.1: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) = move _50; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_52); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_50); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_43); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_42); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ _30 = [move _38, move _41]; // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_41); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_38); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_37); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_36); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ _29 = &_30; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ _28 = _29; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ _27 = move _28 as &[std::fmt::ArgumentV1] (Pointer(Unsize)); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_28); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_54); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _54 = _23; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_55); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ discriminant(_55) = 0; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_56); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _56 = _27; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_22.0: &[&str]) = move _54; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_22.1: std::option::Option<&[std::fmt::rt::v1::Argument]>) = move _55; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_22.2: &[std::fmt::ArgumentV1]) = move _56; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_56); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_55); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_54); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_27); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
StorageDead(_23); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
_21 = &_22; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
_20 = _21; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
let mut _24: &[&str; 3]; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
let _25: &[&str; 3]; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
let _26: [&str; 3]; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _27: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _28: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _29: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _30: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _31: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _32: &str; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _33: &[std::fmt::ArgumentV1]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _34: &[std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let _35: &[std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let _36: [std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _37: (&&i32, &&i32); // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _38: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _39: &i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _40: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _41: &i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _44: std::fmt::ArgumentV1; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _45: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _46: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _47: std::fmt::ArgumentV1; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _48: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _49: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _27: &[std::fmt::ArgumentV1]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _28: &[std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let _29: &[std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let _30: [std::fmt::ArgumentV1; 2]; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _31: (&&i32, &&i32); // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _32: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let _33: &i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _34: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let _35: &i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _38: std::fmt::ArgumentV1; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _39: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _40: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _41: std::fmt::ArgumentV1; // in scope 0 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _42: &&i32; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _43: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 0 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
scope 1 {
debug split => _1; // in scope 1 at $DIR/issue-73223.rs:2:9: 2:14
let _6: std::option::Option<i32>; // in scope 1 at $DIR/issue-73223.rs:7:9: 7:14
debug _prev => _6; // in scope 3 at $DIR/issue-73223.rs:7:9: 7:14
let _13: &i32; // in scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
let _14: &i32; // in scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _51: &i32; // in scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _45: &i32; // in scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
scope 4 {
debug left_val => _13; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
debug right_val => _14; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _42: &&i32; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let _43: &&i32; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- let mut _50: &[&str; 3]; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let _36: &&i32; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let _37: &&i32; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ let mut _44: &[&str; 3]; // in scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
scope 5 {
- debug arg0 => _42; // in scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- debug arg1 => _43; // in scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ debug arg0 => _36; // in scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ debug arg1 => _37; // in scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
scope 6 {
- debug x => _45; // in scope 6 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- debug f => _46; // in scope 6 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- let mut _52: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _53: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _54: &core::fmt::Opaque; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _55: &&i32; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
+ debug x => _39; // in scope 6 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ debug f => _40; // in scope 6 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ let mut _46: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _47: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _48: &core::fmt::Opaque; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _49: &&i32; // in scope 6 at $SRC_DIR/libstd/macros.rs:LL:COL
}
scope 8 {
- debug x => _48; // in scope 8 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- debug f => _49; // in scope 8 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- let mut _56: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _57: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _58: &core::fmt::Opaque; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _59: &&i32; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
+ debug x => _42; // in scope 8 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ debug f => _43; // in scope 8 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ let mut _50: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _51: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _52: &core::fmt::Opaque; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _53: &&i32; // in scope 8 at $SRC_DIR/libstd/macros.rs:LL:COL
}
}
scope 10 {
debug pieces => _23; // in scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- debug args => _33; // in scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- let mut _60: &[&str]; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _61: std::option::Option<&[std::fmt::rt::v1::Argument]>; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
- let mut _62: &[std::fmt::ArgumentV1]; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
+ debug args => _27; // in scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ let mut _54: &[&str]; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _55: std::option::Option<&[std::fmt::rt::v1::Argument]>; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
+ let mut _56: &[std::fmt::ArgumentV1]; // in scope 10 at $SRC_DIR/libstd/macros.rs:LL:COL
}
}
}
StorageLive(_10); // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
_10 = &_1; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageLive(_11); // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _51 = const main::promoted[1]; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _45 = const main::promoted[1]; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
// ty::Const
// + ty: &i32
// + val: Unevaluated(WithOptConstParam { did: DefId(0:3 ~ issue_73223[317d]::main[0]), const_param_did: None }, [], Some(promoted[1]))
// mir::Constant
// + span: $SRC_DIR/libcore/macros/mod.rs:LL:COL
// + literal: Const { ty: &i32, val: Unevaluated(WithOptConstParam { did: DefId(0:3 ~ issue_73223[317d]::main[0]), const_param_did: None }, [], Some(promoted[1])) }
- _11 = _51; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _11 = _45; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
(_9.0: &i32) = move _10; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
(_9.1: &i32) = move _11; // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageDead(_11); // scope 3 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageLive(_23); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageLive(_24); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageLive(_25); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _50 = const main::promoted[0]; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _44 = const main::promoted[0]; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
// ty::Const
// + ty: &[&str; 3]
// + val: Unevaluated(WithOptConstParam { did: DefId(0:3 ~ issue_73223[317d]::main[0]), const_param_did: None }, [], Some(promoted[0]))
// mir::Constant
// + span: $SRC_DIR/libcore/macros/mod.rs:LL:COL
// + literal: Const { ty: &[&str; 3], val: Unevaluated(WithOptConstParam { did: DefId(0:3 ~ issue_73223[317d]::main[0]), const_param_did: None }, [], Some(promoted[0])) }
- _25 = _50; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _25 = _44; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
_24 = _25; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
_23 = move _24 as &[&str] (Pointer(Unsize)); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
StorageDead(_24); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_33); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_34); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_35); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_36); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_37); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_38); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_39); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _39 = _13; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _38 = &_39; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_40); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_41); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _41 = _14; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _40 = &_41; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- (_37.0: &&i32) = move _38; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- (_37.1: &&i32) = move _40; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_40); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_38); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_42); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _42 = (_37.0: &&i32); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_43); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _43 = (_37.1: &&i32); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_44); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_45); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _45 = _42; // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_46); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _46 = const <&i32 as std::fmt::Debug>::fmt as for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> (Pointer(ReifyFnPointer)); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_27); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_28); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_29); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_30); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_31); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_32); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_33); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _33 = _13; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _32 = &_33; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_34); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_35); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _35 = _14; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _34 = &_35; // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ (_31.0: &&i32) = move _32; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ (_31.1: &&i32) = move _34; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_34); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_32); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_36); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _36 = (_31.0: &&i32); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_37); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _37 = (_31.1: &&i32); // scope 4 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_38); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_39); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _39 = _36; // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_40); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _40 = const <&i32 as std::fmt::Debug>::fmt as for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> (Pointer(ReifyFnPointer)); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
// ty::Const
// + ty: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {<&i32 as std::fmt::Debug>::fmt}
// + val: Value(Scalar(<ZST>))
// mir::Constant
// + span: $SRC_DIR/libcore/macros/mod.rs:LL:COL
// + literal: Const { ty: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {<&i32 as std::fmt::Debug>::fmt}, val: Value(Scalar(<ZST>)) }
- StorageLive(_52); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_53); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _53 = _46; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _52 = const std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>(move _53) -> bb6; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_46); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_47); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _47 = _40; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _46 = const std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>(move _47) -> bb6; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
// ty::Const
// + ty: unsafe extern "rust-intrinsic" fn(for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) -> for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>}
// + val: Value(Scalar(<ZST>))
}
bb6: {
- StorageDead(_53); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_54); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_55); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _55 = _45; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _54 = const std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>(move _55) -> bb7; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_47); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_48); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_49); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _49 = _39; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _48 = const std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>(move _49) -> bb7; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
// ty::Const
// + ty: unsafe extern "rust-intrinsic" fn(&&i32) -> &core::fmt::Opaque {std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>}
// + val: Value(Scalar(<ZST>))
}
bb7: {
- StorageDead(_55); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_44.0: &core::fmt::Opaque) = move _54; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_44.1: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) = move _52; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_54); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_52); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_46); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_45); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_47); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_48); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _48 = _43; // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- StorageLive(_49); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
- _49 = const <&i32 as std::fmt::Debug>::fmt as for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> (Pointer(ReifyFnPointer)); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageDead(_49); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_38.0: &core::fmt::Opaque) = move _48; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_38.1: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) = move _46; // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_48); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_46); // scope 7 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_40); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_39); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_41); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_42); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _42 = _37; // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ StorageLive(_43); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
+ _43 = const <&i32 as std::fmt::Debug>::fmt as for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> (Pointer(ReifyFnPointer)); // scope 5 at $SRC_DIR/libcore/macros/mod.rs:LL:COL
// ty::Const
// + ty: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {<&i32 as std::fmt::Debug>::fmt}
// + val: Value(Scalar(<ZST>))
// mir::Constant
// + span: $SRC_DIR/libcore/macros/mod.rs:LL:COL
// + literal: Const { ty: for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {<&i32 as std::fmt::Debug>::fmt}, val: Value(Scalar(<ZST>)) }
- StorageLive(_56); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_57); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _57 = _49; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _56 = const std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>(move _57) -> bb8; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_50); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_51); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _51 = _43; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _50 = const std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>(move _51) -> bb8; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
// ty::Const
// + ty: unsafe extern "rust-intrinsic" fn(for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) -> for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error> {std::intrinsics::transmute::<for<'r, 's, 't0> fn(&'r &i32, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>, for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>>}
// + val: Value(Scalar(<ZST>))
}
bb8: {
- StorageDead(_57); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_58); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_59); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _59 = _48; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _58 = const std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>(move _59) -> bb9; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_51); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_52); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_53); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _53 = _42; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _52 = const std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>(move _53) -> bb9; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
// ty::Const
// + ty: unsafe extern "rust-intrinsic" fn(&&i32) -> &core::fmt::Opaque {std::intrinsics::transmute::<&&i32, &core::fmt::Opaque>}
// + val: Value(Scalar(<ZST>))
}
bb9: {
- StorageDead(_59); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_47.0: &core::fmt::Opaque) = move _58; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_47.1: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) = move _56; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_58); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_56); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_49); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_48); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- _36 = [move _44, move _47]; // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_47); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_44); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_43); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_42); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- _35 = &_36; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- _34 = _35; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- _33 = move _34 as &[std::fmt::ArgumentV1] (Pointer(Unsize)); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageDead(_34); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
- StorageLive(_60); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _60 = _23; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_61); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- discriminant(_61) = 0; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageLive(_62); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- _62 = _33; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_22.0: &[&str]) = move _60; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_22.1: std::option::Option<&[std::fmt::rt::v1::Argument]>) = move _61; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- (_22.2: &[std::fmt::ArgumentV1]) = move _62; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_62); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_61); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_60); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
- StorageDead(_33); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_53); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_41.0: &core::fmt::Opaque) = move _52; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_41.1: for<'r, 's, 't0> fn(&'r core::fmt::Opaque, &'s mut std::fmt::Formatter<'t0>) -> std::result::Result<(), std::fmt::Error>) = move _50; // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_52); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_50); // scope 9 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_43); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_42); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ _30 = [move _38, move _41]; // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_41); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_38); // scope 5 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_37); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_36); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ _29 = &_30; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ _28 = _29; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ _27 = move _28 as &[std::fmt::ArgumentV1] (Pointer(Unsize)); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageDead(_28); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
+ StorageLive(_54); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _54 = _23; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_55); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ discriminant(_55) = 0; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageLive(_56); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ _56 = _27; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_22.0: &[&str]) = move _54; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_22.1: std::option::Option<&[std::fmt::rt::v1::Argument]>) = move _55; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ (_22.2: &[std::fmt::ArgumentV1]) = move _56; // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_56); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_55); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_54); // scope 10 at $SRC_DIR/libcore/fmt/mod.rs:LL:COL
+ StorageDead(_27); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
StorageDead(_23); // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
_21 = &_22; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
_20 = _21; // scope 4 at $SRC_DIR/libstd/macros.rs:LL:COL
((::alloc::fmt::format as
for<'r> fn(std::fmt::Arguments<'r>) -> std::string::String {std::fmt::format})(((::core::fmt::Arguments::new_v1
as
- fn(&[&str], &[std::fmt::ArgumentV1]) -> std::fmt::Arguments {std::fmt::Arguments::new_v1})((&([("test"
- as
- &str)]
- as
- [&str; 1])
- as
- &[&str; 1]),
- (&(match (()
- as
- ())
- {
- ()
- =>
- ([]
- as
- [std::fmt::ArgumentV1; 0]),
- }
- as
- [std::fmt::ArgumentV1; 0])
- as
- &[std::fmt::ArgumentV1; 0]))
+ fn(&[&'static str], &[std::fmt::ArgumentV1]) -> std::fmt::Arguments {std::fmt::Arguments::new_v1})((&([("test"
+ as
+ &str)]
+ as
+ [&str; 1])
+ as
+ &[&str; 1]),
+ (&(match (()
+ as
+ ())
+ {
+ ()
+ =>
+ ([]
+ as
+ [std::fmt::ArgumentV1; 0]),
+ }
+ as
+ [std::fmt::ArgumentV1; 0])
+ as
+ &[std::fmt::ArgumentV1; 0]))
as
std::fmt::Arguments))
as std::string::String);
-include ../tools.mk
-# only-mingw
+# only-windows-gnu
all:
$(CXX) foo.cpp -c -o $(TMPDIR)/foo.o
-include ../tools.mk
-# ignore-windows
# ignore-freebsd
-# FIXME: on windows `rustc --dep-info` produces Makefile dependency with
-# windows native paths (e.g. `c:\path\to\libfoo.a`)
-# but msys make seems to fail to recognize such paths, so test fails.
all:
$(RUSTC) --emit dep-info main.rs
--- /dev/null
+# needs-profiler-support
+# ignore-msvc
+
+# FIXME(richkadel): Debug the following problem, and reenable on Windows (by
+# removing the `# ignore-msvc` directive above). The current implementation
+# generates a segfault when running the instrumented `main` executable,
+# after the `main` program code executes, but before the process terminates.
+# This most likely points to a problem generating the LLVM "main.profraw"
+# file.
+
+-include ../tools.mk
+
+# This test makes sure that LLVM coverage maps are generated in LLVM IR.
+
+COMMON_FLAGS=-Zinstrument-coverage
+
+all:
+ # Compile the test program with instrumentation, and also generate LLVM IR
+ $(RUSTC) $(COMMON_FLAGS) main.rs
+
+ # Run it in order to generate some profiling data,
+ # with `LLVM_PROFILE_FILE=<profdata_file>` environment variable set to
+ # output the coverage stats for this run.
+ LLVM_PROFILE_FILE="$(TMPDIR)"/main.profraw \
+ $(call RUN,main)
+
+ # Postprocess the profiling data so it can be used by the llvm-cov tool
+ "$(LLVM_BIN_DIR)"/llvm-profdata merge --sparse \
+ "$(TMPDIR)"/main.profraw \
+ -o "$(TMPDIR)"/main.profdata
+
+ # Generate a coverage report using `llvm-cov show`. The output ordering
+ # can be non-deterministic, so ignore the return status. If the test fails
+ # when comparing the JSON `export`, the `show` output may be useful when
+ # debugging.
+ "$(LLVM_BIN_DIR)"/llvm-cov show \
+ --Xdemangler="$(RUST_DEMANGLER)" \
+ --show-line-counts-or-regions \
+ --instr-profile="$(TMPDIR)"/main.profdata \
+ $(call BIN,"$(TMPDIR)"/main) \
+ > "$(TMPDIR)"/actual_show_coverage.txt
+
+ # Compare the show coverage output
+ $(DIFF) typical_show_coverage.txt "$(TMPDIR)"/actual_show_coverage.txt || \
+ >&2 echo 'diff failed for `llvm-cov show` (might not be an error)'
+
+ # Generate a coverage report in JSON, using `llvm-cov export`, and fail if
+ # there are differences from the expected output.
+ "$(LLVM_BIN_DIR)"/llvm-cov export \
+ --summary-only \
+ --instr-profile="$(TMPDIR)"/main.profdata \
+ $(call BIN,"$(TMPDIR)"/main) \
+ | "$(PYTHON)" prettify_json.py \
+ > "$(TMPDIR)"/actual_export_coverage.json
+
+ # Check that the exported JSON coverage data matches what we expect
+ $(DIFF) expected_export_coverage.json "$(TMPDIR)"/actual_export_coverage.json
--- /dev/null
+{
+ "data": [
+ {
+ "files": [
+ {
+ "filename": "main.rs",
+ "summary": {
+ "functions": {
+ "count": 7,
+ "covered": 5,
+ "percent": 71.42857142857143
+ },
+ "instantiations": {
+ "count": 8,
+ "covered": 6,
+ "percent": 75
+ },
+ "lines": {
+ "count": 30,
+ "covered": 25,
+ "percent": 83.33333333333334
+ },
+ "regions": {
+ "count": 7,
+ "covered": 5,
+ "notcovered": 2,
+ "percent": 71.42857142857143
+ }
+ }
+ }
+ ],
+ "totals": {
+ "functions": {
+ "count": 7,
+ "covered": 5,
+ "percent": 71.42857142857143
+ },
+ "instantiations": {
+ "count": 8,
+ "covered": 6,
+ "percent": 75
+ },
+ "lines": {
+ "count": 30,
+ "covered": 25,
+ "percent": 83.33333333333334
+ },
+ "regions": {
+ "count": 7,
+ "covered": 5,
+ "notcovered": 2,
+ "percent": 71.42857142857143
+ }
+ }
+ }
+ ],
+ "type": "llvm.coverage.json.export",
+ "version": "2.0.0"
+}
--- /dev/null
+pub fn will_be_called() -> &'static str {
+ let val = "called";
+ println!("{}", val);
+ val
+}
+
+pub fn will_not_be_called() -> bool {
+ println!("should not have been called");
+ false
+}
+
+pub fn print<T>(left: &str, value: T, right: &str)
+where
+ T: std::fmt::Display,
+{
+ println!("{}{}{}", left, value, right);
+}
+
+pub fn wrap_with<F, T>(inner: T, should_wrap: bool, wrapper: F)
+where
+ F: FnOnce(&T)
+{
+ if should_wrap {
+ wrapper(&inner)
+ }
+}
+
+fn main() {
+ let less = 1;
+ let more = 100;
+
+ if less < more {
+ wrap_with(will_be_called(), less < more, |inner| print(" ***", inner, "*** "));
+ wrap_with(will_be_called(), more < less, |inner| print(" ***", inner, "*** "));
+ } else {
+ wrap_with(will_not_be_called(), true, |inner| print("wrapped result is: ", inner, ""));
+ }
+}
--- /dev/null
+#!/usr/bin/env python
+
+import sys
+import json
+
+# Try to decode line in order to ensure it is a valid JSON document
+for line in sys.stdin:
+ parsed = json.loads(line)
+ print (json.dumps(parsed, indent=2, separators=(',', ': '), sort_keys=True))
--- /dev/null
+ 1| 2|pub fn will_be_called() -> &'static str {
+ 2| 2| let val = "called";
+ 3| 2| println!("{}", val);
+ 4| 2| val
+ 5| 2|}
+ 6| |
+ 7| 0|pub fn will_not_be_called() -> bool {
+ 8| 0| println!("should not have been called");
+ 9| 0| false
+ 10| 0|}
+ 11| |
+ 12| |pub fn print<T>(left: &str, value: T, right: &str)
+ 13| |where
+ 14| | T: std::fmt::Display,
+ 15| 1|{
+ 16| 1| println!("{}{}{}", left, value, right);
+ 17| 1|}
+ 18| |
+ 19| |pub fn wrap_with<F, T>(inner: T, should_wrap: bool, wrapper: F)
+ 20| |where
+ 21| | F: FnOnce(&T)
+ 22| 2|{
+ 23| 2| if should_wrap {
+ 24| 2| wrapper(&inner)
+ 25| 2| }
+ 26| 2|}
+ ------------------
+ | main[317d481089b8c8fe]::wrap_with::<main[317d481089b8c8fe]::main::{closure#0}, &str>:
+ | 22| 1|{
+ | 23| 1| if should_wrap {
+ | 24| 1| wrapper(&inner)
+ | 25| 1| }
+ | 26| 1|}
+ ------------------
+ | main[317d481089b8c8fe]::wrap_with::<main[317d481089b8c8fe]::main::{closure#1}, &str>:
+ | 22| 1|{
+ | 23| 1| if should_wrap {
+ | 24| 1| wrapper(&inner)
+ | 25| 1| }
+ | 26| 1|}
+ ------------------
+ 27| |
+ 28| 1|fn main() {
+ 29| 1| let less = 1;
+ 30| 1| let more = 100;
+ 31| 1|
+ 32| 1| if less < more {
+ 33| 1| wrap_with(will_be_called(), less < more, |inner| print(" ***", inner, "*** "));
+ 34| 1| wrap_with(will_be_called(), more < less, |inner| print(" ***", inner, "*** "));
+ ^0
+ 35| 1| } else {
+ 36| 1| wrap_with(will_not_be_called(), true, |inner| print("wrapped result is: ", inner, ""));
+ 37| 1| }
+ 38| 1|}
+
-include ../tools.mk
-# ignore-windows
+# ignore-windows-msvc
#
# Because of Windows exception handling, the code is not necessarily any shorter.
# https://github.com/llvm-mirror/llvm/commit/64b2297786f7fd6f5fa24cdd4db0298fbf211466
-include ../tools.mk
-# ignore-stage1
-
all:
$(RUSTC) a.rs && $(RUSTC) b.rs
$(BARE_RUSTC) c.rs -L dependency=$(TMPDIR) --extern b=$(TMPDIR)/libb.rlib \
-include ../tools.mk
-# ignore-stage1
-
all:
$(RUSTC) a.rs && $(RUSTC) b.rs && $(RUSTC) c.rs
-include ../tools.mk
-# ignore-windows
+# ignore-windows-msvc
all:
$(RUSTC) --emit=obj app.rs
+++ /dev/null
--include ../tools.mk
-
-# only-mingw
-
-all: empty.rs
- cp -r $(shell cygpath -u $(shell $(RUSTC) --print sysroot)) $(TMPDIR)/sysroot
- $(RUSTC) --target $(TARGET) --sysroot $(TMPDIR)/sysroot -L$(TMPDIR)/obj -Z print-link-args empty.rs | $(CGREP) 'lib\\crt2.o'
- mkdir -p $(TMPDIR)/obj
- mv $(TMPDIR)/sysroot/lib/rustlib/$(TARGET)/lib/crt2.o $(TMPDIR)/obj/crt2.o
- $(RUSTC) --target $(TARGET) --sysroot $(TMPDIR)/sysroot -L$(TMPDIR)/obj -Z print-link-args empty.rs | $(CGREP) 'obj\\crt2.o'
+++ /dev/null
-fn main() {}
-include ../tools.mk
-ifdef IS_WINDOWS
-all:
-else
+# ignore-windows-msvc
# rustc will remove one of the two redundant references to foo below. Depending
# on which one gets removed, we'll get a linker error on SOME platforms (like
all: $(call DYLIB,foo) $(call STATICLIB,bar) $(call STATICLIB,baz)
$(RUSTC) $(RUSTC_FLAGS) main.rs
$(call RUN,main)
-
-endif
--- /dev/null
+-include ../tools.mk
+
+# Assert that the search index is generated deterministically, regardless of the
+# order that crates are documented in.
+
+# ignore-windows
+# Uses `diff`.
+
+all:
+ $(RUSTDOC) foo.rs -o $(TMPDIR)/foo_first
+ $(RUSTDOC) bar.rs -o $(TMPDIR)/foo_first
+
+ $(RUSTDOC) bar.rs -o $(TMPDIR)/bar_first
+ $(RUSTDOC) foo.rs -o $(TMPDIR)/bar_first
+
+ diff $(TMPDIR)/foo_first/search-index.js $(TMPDIR)/bar_first/search-index.js
--- /dev/null
+pub struct Bar;
--- /dev/null
+pub struct Foo;
include ../tools.mk
-# ignore-windows
-#
-# On MINGW the --version-script, --dynamic-list, and --retain-symbol args don't
-# seem to work reliably.
+# ignore-windows-msvc
NM=nm -D
CDYLIB_NAME=liba_cdylib.so
COMBINED_CDYLIB_NAME=libcombined_rlib_dylib.dylib
endif
+ifdef IS_WINDOWS
+NM=nm -g
+CDYLIB_NAME=liba_cdylib.dll.a
+RDYLIB_NAME=liba_rust_dylib.dll.a
+EXE_NAME=an_executable.exe
+COMBINED_CDYLIB_NAME=libcombined_rlib_dylib.dll.a
+endif
+
# `grep` regex for symbols produced by either `legacy` or `v0` mangling
RE_ANY_RUST_SYMBOL="_ZN.*h.*E\|_R[a-zA-Z0-9_]+"
$(RUSTC) -Zshare-generics=no a_cdylib.rs --crate-name combined_rlib_dylib --crate-type=rlib,cdylib
# Check that a cdylib exports its public #[no_mangle] functions
- [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -c public_c_function_from_cdylib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -v __imp_ | grep -c public_c_function_from_cdylib)" -eq "1" ]
# Check that a cdylib exports the public #[no_mangle] functions of dependencies
- [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -c public_c_function_from_rlib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -v __imp_ | grep -c public_c_function_from_rlib)" -eq "1" ]
# Check that a cdylib DOES NOT export any public Rust functions
- [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -c $(RE_ANY_RUST_SYMBOL))" -eq "0" ]
+ [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -v __imp_ | grep -c $(RE_ANY_RUST_SYMBOL))" -eq "0" ]
# Check that a Rust dylib exports its monomorphic functions
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_c_function_from_rust_dylib)" -eq "1" ]
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_rust_function_from_rust_dylib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_c_function_from_rust_dylib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_rust_function_from_rust_dylib)" -eq "1" ]
# Check that a Rust dylib does not export generics if -Zshare-generics=no
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_generic_function_from_rust_dylib)" -eq "0" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_generic_function_from_rust_dylib)" -eq "0" ]
# Check that a Rust dylib exports the monomorphic functions from its dependencies
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_c_function_from_rlib)" -eq "1" ]
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_rust_function_from_rlib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_c_function_from_rlib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_rust_function_from_rlib)" -eq "1" ]
# Check that a Rust dylib does not export generics if -Zshare-generics=no
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_generic_function_from_rlib)" -eq "0" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_generic_function_from_rlib)" -eq "0" ]
# Check that an executable does not export any dynamic symbols
- [ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -c public_c_function_from_rlib)" -eq "0" ]
- [ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -c public_rust_function_from_exe)" -eq "0" ]
+ [ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -v __imp_ | grep -c public_c_function_from_rlib)" -eq "0" ]
+ [ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -v __imp_ | grep -c public_rust_function_from_exe)" -eq "0" ]
# Check the combined case, where we generate a cdylib and an rlib in the same
# compilation session:
# Check that a cdylib exports its public #[no_mangle] functions
- [ "$$($(NM) $(TMPDIR)/$(COMBINED_CDYLIB_NAME) | grep -c public_c_function_from_cdylib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(COMBINED_CDYLIB_NAME) | grep -v __imp_ | grep -c public_c_function_from_cdylib)" -eq "1" ]
# Check that a cdylib exports the public #[no_mangle] functions of dependencies
- [ "$$($(NM) $(TMPDIR)/$(COMBINED_CDYLIB_NAME) | grep -c public_c_function_from_rlib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(COMBINED_CDYLIB_NAME) | grep -v __imp_ | grep -c public_c_function_from_rlib)" -eq "1" ]
# Check that a cdylib DOES NOT export any public Rust functions
- [ "$$($(NM) $(TMPDIR)/$(COMBINED_CDYLIB_NAME) | grep -c $(RE_ANY_RUST_SYMBOL))" -eq "0" ]
+ [ "$$($(NM) $(TMPDIR)/$(COMBINED_CDYLIB_NAME) | grep -v __imp_ | grep -c $(RE_ANY_RUST_SYMBOL))" -eq "0" ]
$(RUSTC) -Zshare-generics=yes an_rlib.rs
$(RUSTC) -Zshare-generics=yes an_executable.rs
# Check that a cdylib exports its public #[no_mangle] functions
- [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -c public_c_function_from_cdylib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -v __imp_ | grep -c public_c_function_from_cdylib)" -eq "1" ]
# Check that a cdylib exports the public #[no_mangle] functions of dependencies
- [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -c public_c_function_from_rlib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -v __imp_ | grep -c public_c_function_from_rlib)" -eq "1" ]
# Check that a cdylib DOES NOT export any public Rust functions
- [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -c $(RE_ANY_RUST_SYMBOL))" -eq "0" ]
+ [ "$$($(NM) $(TMPDIR)/$(CDYLIB_NAME) | grep -v __imp_ | grep -c $(RE_ANY_RUST_SYMBOL))" -eq "0" ]
# Check that a Rust dylib exports its monomorphic functions, including generics this time
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_c_function_from_rust_dylib)" -eq "1" ]
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_rust_function_from_rust_dylib)" -eq "1" ]
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_generic_function_from_rust_dylib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_c_function_from_rust_dylib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_rust_function_from_rust_dylib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_generic_function_from_rust_dylib)" -eq "1" ]
# Check that a Rust dylib exports the monomorphic functions from its dependencies
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_c_function_from_rlib)" -eq "1" ]
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_rust_function_from_rlib)" -eq "1" ]
- [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -c public_generic_function_from_rlib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_c_function_from_rlib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_rust_function_from_rlib)" -eq "1" ]
+ [ "$$($(NM) $(TMPDIR)/$(RDYLIB_NAME) | grep -v __imp_ | grep -c public_generic_function_from_rlib)" -eq "1" ]
# Check that an executable does not export any dynamic symbols
- [ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -c public_c_function_from_rlib)" -eq "0" ]
- [ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -c public_rust_function_from_exe)" -eq "0" ]
+ [ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -v __imp_ | grep -c public_c_function_from_rlib)" -eq "0" ]
+ [ "$$($(NM) $(TMPDIR)/$(EXE_NAME) | grep -v __imp_ | grep -c public_rust_function_from_exe)" -eq "0" ]
HTMLDOCCK := '$(PYTHON)' '$(S)/src/etc/htmldocck.py'
CGREP := "$(S)/src/etc/cat-and-grep.sh"
+# diff with common flags for multi-platform diffs against text output
+DIFF := diff -u --strip-trailing-cr
+
# This is the name of the binary we will generate and run; use this
# e.g. for `$(CC) -o $(RUN_BINFILE)`.
RUN_BINFILE = $(TMPDIR)/$(1)
$(CC) $< -link -dll -out:`cygpath -w $@`
else
%.dll: lib%.o
- $(CC) -o $@ $< -shared
+ $(CC) -o $@ $< -shared -Wl,--out-implib=$@.a
endif
$(TMPDIR)/lib%.o: %.c
-include ../tools.mk
-# ignore-windows
+# ignore-windows-msvc
all:
$(RUSTC) -C opt-level=3 --emit=obj used.rs
}
Some(s) if s.eq("--test-aslr") => {
let cnt = run_self(&arg0);
- if cnt != NUM_RUNS {
+ if cnt == 1 {
eprintln!("FAIL: {} most likely no ASLR", arg0);
std::process::exit(1);
}
+++ /dev/null
-#![feature(doc_alias)]
-
-#[doc(alias = "foo")] // ok!
-pub struct Bar;
-
-#[doc(alias)] //~ ERROR
-#[doc(alias = 0)] //~ ERROR
-#[doc(alias("bar"))] //~ ERROR
-pub struct Foo;
+++ /dev/null
-error: doc alias attribute expects a string: #[doc(alias = "0")]
- --> $DIR/check-doc-alias-attr.rs:6:7
- |
-LL | #[doc(alias)]
- | ^^^^^
-
-error: doc alias attribute expects a string: #[doc(alias = "0")]
- --> $DIR/check-doc-alias-attr.rs:7:7
- |
-LL | #[doc(alias = 0)]
- | ^^^^^^^^^
-
-error: doc alias attribute expects a string: #[doc(alias = "0")]
- --> $DIR/check-doc-alias-attr.rs:8:7
- |
-LL | #[doc(alias("bar"))]
- | ^^^^^^^^^^^^
-
-error: aborting due to 3 previous errors
-
--- /dev/null
+Each of these needs to be in a separate file,
+because the `delay_span_bug` ICE in rustdoc won't be triggered
+if even a single other error was emitted.
+
+However, conceptually they are all testing basically the same thing.
+See https://github.com/rust-lang/rust/pull/73566#issuecomment-653689128
+for more details.
--- /dev/null
+// edition:2018
+
+/// This used to work with ResolveBodyWithLoop.
+/// However now that we ignore type checking instead of modifying the function body,
+/// the return type is seen as `impl Future<Output = u32>`, not a `u32`.
+/// So it no longer allows errors in the function body.
+pub async fn a() -> u32 {
+ error::_in::async_fn()
+ //~^ ERROR failed to resolve
+}
--- /dev/null
+error[E0433]: failed to resolve: could not resolve path `error::_in::async_fn`
+ --> $DIR/async.rs:8:5
+ |
+LL | error::_in::async_fn()
+ | ^^^^^^^^^^^^^^^^^^^^ could not resolve path `error::_in::async_fn`
+ |
+ = note: this error was originally ignored because you are running `rustdoc`
+ = note: try running again with `rustc` or `cargo check` and you may get a more detailed error
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0433`.
--- /dev/null
+// manually desugared version of an `async fn` (but with a closure instead of a generator)
+pub fn a() -> impl Fn() -> u32 {
+ || content::doesnt::matter()
+ //~^ ERROR failed to resolve
+}
--- /dev/null
+error[E0433]: failed to resolve: could not resolve path `content::doesnt::matter`
+ --> $DIR/closure.rs:3:8
+ |
+LL | || content::doesnt::matter()
+ | ^^^^^^^^^^^^^^^^^^^^^^^ could not resolve path `content::doesnt::matter`
+ |
+ = note: this error was originally ignored because you are running `rustdoc`
+ = note: try running again with `rustc` or `cargo check` and you may get a more detailed error
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0433`.
--- /dev/null
+trait ValidTrait {}
+
+/// This has docs
+pub fn f() -> impl ValidTrait {
+ Vec::<DoesNotExist>::new()
+ //~^ ERROR failed to resolve
+}
--- /dev/null
+error[E0433]: failed to resolve: could not resolve path `DoesNotExist`
+ --> $DIR/generic-argument.rs:5:11
+ |
+LL | Vec::<DoesNotExist>::new()
+ | ^^^^^^^^^^^^ could not resolve path `DoesNotExist`
+ |
+ = note: this error was originally ignored because you are running `rustdoc`
+ = note: try running again with `rustc` or `cargo check` and you may get a more detailed error
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0433`.
--- /dev/null
+pub trait ValidTrait {}
+/// This returns impl trait
+pub fn g() -> impl ValidTrait {
+ (|| error::_in::impl_trait::alias::nested::closure())()
+ //~^ ERROR failed to resolve
+}
--- /dev/null
+error[E0433]: failed to resolve: could not resolve path `error::_in::impl_trait::alias::nested::closure`
+ --> $DIR/impl-keyword-closure.rs:4:9
+ |
+LL | (|| error::_in::impl_trait::alias::nested::closure())()
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ could not resolve path `error::_in::impl_trait::alias::nested::closure`
+ |
+ = note: this error was originally ignored because you are running `rustdoc`
+ = note: try running again with `rustc` or `cargo check` and you may get a more detailed error
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0433`.
--- /dev/null
+pub trait ValidTrait {}
+/// This returns impl trait
+pub fn g() -> impl ValidTrait {
+ error::_in::impl_trait()
+ //~^ ERROR failed to resolve
+}
--- /dev/null
+error[E0433]: failed to resolve: could not resolve path `error::_in::impl_trait`
+ --> $DIR/impl-keyword.rs:4:5
+ |
+LL | error::_in::impl_trait()
+ | ^^^^^^^^^^^^^^^^^^^^^^ could not resolve path `error::_in::impl_trait`
+ |
+ = note: this error was originally ignored because you are running `rustdoc`
+ = note: try running again with `rustc` or `cargo check` and you may get a more detailed error
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0433`.
--- /dev/null
+#![feature(type_alias_impl_trait)]
+
+pub trait ValidTrait {}
+type ImplTrait = impl ValidTrait;
+
+/// This returns impl trait, but using a type alias
+pub fn h() -> ImplTrait {
+ (|| error::_in::impl_trait::alias::nested::closure())()
+ //~^ ERROR failed to resolve
+}
--- /dev/null
+error[E0433]: failed to resolve: could not resolve path `error::_in::impl_trait::alias::nested::closure`
+ --> $DIR/trait-alias-closure.rs:8:9
+ |
+LL | (|| error::_in::impl_trait::alias::nested::closure())()
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ could not resolve path `error::_in::impl_trait::alias::nested::closure`
+ |
+ = note: this error was originally ignored because you are running `rustdoc`
+ = note: try running again with `rustc` or `cargo check` and you may get a more detailed error
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0433`.
--- /dev/null
+#![feature(type_alias_impl_trait)]
+
+pub trait ValidTrait {}
+type ImplTrait = impl ValidTrait;
+
+/// This returns impl trait, but using a type alias
+pub fn h() -> ImplTrait {
+ error::_in::impl_trait::alias()
+ //~^ ERROR failed to resolve
+}
--- /dev/null
+error[E0433]: failed to resolve: could not resolve path `error::_in::impl_trait::alias`
+ --> $DIR/trait-alias.rs:8:5
+ |
+LL | error::_in::impl_trait::alias()
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ could not resolve path `error::_in::impl_trait::alias`
+ |
+ = note: this error was originally ignored because you are running `rustdoc`
+ = note: try running again with `rustc` or `cargo check` and you may get a more detailed error
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0433`.
--- /dev/null
+// Ensure that rustdoc gives errors for trait impls inside function bodies that don't resolve.
+// See https://github.com/rust-lang/rust/pull/73566
+pub struct ValidType;
+pub trait ValidTrait {}
+pub trait NeedsBody {
+ type Item;
+ fn f();
+}
+
+/// This function has docs
+pub fn f<B: UnknownBound>(a: UnknownType, b: B) {
+//~^ ERROR cannot find trait `UnknownBound` in this scope
+//~| ERROR cannot find type `UnknownType` in this scope
+ impl UnknownTrait for ValidType {} //~ ERROR cannot find trait `UnknownTrait`
+ impl<T: UnknownBound> UnknownTrait for T {}
+ //~^ ERROR cannot find trait `UnknownBound` in this scope
+ //~| ERROR cannot find trait `UnknownTrait` in this scope
+ impl ValidTrait for UnknownType {}
+ //~^ ERROR cannot find type `UnknownType` in this scope
+ impl ValidTrait for ValidType where ValidTrait: UnknownBound {}
+ //~^ ERROR cannot find trait `UnknownBound` in this scope
+
+ /// This impl has documentation
+ impl NeedsBody for ValidType {
+ type Item = UnknownType;
+ //~^ ERROR cannot find type `UnknownType` in this scope
+
+ /// This function has documentation
+ fn f() {
+ <UnknownTypeShouldBeIgnored>::a();
+ content::shouldnt::matter();
+ unknown_macro!();
+ //~^ ERROR cannot find macro `unknown_macro` in this scope
+
+ /// This is documentation for a macro
+ macro_rules! can_define_macros_here_too {
+ () => {
+ this::content::should::also::be::ignored()
+ }
+ }
+ can_define_macros_here_too!();
+
+ /// This also is documented.
+ pub fn doubly_nested(c: UnknownType) {
+ //~^ ERROR cannot find type `UnknownType` in this scope
+ }
+ }
+ }
+}
--- /dev/null
+error: cannot find macro `unknown_macro` in this scope
+ --> $DIR/impl-fn-nesting.rs:32:13
+ |
+LL | unknown_macro!();
+ | ^^^^^^^^^^^^^
+
+error[E0405]: cannot find trait `UnknownBound` in this scope
+ --> $DIR/impl-fn-nesting.rs:11:13
+ |
+LL | pub fn f<B: UnknownBound>(a: UnknownType, b: B) {
+ | ^^^^^^^^^^^^ not found in this scope
+
+error[E0412]: cannot find type `UnknownType` in this scope
+ --> $DIR/impl-fn-nesting.rs:11:30
+ |
+LL | pub fn f<B: UnknownBound>(a: UnknownType, b: B) {
+ | ^^^^^^^^^^^ not found in this scope
+
+error[E0405]: cannot find trait `UnknownTrait` in this scope
+ --> $DIR/impl-fn-nesting.rs:14:10
+ |
+LL | impl UnknownTrait for ValidType {}
+ | ^^^^^^^^^^^^ not found in this scope
+
+error[E0405]: cannot find trait `UnknownTrait` in this scope
+ --> $DIR/impl-fn-nesting.rs:15:27
+ |
+LL | impl<T: UnknownBound> UnknownTrait for T {}
+ | ^^^^^^^^^^^^ not found in this scope
+
+error[E0405]: cannot find trait `UnknownBound` in this scope
+ --> $DIR/impl-fn-nesting.rs:15:13
+ |
+LL | impl<T: UnknownBound> UnknownTrait for T {}
+ | ^^^^^^^^^^^^ not found in this scope
+
+error[E0412]: cannot find type `UnknownType` in this scope
+ --> $DIR/impl-fn-nesting.rs:18:25
+ |
+LL | impl ValidTrait for UnknownType {}
+ | ^^^^^^^^^^^ not found in this scope
+
+error[E0405]: cannot find trait `UnknownBound` in this scope
+ --> $DIR/impl-fn-nesting.rs:20:53
+ |
+LL | impl ValidTrait for ValidType where ValidTrait: UnknownBound {}
+ | ^^^^^^^^^^^^ not found in this scope
+
+error[E0412]: cannot find type `UnknownType` in this scope
+ --> $DIR/impl-fn-nesting.rs:25:21
+ |
+LL | type Item = UnknownType;
+ | ^^^^^^^^^^^ not found in this scope
+
+error[E0412]: cannot find type `UnknownType` in this scope
+ --> $DIR/impl-fn-nesting.rs:44:37
+ |
+LL | pub fn doubly_nested(c: UnknownType) {
+ | ^^^^^^^^^^^ not found in this scope
+
+error: Compilation failed, aborting rustdoc
+
+error: aborting due to 11 previous errors
+
+Some errors have detailed explanations: E0405, E0412.
+For more information about an error, try `rustc --explain E0405`.
--- /dev/null
+enum E {
+//~^ ERROR recursive type `E` has infinite size
+ V(E),
+}
--- /dev/null
+error[E0072]: recursive type `E` has infinite size
+ --> $DIR/infinite-recursive-type.rs:1:1
+ |
+LL | enum E {
+ | ^^^^^^ recursive type has infinite size
+LL |
+LL | V(E),
+ | - recursive without indirection
+ |
+help: insert some indirection (e.g., a `Box`, `Rc`, or `&`) to make `E` representable
+ |
+LL | V(Box<E>),
+ | ^^^^ ^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0072`.
// check-pass
// revisions: public private
// [private]compile-flags: --document-private-items
-#![cfg_attr(private, deny(intra_doc_resolution_failure))]
+#![cfg_attr(private, deny(intra_doc_link_resolution_failure))]
/// docs [DontDocMe]
//[public]~^ WARNING `[DontDocMe]` public documentation for `DocMe` links to a private item
--- /dev/null
+#![feature(staged_api)]
+#![stable(feature = "private_general", since = "1.0.0")]
+
+#[unstable(feature = "private_trait", issue = "none")]
+pub trait Bar {}
+
+#[stable(feature = "private_general", since = "1.0.0")]
+pub struct Foo {
+ // nothing
+}
+
+impl Foo {
+ #[stable(feature = "private_general", since = "1.0.0")]
+ pub fn stable_impl() {}
+}
+
+impl Foo {
+ #[unstable(feature = "private_trait", issue = "none")]
+ pub fn bar() {}
+
+ #[stable(feature = "private_general", since = "1.0.0")]
+ pub fn bar2() {}
+}
+
+#[stable(feature = "private_general", since = "1.0.0")]
+impl Bar for Foo {}
--- /dev/null
+#![feature(doc_spotlight)]
+
+pub struct Wrapper<T> {
+ inner: T,
+}
+
+impl<T: SomeTrait> SomeTrait for Wrapper<T> {}
+
+#[doc(spotlight)]
+pub trait SomeTrait {
+ // @has doc_spotlight/trait.SomeTrait.html
+ // @has - '//code[@class="content"]' 'impl<T: SomeTrait> SomeTrait for Wrapper<T>'
+ fn wrap_me(self) -> Wrapper<Self> where Self: Sized {
+ Wrapper {
+ inner: self,
+ }
+ }
+}
+
+pub struct SomeStruct;
+impl SomeTrait for SomeStruct {}
+
+impl SomeStruct {
+ // @has doc_spotlight/struct.SomeStruct.html
+ // @has - '//code[@class="content"]' 'impl SomeTrait for SomeStruct'
+ // @has - '//code[@class="content"]' 'impl<T: SomeTrait> SomeTrait for Wrapper<T>'
+ pub fn new() -> SomeStruct {
+ SomeStruct
+ }
+}
+
+// @has doc_spotlight/fn.bare_fn.html
+// @has - '//code[@class="content"]' 'impl SomeTrait for SomeStruct'
+pub fn bare_fn() -> SomeStruct {
+ SomeStruct
+}
--- /dev/null
+// aux-build:unstable-trait.rs
+
+#![crate_name = "foo"]
+#![feature(private_trait)]
+
+extern crate unstable_trait;
+
+// @has foo/struct.Foo.html 'bar'
+// @has foo/struct.Foo.html 'bar2'
+#[doc(inline)]
+pub use unstable_trait::Foo;
--- /dev/null
+#![feature(type_alias_impl_trait)]
+
+trait MyTrait {}
+impl MyTrait for i32 {}
+
+// @has impl_trait_alias/type.Foo.html 'Foo'
+/// debug type
+pub type Foo = impl MyTrait;
+
+// @has impl_trait_alias/fn.foo.html 'foo'
+/// debug function
+pub fn foo() -> Foo {
+ 1
+}
// compile-flags: -Z force-unstable-if-unmarked
-// @matches internal/index.html '//*[@class="docblock-short"]/span[@class="stab internal"]' \
-// 'Internal'
+// Check that the unstable marker is not added for "rustc_private".
+
+// @!matches internal/index.html '//*[@class="docblock-short"]/span[@class="stab unstable"]'
+// @!matches internal/index.html '//*[@class="docblock-short"]/span[@class="stab internal"]'
// @matches - '//*[@class="docblock-short"]' 'Docs'
-// @has internal/struct.S.html '//*[@class="stab internal"]' \
-// 'This is an internal compiler API. (rustc_private)'
+// @!has internal/struct.S.html '//*[@class="stab unstable"]'
+// @!has internal/struct.S.html '//*[@class="stab internal"]'
/// Docs
pub struct S;
--- /dev/null
+// aux-build:additional_doc.rs
+// build-aux-docs
+#![deny(intra_doc_link_resolution_failure)]
+
+extern crate my_rand;
+
+// @has 'additional_doc/trait.Rng.html' '//a[@href="../additional_doc/trait.Rng.html"]' 'Rng'
+// @has 'additional_doc/trait.Rng.html' '//a[@href="../my_rand/trait.RngCore.html"]' 'RngCore'
+/// This is an [`Rng`].
+pub use my_rand::Rng;
--- /dev/null
+#![crate_name = "my_rand"]
+#![deny(intra_doc_link_resolution_failure)]
+
+pub trait RngCore {}
+/// Rng extends [`RngCore`].
+pub trait Rng: RngCore {}
--- /dev/null
+#![crate_name = "hidden_dep"]
+#![deny(intra_doc_link_resolution_failure)]
+
+#[doc(hidden)]
+pub mod __reexport {
+ pub use crate::*;
+}
+
+pub mod future {
+ mod ready {
+
+ /// Link to [`ready`](function@ready)
+ pub struct Ready;
+ pub fn ready() {}
+
+ }
+ pub use self::ready::{ready, Ready};
+
+}
--- /dev/null
+#![crate_name = "a"]
+#![deny(intra_doc_link_resolution_failure)]
+
+pub struct Foo;
+
+/// Link to [Foo]
+pub struct Bar;
--- /dev/null
+#![crate_name = "macro_inner"]
+#![deny(intra_doc_link_resolution_failure)]
+
+pub struct Foo;
+
+/// See also [`Foo`]
+#[macro_export]
+macro_rules! my_macro {
+ () => {}
+}
--- /dev/null
+#![crate_name = "module_inner"]
+#![deny(intra_doc_link_resolution_failure)]
+/// [SomeType] links to [bar]
+pub struct SomeType;
+pub trait SomeTrait {}
+/// [bar] links to [SomeTrait] and also [SomeType]
+pub mod bar {}
--- /dev/null
+// force-host
+// no-prefer-dynamic
+// compile-flags: --crate-type proc-macro
+#![crate_type="proc-macro"]
+#![crate_name="proc_macro_inner"]
+
+extern crate proc_macro;
+
+use proc_macro::TokenStream;
+
+/// Links to [`OtherDerive`]
+#[proc_macro_derive(DeriveA)]
+pub fn a_derive(input: TokenStream) -> TokenStream {
+ input
+}
+
+#[proc_macro_derive(OtherDerive)]
+pub fn other_derive(input: TokenStream) -> TokenStream {
+ input
+}
--- /dev/null
+#![crate_name = "a"]
+#![deny(intra_doc_link_resolution_failure)]
+
+pub mod bar {
+ pub struct Bar;
+}
+
+pub mod foo {
+ use crate::bar;
+ /// link to [bar::Bar]
+ pub struct Foo;
+}
--- /dev/null
+#![crate_name = "bar"]
+#![deny(intra_doc_link_resolution_failure)]
+
+pub trait Foo {
+ /// [`Bar`] [`Baz`]
+ fn foo();
+}
+
+pub trait Bar {
+}
+
+pub trait Baz {
+}
--- /dev/null
+#![crate_name = "inner"]
+/// this is a trait
+pub trait SomeTrait {
+ /// this is a method for [a trait][SomeTrait]
+ fn foo();
+}
+
+pub mod bar {
+ use super::SomeTrait;
+
+ pub struct BarStruct;
+
+ impl SomeTrait for BarStruct {
+ fn foo() {}
+ }
+}
--- /dev/null
+// aux-build:intra-doc-basic.rs
+// build-aux-docs
+#![deny(intra_doc_link_resolution_failure)]
+
+// from https://github.com/rust-lang/rust/issues/65983
+extern crate a;
+
+// @has 'basic/struct.Bar.html' '//a[@href="../a/struct.Foo.html"]' 'Foo'
+pub use a::Bar;
--- /dev/null
+// aux-build:hidden.rs
+// build-aux-docs
+#![deny(intra_doc_link_resolution_failure)]
+
+// tests https://github.com/rust-lang/rust/issues/73363
+
+extern crate hidden_dep;
+
+// @has 'hidden/struct.Ready.html' '//a/@href' '../hidden/fn.ready.html'
+pub use hidden_dep::future::{ready, Ready};
--- /dev/null
+// ignore-tidy-linelength
+// aux-build:macro_inner.rs
+// aux-build:proc_macro.rs
+// build-aux-docs
+#![deny(intra_doc_link_resolution_failure)]
+extern crate macro_inner;
+extern crate proc_macro_inner;
+
+// @has 'macro/macro.my_macro.html' '//a[@href="../macro_inner/struct.Foo.html"]' 'Foo'
+pub use macro_inner::my_macro;
+// @has 'macro/derive.DeriveA.html' '//a[@href="../proc_macro_inner/derive.OtherDerive.html"]' 'OtherDerive'
+pub use proc_macro_inner::DeriveA;
--- /dev/null
+// outer.rs
+// aux-build: module.rs
+// build-aux-docs
+#![deny(intra_doc_link_resolution_failure)]
+extern crate module_inner;
+// @has 'module/bar/index.html' '//a[@href="../../module_inner/trait.SomeTrait.html"]' 'SomeTrait'
+// @has 'module/bar/index.html' '//a[@href="../../module_inner/struct.SomeType.html"]' 'SomeType'
+pub use module_inner::bar;
--- /dev/null
+// aux-build:submodule-inner.rs
+// build-aux-docs
+#![deny(intra_doc_link_resolution_failure)]
+
+extern crate a;
+
+// @has 'submodule_inner/struct.Foo.html' '//a[@href="../a/bar/struct.Bar.html"]' 'Bar'
+pub use a::foo::Foo;
--- /dev/null
+// aux-build:submodule-outer.rs
+// edition:2018
+#![deny(intra_doc_link_resolution_failure)]
+
+extern crate bar as bar_;
+
+// from https://github.com/rust-lang/rust/issues/60883
+pub mod bar {
+ pub use ::bar_::Bar;
+}
+
+// NOTE: we re-exported both `Foo` and `Bar` here,
+// NOTE: so they are inlined and therefore we link to the current module.
+// @has 'submodule_outer/trait.Foo.html' '//a[@href="../submodule_outer/bar/trait.Bar.html"]' 'Bar'
+// @has 'submodule_outer/trait.Foo.html' '//a[@href="../submodule_outer/trait.Baz.html"]' 'Baz'
+pub use ::bar_::{Foo, Baz};
--- /dev/null
+// ignore-test
+// ^ this is https://github.com/rust-lang/rust/issues/73829
+// aux-build:traits.rs
+// build-aux-docs
+// ignore-tidy-line-length
+#![deny(intra_doc_link_resolution_failure)]
+
+extern crate inner;
+use inner::SomeTrait;
+
+pub struct SomeStruct;
+
+ // @has 'traits/struct.SomeStruct.html' '//a[@href="../inner/trait.SomeTrait.html"]' 'SomeTrait'
+impl SomeTrait for SomeStruct {
+ // @has 'traits/struct.SomeStruct.html' '//a[@href="../inner/trait.SomeTrait.html"]' 'a trait'
+ fn foo() {}
+}
--- /dev/null
+// ignore-tidy-linelength
+
+#![deny(intra_doc_link_resolution_failure)]
+
+
+pub fn foo() {
+
+}
+
+pub mod foo {}
+// @has intra_doc_link_mod_ambiguity/struct.A.html '//a/@href' '../intra_doc_link_mod_ambiguity/foo/index.html'
+/// Module is [`module@foo`]
+pub struct A;
+
+
+// @has intra_doc_link_mod_ambiguity/struct.B.html '//a/@href' '../intra_doc_link_mod_ambiguity/fn.foo.html'
+/// Function is [`fn@foo`]
+pub struct B;
// ignore-tidy-linelength
-#![deny(intra_doc_resolution_failure)]
+#![deny(intra_doc_link_resolution_failure)]
pub mod char {}
--- /dev/null
+// Regression issue for rustdoc ICE encountered in PR #72088.
+// edition:2018
+#![feature(decl_macro)]
+
+fn main() {
+ async {
+ macro m() {}
+ };
+}
|| {
macro m() {}
};
+
+ let _ = || {
+ macro n() {}
+ };
+
+ let cond = true;
+ let _ = || if cond { macro n() {} } else { panic!() };
}
--- /dev/null
+#![feature(type_alias_impl_trait)]
+
+pub trait Backend {}
+
+impl Backend for () {}
+
+pub struct Module<T>(T);
+
+pub type BackendImpl = impl Backend;
+
+// @has return_impl_trait/fn.make_module.html
+/// Documentation
+pub fn make_module() -> Module<BackendImpl> {
+ Module(())
+}
// aux-build:rlib-crate-test.rs
-// ignore-tidy-linelength
// ignore-cross-compile gives a different error message
#![feature(plugin)]
#![plugin(rlib_crate_test)]
-//~^ ERROR: plugin `rlib_crate_test` only found in rlib format, but must be available in dylib format
-//~| WARN use of deprecated attribute `plugin`: compiler plugins are deprecated
+//~^ ERROR: plugin `rlib_crate_test` only found in rlib format, but must be available in dylib
fn main() {}
error[E0457]: plugin `rlib_crate_test` only found in rlib format, but must be available in dylib format
- --> $DIR/macro-crate-rlib.rs:6:11
+ --> $DIR/macro-crate-rlib.rs:5:11
|
LL | #![plugin(rlib_crate_test)]
| ^^^^^^^^^^^^^^^
-warning: use of deprecated attribute `plugin`: compiler plugins are deprecated. See https://github.com/rust-lang/rust/pull/64675
- --> $DIR/macro-crate-rlib.rs:6:1
- |
-LL | #![plugin(rlib_crate_test)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^ help: may be removed in a future compiler version
- |
- = note: `#[warn(deprecated)]` on by default
-
-error: aborting due to previous error; 1 warning emitted
+error: aborting due to previous error
--- /dev/null
+#![crate_type = "lib"]
+#![feature(doc_alias)]
+
+#[doc(alias = "foo")] // ok!
+pub struct Bar;
+
+#[doc(alias)] //~ ERROR
+#[doc(alias = 0)] //~ ERROR
+#[doc(alias("bar"))] //~ ERROR
+pub struct Foo;
--- /dev/null
+error: doc alias attribute expects a string: #[doc(alias = "0")]
+ --> $DIR/check-doc-alias-attr.rs:7:7
+ |
+LL | #[doc(alias)]
+ | ^^^^^
+
+error: doc alias attribute expects a string: #[doc(alias = "0")]
+ --> $DIR/check-doc-alias-attr.rs:8:7
+ |
+LL | #[doc(alias = 0)]
+ | ^^^^^^^^^
+
+error: doc alias attribute expects a string: #[doc(alias = "0")]
+ --> $DIR/check-doc-alias-attr.rs:9:7
+ |
+LL | #[doc(alias("bar"))]
+ | ^^^^^^^^^^^^
+
+error: aborting due to 3 previous errors
+
--- /dev/null
+// run-pass
+
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete
+#![allow(dead_code)]
+
+fn test<const N: usize>() {}
+
+fn wow<'a>() -> &'a () {
+ test::<{
+ let _: &'a ();
+ 3
+ }>();
+ &()
+}
+
+fn main() {}
--- /dev/null
+warning: the feature `const_generics` is incomplete and may not be safe to use and/or cause compiler crashes
+ --> $DIR/const-argument-non-static-lifetime.rs:3:12
+ |
+LL | #![feature(const_generics)]
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `#[warn(incomplete_features)]` on by default
+ = note: see issue #44580 <https://github.com/rust-lang/rust/issues/44580> for more information
+
+warning: 1 warning emitted
+
--- /dev/null
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete
+
+// Currently, const parameters cannot depend on other generic parameters,
+// as our current implementation can't really support this.
+//
+// We may want to lift this restriction in the future.
+
+pub struct Dependent<const N: usize, const X: [u8; N]>([(); N]);
+//~^ ERROR: the type of const parameters must not depend on other generic parameters
+
+pub struct SelfDependent<const N: [u8; N]>;
+//~^ ERROR: the type of const parameters must not depend on other generic parameters
+
+fn main() {}
--- /dev/null
+error[E0770]: the type of const parameters must not depend on other generic parameters
+ --> $DIR/const-param-type-depends-on-const-param.rs:9:52
+ |
+LL | pub struct Dependent<const N: usize, const X: [u8; N]>([(); N]);
+ | ^ the type must not depend on the parameter `N`
+
+error[E0770]: the type of const parameters must not depend on other generic parameters
+ --> $DIR/const-param-type-depends-on-const-param.rs:12:40
+ |
+LL | pub struct SelfDependent<const N: [u8; N]>;
+ | ^ the type must not depend on the parameter `N`
+
+warning: the feature `const_generics` is incomplete and may not be safe to use and/or cause compiler crashes
+ --> $DIR/const-param-type-depends-on-const-param.rs:1:12
+ |
+LL | #![feature(const_generics)]
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `#[warn(incomplete_features)]` on by default
+ = note: see issue #44580 <https://github.com/rust-lang/rust/issues/44580> for more information
+
+error: aborting due to 2 previous errors; 1 warning emitted
+
+For more information about this error, try `rustc --explain E0770`.
+// compile-flags: -Zsave-analysis
+// Regression test for #69414 ^
+
use std::marker::PhantomData;
struct B<T, const N: T>(PhantomData<[T; N]>); //~ ERROR const generics are unstable
-//~^ ERROR `T` is not guaranteed to `#[derive(PartialEq, Eq)]`
+//~^ ERROR the type of const parameters must not depend on other generic parameters
fn main() {}
+error[E0770]: the type of const parameters must not depend on other generic parameters
+ --> $DIR/const-param-type-depends-on-type-param-ungated.rs:6:22
+ |
+LL | struct B<T, const N: T>(PhantomData<[T; N]>);
+ | ^ the type must not depend on the parameter `T`
+
error[E0658]: const generics are unstable
- --> $DIR/const-param-type-depends-on-type-param-ungated.rs:3:19
+ --> $DIR/const-param-type-depends-on-type-param-ungated.rs:6:19
|
LL | struct B<T, const N: T>(PhantomData<[T; N]>);
| ^
= note: see issue #44580 <https://github.com/rust-lang/rust/issues/44580> for more information
= help: add `#![feature(const_generics)]` to the crate attributes to enable
-error[E0741]: `T` is not guaranteed to `#[derive(PartialEq, Eq)]`, so may not be used as the type of a const parameter
- --> $DIR/const-param-type-depends-on-type-param-ungated.rs:3:22
- |
-LL | struct B<T, const N: T>(PhantomData<[T; N]>);
- | ^ `T` may not derive both `PartialEq` and `Eq`
- |
- = note: it is not currently possible to use a type parameter as the type of a const parameter
-
error: aborting due to 2 previous errors
-Some errors have detailed explanations: E0658, E0741.
+Some errors have detailed explanations: E0658, E0770.
For more information about an error, try `rustc --explain E0658`.
#![feature(const_generics)]
//~^ WARN the feature `const_generics` is incomplete
-// Currently, const parameters cannot depend on type parameters, because there is no way to
-// enforce the structural-match property on an arbitrary type parameter. This restriction
-// may be relaxed in the future. See https://github.com/rust-lang/rfcs/pull/2000 for more
-// details.
+// Currently, const parameters cannot depend on other generic parameters,
+// as our current implementation can't really support this.
+//
+// We may want to lift this restriction in the future.
pub struct Dependent<T, const X: T>([(); X]);
-//~^ ERROR `T` is not guaranteed to `#[derive(PartialEq, Eq)]`
+//~^ ERROR: the type of const parameters must not depend on other generic parameters
+//~| ERROR: parameter `T` is never used
fn main() {}
+error[E0770]: the type of const parameters must not depend on other generic parameters
+ --> $DIR/const-param-type-depends-on-type-param.rs:9:34
+ |
+LL | pub struct Dependent<T, const X: T>([(); X]);
+ | ^ the type must not depend on the parameter `T`
+
warning: the feature `const_generics` is incomplete and may not be safe to use and/or cause compiler crashes
--> $DIR/const-param-type-depends-on-type-param.rs:1:12
|
= note: `#[warn(incomplete_features)]` on by default
= note: see issue #44580 <https://github.com/rust-lang/rust/issues/44580> for more information
-error[E0741]: `T` is not guaranteed to `#[derive(PartialEq, Eq)]`, so may not be used as the type of a const parameter
- --> $DIR/const-param-type-depends-on-type-param.rs:9:34
+error[E0392]: parameter `T` is never used
+ --> $DIR/const-param-type-depends-on-type-param.rs:9:22
|
LL | pub struct Dependent<T, const X: T>([(); X]);
- | ^ `T` may not derive both `PartialEq` and `Eq`
+ | ^ unused parameter
|
- = note: it is not currently possible to use a type parameter as the type of a const parameter
+ = help: consider removing `T`, referring to it in a field, or using a marker such as `std::marker::PhantomData`
-error: aborting due to previous error; 1 warning emitted
+error: aborting due to 2 previous errors; 1 warning emitted
-For more information about this error, try `rustc --explain E0741`.
+Some errors have detailed explanations: E0392, E0770.
+For more information about an error, try `rustc --explain E0392`.
--- /dev/null
+#![feature(const_generics)]
+
+// All of these three items must be in `lib2` to reproduce the error
+
+pub trait TypeFn {
+ type Output;
+}
+
+pub struct GenericType<const B: i8>;
+
+// Removing the braces around `42` resolves the crash
+impl TypeFn for GenericType<{ 42 }> {
+ type Output = ();
+}
--- /dev/null
+#![feature(const_generics)] //~ WARN the feature `const_generics` is incomplete
+
+fn foo<const N: usize, const A: [u8; N]>() {}
+//~^ ERROR the type of const parameters must not
+
+fn main() {
+ foo::<_, {[1]}>();
+ //~^ ERROR wrong number of const arguments
+ //~| ERROR wrong number of type arguments
+ //~| ERROR mismatched types
+}
--- /dev/null
+error[E0770]: the type of const parameters must not depend on other generic parameters
+ --> $DIR/issue-62878.rs:3:38
+ |
+LL | fn foo<const N: usize, const A: [u8; N]>() {}
+ | ^ the type must not depend on the parameter `N`
+
+warning: the feature `const_generics` is incomplete and may not be safe to use and/or cause compiler crashes
+ --> $DIR/issue-62878.rs:1:12
+ |
+LL | #![feature(const_generics)]
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `#[warn(incomplete_features)]` on by default
+ = note: see issue #44580 <https://github.com/rust-lang/rust/issues/44580> for more information
+
+error[E0107]: wrong number of const arguments: expected 2, found 1
+ --> $DIR/issue-62878.rs:7:5
+ |
+LL | foo::<_, {[1]}>();
+ | ^^^^^^^^^^^^^^^ expected 2 const arguments
+
+error[E0107]: wrong number of type arguments: expected 0, found 1
+ --> $DIR/issue-62878.rs:7:11
+ |
+LL | foo::<_, {[1]}>();
+ | ^ unexpected type argument
+
+error[E0308]: mismatched types
+ --> $DIR/issue-62878.rs:7:15
+ |
+LL | foo::<_, {[1]}>();
+ | ^^^ expected `usize`, found array `[{integer}; 1]`
+
+error: aborting due to 4 previous errors; 1 warning emitted
+
+Some errors have detailed explanations: E0107, E0308, E0770.
+For more information about an error, try `rustc --explain E0107`.
--- /dev/null
+// check-pass
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+pub struct S(u8);
+
+impl S {
+ pub fn get<const A: u8>(&self) -> &u8 {
+ &self.0
+ }
+}
+
+fn main() {
+ const A: u8 = 5;
+ let s = S(0);
+
+ s.get::<A>();
+}
--- /dev/null
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+fn foo<const LEN: usize, const DATA: [u8; LEN]>() {}
+//~^ ERROR the type of const parameters must not
+fn main() {
+ const DATA: [u8; 4] = *b"ABCD";
+ foo::<4, DATA>();
+ //~^ ERROR constant expression depends on
+}
--- /dev/null
+error[E0770]: the type of const parameters must not depend on other generic parameters
+ --> $DIR/issue-71169.rs:4:43
+ |
+LL | fn foo<const LEN: usize, const DATA: [u8; LEN]>() {}
+ | ^^^ the type must not depend on the parameter `LEN`
+
+error: constant expression depends on a generic parameter
+ --> $DIR/issue-71169.rs:8:14
+ |
+LL | foo::<4, DATA>();
+ | ^^^^
+ |
+ = note: this may fail depending on what value the parameter takes
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0770`.
impl Test {
pub fn call_me<Args: Sized, const IDX: usize, const FN: unsafe extern "C" fn(Args)>(&self) {
//~^ ERROR: using function pointers as const generic parameters is forbidden
+ //~| ERROR: the type of const parameters must not depend on other generic parameters
self.0 = Self::trampiline::<Args, IDX, FN> as _
}
const IDX: usize,
const FN: unsafe extern "C" fn(Args),
//~^ ERROR: using function pointers as const generic parameters is forbidden
+ //~| ERROR: the type of const parameters must not depend on other generic parameters
>(
args: Args,
) {
+error[E0770]: the type of const parameters must not depend on other generic parameters
+ --> $DIR/issue-71381.rs:13:82
+ |
+LL | pub fn call_me<Args: Sized, const IDX: usize, const FN: unsafe extern "C" fn(Args)>(&self) {
+ | ^^^^ the type must not depend on the parameter `Args`
+
+error[E0770]: the type of const parameters must not depend on other generic parameters
+ --> $DIR/issue-71381.rs:22:40
+ |
+LL | const FN: unsafe extern "C" fn(Args),
+ | ^^^^ the type must not depend on the parameter `Args`
+
error: using function pointers as const generic parameters is forbidden
--> $DIR/issue-71381.rs:13:61
|
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
error: using function pointers as const generic parameters is forbidden
- --> $DIR/issue-71381.rs:21:19
+ --> $DIR/issue-71381.rs:22:19
|
LL | const FN: unsafe extern "C" fn(Args),
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: aborting due to 2 previous errors
+error: aborting due to 4 previous errors
+For more information about this error, try `rustc --explain E0770`.
fn func<A, const F: fn(inner: A)>(outer: A) {
//~^ ERROR: using function pointers as const generic parameters is forbidden
+ //~| ERROR: the type of const parameters must not depend on other generic parameters
F(outer);
}
+error[E0770]: the type of const parameters must not depend on other generic parameters
+ --> $DIR/issue-71611.rs:4:31
+ |
+LL | fn func<A, const F: fn(inner: A)>(outer: A) {
+ | ^ the type must not depend on the parameter `A`
+
error: using function pointers as const generic parameters is forbidden
--> $DIR/issue-71611.rs:4:21
|
LL | fn func<A, const F: fn(inner: A)>(outer: A) {
| ^^^^^^^^^^^^
-error: aborting due to previous error
+error: aborting due to 2 previous errors
+For more information about this error, try `rustc --explain E0770`.
--- /dev/null
+// check-pass
+// aux-build:const_generic_issues_lib.rs
+extern crate const_generic_issues_lib as lib2;
+fn unused_function(
+ _: <lib2::GenericType<42> as lib2::TypeFn>::Output
+) {}
+
+fn main() {}
--- /dev/null
+// check-pass
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+const LEN: usize = 1024;
+
+fn hoge<const IN: [u32; LEN]>() {}
+
+fn main() {}
--- /dev/null
+#![feature(const_generics)] //~ WARN the feature `const_generics` is incomplete
+
+pub const fn func_name<const X: *const u32>() {}
+//~^ ERROR using raw pointers
+
+fn main() {}
--- /dev/null
+warning: the feature `const_generics` is incomplete and may not be safe to use and/or cause compiler crashes
+ --> $DIR/issue-73508.rs:1:12
+ |
+LL | #![feature(const_generics)]
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `#[warn(incomplete_features)]` on by default
+ = note: see issue #44580 <https://github.com/rust-lang/rust/issues/44580> for more information
+
+error: using raw pointers as const generic parameters is forbidden
+ --> $DIR/issue-73508.rs:3:33
+ |
+LL | pub const fn func_name<const X: *const u32>() {}
+ | ^^^^^^^^^^
+
+error: aborting due to previous error; 1 warning emitted
+
--- /dev/null
+// check-pass
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+fn test<const N: [u8; 1 + 2]>() {}
+
+struct Foo<const N: [u8; 1 + 2]>;
+
+fn main() {}
--- /dev/null
+// check-pass
+#![feature(const_generics)]
+#![allow(dead_code, incomplete_features)]
+
+#[derive(PartialEq, Eq)]
+enum IceEnum {
+ Variant
+}
+
+struct IceStruct;
+
+impl IceStruct {
+ fn ice_struct_fn<const I: IceEnum>() {}
+}
+
+fn main() {
+ IceStruct::ice_struct_fn::<{IceEnum::Variant}>();
+}
--- /dev/null
+// check-pass
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+struct X;
+
+impl X {
+ pub fn getn<const N: usize>(&self) -> [u8; N] {
+ getn::<N>()
+ }
+}
+
+fn getn<const N: usize>() -> [u8; N] {
+ unsafe {
+ std::mem::zeroed()
+ }
+}
+
+fn main() {
+ // works
+ let [a,b,c] = getn::<3>();
+
+ // cannot pattern-match on an array without a fixed length
+ let [a,b,c] = X.getn::<3>();
+
+ // mismatched types, expected array `[u8; 3]` found array `[u8; _]`
+ let arr: [u8; 3] = X.getn::<3>();
+}
--- /dev/null
+// check-pass
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+struct A<const N: usize>;
+
+struct X;
+
+impl X {
+ fn inner<const N: usize>() -> A<N> {
+ outer::<N>()
+ }
+}
+
+fn outer<const N: usize>() -> A<N> {
+ A
+}
+
+fn main() {
+ let i: A<3usize> = outer::<3usize>();
+ let o: A<3usize> = X::inner::<3usize>();
+}
--- /dev/null
+// check-pass
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+struct Struct<const N: usize>;
+
+impl<const N: usize> Struct<N> {
+ fn method<const M: usize>(&self) {}
+}
+
+fn test<const N: usize, const M: usize>(x: Struct<N>) {
+ Struct::<N>::method::<M>(&x);
+ x.method::<N>();
+}
+
+fn main() {}
--- /dev/null
+// check-pass
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+use std::marker::PhantomData;
+
+// This namespace is necessary for the ICE to trigger
+struct Namespace;
+
+impl Namespace {
+ pub fn const_chunks_exact<T, const N: usize>() -> ConstChunksExact<'static, T, N> {
+ ConstChunksExact { inner: PhantomData }
+ }
+}
+
+
+#[derive(Debug)]
+pub struct ConstChunksExact<'a, T, const N: usize> {
+ inner: PhantomData<&'a T>
+}
+
+impl <'a, T, const N: usize> Iterator for ConstChunksExact<'a, T, { N }> {
+ type Item = &'a [T; N];
+
+ fn next(&mut self) -> Option<Self::Item> {
+ unreachable!()
+ }
+}
+
+fn main() {
+ let mut chunks = Namespace::const_chunks_exact::<i32, 3usize>();
+ let _next: &[i32; 3] = chunks.next().unwrap();
+}
--- /dev/null
+// run-pass
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+struct Foo {
+ i: i32,
+}
+
+trait Get<'a, const N: &'static str> {
+ type Target: 'a;
+
+ fn get(&'a self) -> &'a Self::Target;
+}
+
+impl Foo {
+ fn ask<'a, const N: &'static str>(&'a self) -> &'a <Self as Get<N>>::Target
+ where
+ Self: Get<'a, N>,
+ {
+ self.get()
+ }
+}
+
+impl<'a> Get<'a, "int"> for Foo {
+ type Target = i32;
+
+ fn get(&'a self) -> &'a Self::Target {
+ &self.i
+ }
+}
+
+fn main() {
+ let foo = Foo { i: 123 };
+ assert_eq!(foo.ask::<"int">(), &123);
+}
--- /dev/null
+// run-pass
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+use std::mem::MaybeUninit;
+
+trait CollectSlice<'a>: Iterator {
+ fn inner_array<const N: usize>(&mut self) -> [Self::Item; N];
+
+ fn collect_array<const N: usize>(&mut self) -> [Self::Item; N] {
+ let result = self.inner_array();
+ assert!(self.next().is_none());
+ result
+ }
+}
+
+impl<'a, I: ?Sized> CollectSlice<'a> for I
+where
+ I: Iterator,
+{
+ fn inner_array<const N: usize>(&mut self) -> [Self::Item; N] {
+ let mut result: [MaybeUninit<Self::Item>; N] =
+ unsafe { MaybeUninit::uninit().assume_init() };
+
+ let mut count = 0;
+ for (dest, item) in result.iter_mut().zip(self) {
+ *dest = MaybeUninit::new(item);
+ count += 1;
+ }
+
+ assert_eq!(N, count);
+
+ let temp_ptr: *const [MaybeUninit<Self::Item>; N] = &result;
+ unsafe { std::ptr::read(temp_ptr as *const [Self::Item; N]) }
+ }
+}
+
+fn main() {
+ let mut foos = [0u64; 9].iter().cloned();
+ let _bar: [u64; 9] = foos.collect_array::<9_usize>();
+}
--- /dev/null
+// check-pass
+#![feature(const_generics)]
+#![allow(incomplete_features)]
+
+trait Foo<'a, A>: Iterator<Item=A> {
+ fn bar<const N: usize>(&mut self) -> *const [A; N];
+}
+
+impl<'a, A, I: ?Sized> Foo<'a, A> for I where I: Iterator<Item=A> {
+ fn bar<const N: usize>(&mut self) -> *const [A; N] {
+ std::ptr::null()
+ }
+}
+
+fn main() {
+ (0_u8 .. 10).bar::<10_usize>();
+}
// check-pass
+// compile-flags: --crate-type lib
+
+#![warn(unconditional_panic)]
+
pub struct Fixed64(i64);
-pub fn div(f: Fixed64) {
- f.0 / 0;
+// HACK: this test passes only because this is a const fn that is written to metadata
+pub const fn div(f: Fixed64) {
+ f.0 / 0; //~ WARN will panic at runtime
}
-
-fn main() {}
--- /dev/null
+warning: this operation will panic at runtime
+ --> $DIR/ice-assert-fail-div-by-zero.rs:11:5
+ |
+LL | f.0 / 0;
+ | ^^^^^^^ attempt to divide _ by zero
+ |
+note: the lint level is defined here
+ --> $DIR/ice-assert-fail-div-by-zero.rs:5:9
+ |
+LL | #![warn(unconditional_panic)]
+ | ^^^^^^^^^^^^^^^^^^^
+
+warning: 1 warning emitted
+
--- /dev/null
+// run-pass
+
+#![feature(const_option)]
+
+const X: Option<i32> = Some(32);
+const Y: Option<&i32> = X.as_ref();
+
+const IS_SOME: bool = X.is_some();
+const IS_NONE: bool = Y.is_none();
+
+fn main() {
+ assert!(IS_SOME);
+ assert!(!IS_NONE)
+}
--- /dev/null
+// run-pass
+
+#![feature(const_fn)]
+#![feature(const_unreachable_unchecked)]
+
+const unsafe fn foo(x: bool) -> bool {
+ match x {
+ true => true,
+ false => std::hint::unreachable_unchecked(),
+ }
+}
+
+const BAR: bool = unsafe { foo(true) };
+
+fn main() {
+ assert_eq!(BAR, true);
+}
--- /dev/null
+// build-fail
+
+#![feature(const_fn)]
+#![feature(const_unreachable_unchecked)]
+
+const unsafe fn foo(x: bool) -> bool {
+ match x {
+ true => true,
+ false => std::hint::unreachable_unchecked(),
+ }
+}
+
+#[warn(const_err)]
+const BAR: bool = unsafe { foo(false) };
+
+fn main() {
+ assert_eq!(BAR, true);
+ //~^ ERROR E0080
+ //~| ERROR erroneous constant
+}
--- /dev/null
+warning: any use of this value will cause an error
+ --> $SRC_DIR/libcore/hint.rs:LL:COL
+ |
+LL | unsafe { intrinsics::unreachable() }
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^
+ | |
+ | entering unreachable code
+ | inside `std::hint::unreachable_unchecked` at $SRC_DIR/libcore/hint.rs:LL:COL
+ | inside `foo` at $DIR/const_unsafe_unreachable_ub.rs:9:18
+ | inside `BAR` at $DIR/const_unsafe_unreachable_ub.rs:14:28
+ |
+ ::: $DIR/const_unsafe_unreachable_ub.rs:14:1
+ |
+LL | const BAR: bool = unsafe { foo(false) };
+ | ----------------------------------------
+ |
+note: the lint level is defined here
+ --> $DIR/const_unsafe_unreachable_ub.rs:13:8
+ |
+LL | #[warn(const_err)]
+ | ^^^^^^^^^
+
+error[E0080]: evaluation of constant expression failed
+ --> $DIR/const_unsafe_unreachable_ub.rs:17:3
+ |
+LL | assert_eq!(BAR, true);
+ | ^^^^^^^^^^^---^^^^^^^^
+ | |
+ | referenced constant has errors
+ |
+ = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: erroneous constant used
+ --> $DIR/const_unsafe_unreachable_ub.rs:17:3
+ |
+LL | assert_eq!(BAR, true);
+ | ^^^^^^^^^^^^^^^^^^^^^^ referenced constant has errors
+ |
+ = note: `#[deny(const_err)]` on by default
+ = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
+
+error: aborting due to 2 previous errors; 1 warning emitted
+
+For more information about this error, try `rustc --explain E0080`.
--- /dev/null
+// run-pass
+
+#![feature(const_panic)]
+#![feature(duration_consts_2)]
+#![feature(div_duration)]
+
+use std::time::Duration;
+
+fn duration() {
+ const ZERO : Duration = Duration::new(0, 0);
+ assert_eq!(ZERO, Duration::from_secs(0));
+
+ const ONE : Duration = Duration::new(0, 1);
+ assert_eq!(ONE, Duration::from_nanos(1));
+
+ const MAX : Duration = Duration::new(u64::MAX, 1_000_000_000 - 1);
+
+ const MAX_ADD_ZERO : Option<Duration> = MAX.checked_add(ZERO);
+ assert_eq!(MAX_ADD_ZERO, Some(MAX));
+
+ const MAX_ADD_ONE : Option<Duration> = MAX.checked_add(ONE);
+ assert_eq!(MAX_ADD_ONE, None);
+
+ const ONE_SUB_ONE : Option<Duration> = ONE.checked_sub(ONE);
+ assert_eq!(ONE_SUB_ONE, Some(ZERO));
+
+ const ZERO_SUB_ONE : Option<Duration> = ZERO.checked_sub(ONE);
+ assert_eq!(ZERO_SUB_ONE, None);
+
+ const ONE_MUL_ONE : Option<Duration> = ONE.checked_mul(1);
+ assert_eq!(ONE_MUL_ONE, Some(ONE));
+
+ const MAX_MUL_TWO : Option<Duration> = MAX.checked_mul(2);
+ assert_eq!(MAX_MUL_TWO, None);
+
+ const ONE_DIV_ONE : Option<Duration> = ONE.checked_div(1);
+ assert_eq!(ONE_DIV_ONE, Some(ONE));
+
+ const ONE_DIV_ZERO : Option<Duration> = ONE.checked_div(0);
+ assert_eq!(ONE_DIV_ZERO, None);
+
+ const MAX_AS_F32 : f32 = MAX.as_secs_f32();
+ assert_eq!(MAX_AS_F32, 18446744000000000000.0_f32);
+
+ const MAX_AS_F64 : f64 = MAX.as_secs_f64();
+ assert_eq!(MAX_AS_F64, 18446744073709552000.0_f64);
+
+ const ONE_AS_F32 : f32 = ONE.div_duration_f32(ONE);
+ assert_eq!(ONE_AS_F32, 1.0_f32);
+
+ const ONE_AS_F64 : f64 = ONE.div_duration_f64(ONE);
+ assert_eq!(ONE_AS_F64, 1.0_f64);
+}
+
+fn main() {
+ duration();
+}
mod y {
use Foo;
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
pub fn use_char_assoc() {
// Careful here: in the representation, <char as Foo>::T gets
// normalized away, so at a certain point we had no edge to
error: OK
--> $DIR/dep-graph-assoc-type-codegen.rs:28:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to previous error
use x;
// These dependencies SHOULD exist:
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
pub fn y() {
x::x();
}
// These are expected to yield errors, because changes to `x`
// affect the BODY of `y`, but not its signature.
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR no path
+ #[rustc_then_this_would_need(typeck)] //~ ERROR no path
pub fn z() {
y::y();
}
error: OK
--> $DIR/dep-graph-caller-callee.rs:20:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: no path from `x::x` to `typeck_tables_of`
+error: no path from `x::x` to `typeck`
--> $DIR/dep-graph-caller-callee.rs:31:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 2 previous errors
}
#[rustc_then_this_would_need(fn_sig)] //~ ERROR OK
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
fn some_fn(x: WillChange) { }
#[rustc_then_this_would_need(fn_sig)] //~ ERROR OK
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
fn new_foo(x: u32, y: u32) -> WillChange {
WillChange { x: x, y: y }
}
#[rustc_then_this_would_need(type_of)] //~ ERROR OK
impl WillChange {
#[rustc_then_this_would_need(fn_sig)] //~ ERROR OK
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
fn new(x: u32, y: u32) -> WillChange { loop { } }
}
#[rustc_then_this_would_need(type_of)] //~ ERROR OK
impl WillChange {
#[rustc_then_this_would_need(fn_sig)] //~ ERROR OK
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
fn method(&self, x: u32) { }
}
fn b(x: WontChange) { }
#[rustc_then_this_would_need(fn_sig)] //~ ERROR no path from `WillChange`
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR no path from `WillChange`
+ #[rustc_then_this_would_need(typeck)] //~ ERROR no path from `WillChange`
fn c(x: u32) { }
}
error: OK
--> $DIR/dep-graph-struct-signature.rs:36:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: OK
--> $DIR/dep-graph-struct-signature.rs:39:5
error: OK
--> $DIR/dep-graph-struct-signature.rs:40:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: OK
--> $DIR/dep-graph-struct-signature.rs:45:5
LL | #[rustc_then_this_would_need(fn_sig)]
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: no path from `WillChange` to `typeck_tables_of`
+error: no path from `WillChange` to `typeck`
--> $DIR/dep-graph-struct-signature.rs:84:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: OK
--> $DIR/dep-graph-struct-signature.rs:31:9
error: OK
--> $DIR/dep-graph-struct-signature.rs:48:9
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: OK
--> $DIR/dep-graph-struct-signature.rs:54:9
error: OK
--> $DIR/dep-graph-struct-signature.rs:55:9
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 22 previous errors
mod y {
use {Foo, Bar};
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
pub fn with_char() {
char::method('a');
}
mod z {
use y;
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR no path
+ #[rustc_then_this_would_need(typeck)] //~ ERROR no path
pub fn z() {
y::with_char();
}
error: OK
--> $DIR/dep-graph-trait-impl-two-traits-same-method.rs:32:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: no path from `x::<impl Foo for u32>` to `typeck_tables_of`
+error: no path from `x::<impl Foo for u32>` to `typeck`
--> $DIR/dep-graph-trait-impl-two-traits-same-method.rs:41:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 2 previous errors
mod y {
use {Foo, Bar};
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR no path
+ #[rustc_then_this_would_need(typeck)] //~ ERROR no path
pub fn call_bar() {
char::bar('a');
}
mod z {
use y;
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR no path
+ #[rustc_then_this_would_need(typeck)] //~ ERROR no path
pub fn z() {
y::call_bar();
}
-error: no path from `x::<impl Foo for char>` to `typeck_tables_of`
+error: no path from `x::<impl Foo for char>` to `typeck`
--> $DIR/dep-graph-trait-impl-two-traits.rs:31:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: no path from `x::<impl Foo for char>` to `typeck_tables_of`
+error: no path from `x::<impl Foo for char>` to `typeck`
--> $DIR/dep-graph-trait-impl-two-traits.rs:40:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 2 previous errors
mod y {
use Foo;
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
pub fn with_char() {
char::method('a');
}
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
pub fn take_foo_with_char() {
take_foo::<char>('a');
}
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
pub fn with_u32() {
u32::method(22);
}
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
pub fn take_foo_with_u32() {
take_foo::<u32>(22);
}
// These are expected to yield errors, because changes to `x`
// affect the BODY of `y`, but not its signature.
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR no path
+ #[rustc_then_this_would_need(typeck)] //~ ERROR no path
pub fn z() {
y::with_char();
y::with_u32();
error: OK
--> $DIR/dep-graph-trait-impl.rs:27:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: OK
--> $DIR/dep-graph-trait-impl.rs:32:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: OK
--> $DIR/dep-graph-trait-impl.rs:37:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: OK
--> $DIR/dep-graph-trait-impl.rs:42:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
-error: no path from `x::<impl Foo for char>` to `typeck_tables_of`
+error: no path from `x::<impl Foo for char>` to `typeck`
--> $DIR/dep-graph-trait-impl.rs:55:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 5 previous errors
#[rustc_then_this_would_need(type_of)] //~ ERROR no path
impl SomeType {
#[rustc_then_this_would_need(fn_sig)] //~ ERROR OK
- #[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+ #[rustc_then_this_would_need(typeck)] //~ ERROR OK
fn method(&self, _: TypeAlias) {}
}
type TypeAlias2 = TypeAlias;
#[rustc_then_this_would_need(fn_sig)] //~ ERROR OK
-#[rustc_then_this_would_need(typeck_tables_of)] //~ ERROR OK
+#[rustc_then_this_would_need(typeck)] //~ ERROR OK
fn function(_: TypeAlias) {
}
error: OK
--> $DIR/dep-graph-type-alias.rs:52:1
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: OK
--> $DIR/dep-graph-type-alias.rs:35:5
error: OK
--> $DIR/dep-graph-type-alias.rs:44:5
|
-LL | #[rustc_then_this_would_need(typeck_tables_of)]
- | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+LL | #[rustc_then_this_would_need(typeck)]
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: aborting due to 12 previous errors
--- /dev/null
+#![feature(const_generics)]
+//~^ WARN the feature `const_generics` is incomplete
+
+fn function_with_str<'a, const STRING: &'a str>() {} //~ ERROR E0771
+
+fn main() {
+ function_with_str::<"Hello, world!">()
+}
--- /dev/null
+warning: the feature `const_generics` is incomplete and may not be safe to use and/or cause compiler crashes
+ --> $DIR/E0771.rs:1:12
+ |
+LL | #![feature(const_generics)]
+ | ^^^^^^^^^^^^^^
+ |
+ = note: `#[warn(incomplete_features)]` on by default
+ = note: see issue #44580 <https://github.com/rust-lang/rust/issues/44580> for more information
+
+error[E0771]: use of non-static lifetime `'a` in const generic
+ --> $DIR/E0771.rs:4:41
+ |
+LL | fn function_with_str<'a, const STRING: &'a str>() {}
+ | ^^
+ |
+ = note: for more information, see issue #74052 <https://github.com/rust-lang/rust/issues/74052>
+
+error: aborting due to previous error; 1 warning emitted
+
+For more information about this error, try `rustc --explain E0771`.
// run-pass
#![allow(unused_variables)]
-// compile-flags: --extern LooksLikeExternCrate
+// compile-flags: --extern LooksLikeExternCrate=/path/to/nowhere
mod m {
pub struct LooksLikeExternCrate;
--- /dev/null
+#[doc(spotlight)] //~ ERROR: `#[doc(spotlight)]` is experimental
+trait SomeTrait {}
+
+fn main() {}
--- /dev/null
+error[E0658]: `#[doc(spotlight)]` is experimental
+ --> $DIR/feature-gate-doc_spotlight.rs:1:1
+ |
+LL | #[doc(spotlight)]
+ | ^^^^^^^^^^^^^^^^^
+ |
+ = note: see issue #45040 <https://github.com/rust-lang/rust/issues/45040> for more information
+ = help: add `#![feature(doc_spotlight)]` to the crate attributes to enable
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0658`.
}
fn cycle(
- gen: impl Generator<()> + Unpin + DiscriminantKind<Discriminant = i32>,
- expected_max_discr: i32
+ gen: impl Generator<()> + Unpin + DiscriminantKind<Discriminant = u32>,
+ expected_max_discr: u32
) {
let mut gen = Box::pin(gen);
let mut max_discr = 0;
fn main () {
let sr: Vec<(u32, _, _) = vec![];
//~^ ERROR expected one of `,` or `>`, found `=`
- //~| ERROR expected value, found struct `Vec`
- //~| ERROR mismatched types
- //~| ERROR invalid left-hand side of assignment
- //~| ERROR expected expression, found reserved identifier `_`
- //~| ERROR expected expression, found reserved identifier `_`
let sr2: Vec<(u32, _, _)> = sr.iter().map(|(faction, th_sender, th_receiver)| {}).collect();
- //~^ ERROR no method named `iter` found
+ //~^ ERROR a value of type `std::vec::Vec<(u32, _, _)>` cannot be built
}
-error: expected expression, found reserved identifier `_`
- --> $DIR/issue-34334.rs:2:23
- |
-LL | let sr: Vec<(u32, _, _) = vec![];
- | ^ expected expression
-
-error: expected expression, found reserved identifier `_`
- --> $DIR/issue-34334.rs:2:26
- |
-LL | let sr: Vec<(u32, _, _) = vec![];
- | ^ expected expression
-
error: expected one of `,` or `>`, found `=`
--> $DIR/issue-34334.rs:2:29
|
LL | let sr: Vec<(u32, _, _) = vec![];
- | --- ^ expected one of `,` or `>`
- | | |
- | | help: use `=` if you meant to assign
+ | -- ^ expected one of `,` or `>`
+ | |
| while parsing the type for `sr`
-error[E0423]: expected value, found struct `Vec`
- --> $DIR/issue-34334.rs:2:13
- |
-LL | let sr: Vec<(u32, _, _) = vec![];
- | ^^^ help: use struct literal syntax instead: `Vec { buf: val, len: val }`
-
-error[E0308]: mismatched types
- --> $DIR/issue-34334.rs:2:31
- |
-LL | let sr: Vec<(u32, _, _) = vec![];
- | ^^^^^^ expected `bool`, found struct `std::vec::Vec`
- |
- = note: expected type `bool`
- found struct `std::vec::Vec<_>`
- = note: this error originates in a macro (in Nightly builds, run with -Z macro-backtrace for more info)
-
-error[E0070]: invalid left-hand side of assignment
- --> $DIR/issue-34334.rs:2:29
- |
-LL | let sr: Vec<(u32, _, _) = vec![];
- | --------------- ^
- | |
- | cannot assign to this expression
-
-error[E0599]: no method named `iter` found for unit type `()` in the current scope
- --> $DIR/issue-34334.rs:9:36
+error[E0277]: a value of type `std::vec::Vec<(u32, _, _)>` cannot be built from an iterator over elements of type `()`
+ --> $DIR/issue-34334.rs:4:87
|
LL | let sr2: Vec<(u32, _, _)> = sr.iter().map(|(faction, th_sender, th_receiver)| {}).collect();
- | ^^^^ method not found in `()`
+ | ^^^^^^^ value of type `std::vec::Vec<(u32, _, _)>` cannot be built from `std::iter::Iterator<Item=()>`
+ |
+ = help: the trait `std::iter::FromIterator<()>` is not implemented for `std::vec::Vec<(u32, _, _)>`
-error: aborting due to 7 previous errors
+error: aborting due to 2 previous errors
-Some errors have detailed explanations: E0070, E0308, E0423, E0599.
-For more information about an error, try `rustc --explain E0070`.
+For more information about this error, try `rustc --explain E0277`.
// edition:2018
// run-pass
// ignore-emscripten no threads support
-// ignore-sgx no thread sleep support
use std::thread;
use std::time::Duration;
#![allow(unused_must_use)]
#![allow(deprecated)]
// ignore-emscripten no threads support
-// ignore-sgx no thread sleep support
use std::sync::mpsc::{TryRecvError, channel};
use std::thread;
--- /dev/null
+// check-pass
+
+trait Trait<T> {
+ const ASSOC_CONST: usize = 0;
+}
+
+impl Trait<()> for u8 {}
+
+// `u8::ASSOC_CONST` is resolved today, but will be ambiguous
+// under lazy normalization.
+fn foo<T, U>() -> [(T, U); u8::ASSOC_CONST]
+where
+ u8: Trait<T> + Trait<U>,
+{
+ todo!()
+}
+
+fn main() {}
--- /dev/null
+// check-pass
+
+// If we allow the parent generics here without using lazy normalization
+// this results in a cycle error.
+struct Foo<T, U>(T, U);
+
+impl<T> From<[u8; 1 + 1]> for Foo<T, [u8; 1 + 1]> {
+ fn from(value: [u8; 1 + 1]) -> Foo<T, [u8; 1 + 1]> {
+ todo!();
+ }
+}
+
+fn break_me<T>()
+where
+ [u8; 1 + 1]: From<[u8; 1 + 1]>
+{}
+
+fn main() {}
//~^ ERROR: uses type `str`
pub extern "C" fn box_type(p: Box<u32>) { }
-//~^ ERROR uses type `std::boxed::Box<u32>`
+
+pub extern "C" fn opt_box_type(p: Option<Box<u32>>) { }
pub extern "C" fn char_type(p: char) { }
//~^ ERROR uses type `char`
//~^ ERROR uses type `fn()`
pub extern "C" fn fn_contained(p: RustBadRet) { }
-//~^ ERROR: uses type `std::boxed::Box<u32>`
pub extern "C" fn transparent_i128(p: TransparentI128) { }
//~^ ERROR: uses type `i128`
//~^ ERROR: uses type `str`
pub extern "C" fn transparent_fn(p: TransparentBadFn) { }
-//~^ ERROR: uses type `std::boxed::Box<u32>`
pub extern "C" fn good3(fptr: Option<extern fn()>) { }
= help: consider using `*const u8` and a length instead
= note: string slices have no C equivalent
-error: `extern` fn uses type `std::boxed::Box<u32>`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:73:31
- |
-LL | pub extern "C" fn box_type(p: Box<u32>) { }
- | ^^^^^^^^ not FFI-safe
- |
- = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
- = note: this struct has unspecified layout
-
error: `extern` fn uses type `char`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:76:32
+ --> $DIR/lint-ctypes-fn.rs:77:32
|
LL | pub extern "C" fn char_type(p: char) { }
| ^^^^ not FFI-safe
= note: the `char` type has no C equivalent
error: `extern` fn uses type `i128`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:79:32
+ --> $DIR/lint-ctypes-fn.rs:80:32
|
LL | pub extern "C" fn i128_type(p: i128) { }
| ^^^^ not FFI-safe
= note: 128-bit integers don't currently have a known stable ABI
error: `extern` fn uses type `u128`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:82:32
+ --> $DIR/lint-ctypes-fn.rs:83:32
|
LL | pub extern "C" fn u128_type(p: u128) { }
| ^^^^ not FFI-safe
= note: 128-bit integers don't currently have a known stable ABI
error: `extern` fn uses type `(i32, i32)`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:85:33
+ --> $DIR/lint-ctypes-fn.rs:86:33
|
LL | pub extern "C" fn tuple_type(p: (i32, i32)) { }
| ^^^^^^^^^^ not FFI-safe
= note: tuples have unspecified layout
error: `extern` fn uses type `(i32, i32)`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:88:34
+ --> $DIR/lint-ctypes-fn.rs:89:34
|
LL | pub extern "C" fn tuple_type2(p: I32Pair) { }
| ^^^^^^^ not FFI-safe
= note: tuples have unspecified layout
error: `extern` fn uses type `ZeroSize`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:91:32
+ --> $DIR/lint-ctypes-fn.rs:92:32
|
LL | pub extern "C" fn zero_size(p: ZeroSize) { }
| ^^^^^^^^ not FFI-safe
| ^^^^^^^^^^^^^^^^^^^^
error: `extern` fn uses type `ZeroSizeWithPhantomData`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:94:40
+ --> $DIR/lint-ctypes-fn.rs:95:40
|
LL | pub extern "C" fn zero_size_phantom(p: ZeroSizeWithPhantomData) { }
| ^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: `extern` fn uses type `std::marker::PhantomData<bool>`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:97:51
+ --> $DIR/lint-ctypes-fn.rs:98:51
|
LL | pub extern "C" fn zero_size_phantom_toplevel() -> PhantomData<bool> {
| ^^^^^^^^^^^^^^^^^ not FFI-safe
= note: composed only of `PhantomData`
error: `extern` fn uses type `fn()`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:102:30
+ --> $DIR/lint-ctypes-fn.rs:103:30
|
LL | pub extern "C" fn fn_type(p: RustFn) { }
| ^^^^^^ not FFI-safe
= note: this function pointer has Rust-specific calling convention
error: `extern` fn uses type `fn()`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:105:31
+ --> $DIR/lint-ctypes-fn.rs:106:31
|
LL | pub extern "C" fn fn_type2(p: fn()) { }
| ^^^^ not FFI-safe
= help: consider using an `extern fn(...) -> ...` function pointer instead
= note: this function pointer has Rust-specific calling convention
-error: `extern` fn uses type `std::boxed::Box<u32>`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:108:35
- |
-LL | pub extern "C" fn fn_contained(p: RustBadRet) { }
- | ^^^^^^^^^^ not FFI-safe
- |
- = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
- = note: this struct has unspecified layout
-
error: `extern` fn uses type `i128`, which is not FFI-safe
--> $DIR/lint-ctypes-fn.rs:111:39
|
= help: consider using `*const u8` and a length instead
= note: string slices have no C equivalent
-error: `extern` fn uses type `std::boxed::Box<u32>`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:117:37
- |
-LL | pub extern "C" fn transparent_fn(p: TransparentBadFn) { }
- | ^^^^^^^^^^^^^^^^ not FFI-safe
- |
- = help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
- = note: this struct has unspecified layout
-
error: `extern` fn uses type `std::marker::PhantomData<bool>`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:161:43
+ --> $DIR/lint-ctypes-fn.rs:160:43
|
LL | pub extern "C" fn unused_generic2<T>() -> PhantomData<bool> {
| ^^^^^^^^^^^^^^^^^ not FFI-safe
= note: composed only of `PhantomData`
error: `extern` fn uses type `std::vec::Vec<T>`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:174:39
+ --> $DIR/lint-ctypes-fn.rs:173:39
|
LL | pub extern "C" fn used_generic4<T>(x: Vec<T>) { }
| ^^^^^^ not FFI-safe
= note: this struct has unspecified layout
error: `extern` fn uses type `std::vec::Vec<T>`, which is not FFI-safe
- --> $DIR/lint-ctypes-fn.rs:177:41
+ --> $DIR/lint-ctypes-fn.rs:176:41
|
LL | pub extern "C" fn used_generic5<T>() -> Vec<T> {
| ^^^^^^ not FFI-safe
= help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
= note: this struct has unspecified layout
-error: aborting due to 20 previous errors
+error: aborting due to 17 previous errors
use std::marker::PhantomData;
+trait Bar { }
trait Mirror { type It: ?Sized; }
impl<T: ?Sized> Mirror for T { type It = Self; }
#[repr(C)]
pub fn slice_type(p: &[u32]); //~ ERROR: uses type `[u32]`
pub fn str_type(p: &str); //~ ERROR: uses type `str`
pub fn box_type(p: Box<u32>); //~ ERROR uses type `std::boxed::Box<u32>`
+ pub fn opt_box_type(p: Option<Box<u32>>);
+ //~^ ERROR uses type `std::option::Option<std::boxed::Box<u32>>`
pub fn char_type(p: char); //~ ERROR uses type `char`
pub fn i128_type(p: i128); //~ ERROR uses type `i128`
pub fn u128_type(p: u128); //~ ERROR uses type `u128`
- pub fn trait_type(p: &dyn Clone); //~ ERROR uses type `dyn std::clone::Clone`
+ pub fn trait_type(p: &dyn Bar); //~ ERROR uses type `dyn Bar`
pub fn tuple_type(p: (i32, i32)); //~ ERROR uses type `(i32, i32)`
pub fn tuple_type2(p: I32Pair); //~ ERROR uses type `(i32, i32)`
pub fn zero_size(p: ZeroSize); //~ ERROR uses type `ZeroSize`
error: `extern` block uses type `Foo`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:46:28
+ --> $DIR/lint-ctypes.rs:47:28
|
LL | pub fn ptr_type1(size: *const Foo);
| ^^^^^^^^^^ not FFI-safe
= help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
= note: this struct has unspecified layout
note: the type is defined here
- --> $DIR/lint-ctypes.rs:24:1
+ --> $DIR/lint-ctypes.rs:25:1
|
LL | pub struct Foo;
| ^^^^^^^^^^^^^^^
error: `extern` block uses type `Foo`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:47:28
+ --> $DIR/lint-ctypes.rs:48:28
|
LL | pub fn ptr_type2(size: *const Foo);
| ^^^^^^^^^^ not FFI-safe
= help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
= note: this struct has unspecified layout
note: the type is defined here
- --> $DIR/lint-ctypes.rs:24:1
+ --> $DIR/lint-ctypes.rs:25:1
|
LL | pub struct Foo;
| ^^^^^^^^^^^^^^^
error: `extern` block uses type `[u32]`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:48:26
+ --> $DIR/lint-ctypes.rs:49:26
|
LL | pub fn slice_type(p: &[u32]);
| ^^^^^^ not FFI-safe
= note: slices have no C equivalent
error: `extern` block uses type `str`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:49:24
+ --> $DIR/lint-ctypes.rs:50:24
|
LL | pub fn str_type(p: &str);
| ^^^^ not FFI-safe
= note: string slices have no C equivalent
error: `extern` block uses type `std::boxed::Box<u32>`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:50:24
+ --> $DIR/lint-ctypes.rs:51:24
|
LL | pub fn box_type(p: Box<u32>);
| ^^^^^^^^ not FFI-safe
= help: consider adding a `#[repr(C)]` or `#[repr(transparent)]` attribute to this struct
= note: this struct has unspecified layout
+error: `extern` block uses type `std::option::Option<std::boxed::Box<u32>>`, which is not FFI-safe
+ --> $DIR/lint-ctypes.rs:52:28
+ |
+LL | pub fn opt_box_type(p: Option<Box<u32>>);
+ | ^^^^^^^^^^^^^^^^ not FFI-safe
+ |
+ = help: consider adding a `#[repr(C)]`, `#[repr(transparent)]`, or integer `#[repr(...)]` attribute to this enum
+ = note: enum has no representation hint
+
error: `extern` block uses type `char`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:51:25
+ --> $DIR/lint-ctypes.rs:54:25
|
LL | pub fn char_type(p: char);
| ^^^^ not FFI-safe
= note: the `char` type has no C equivalent
error: `extern` block uses type `i128`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:52:25
+ --> $DIR/lint-ctypes.rs:55:25
|
LL | pub fn i128_type(p: i128);
| ^^^^ not FFI-safe
= note: 128-bit integers don't currently have a known stable ABI
error: `extern` block uses type `u128`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:53:25
+ --> $DIR/lint-ctypes.rs:56:25
|
LL | pub fn u128_type(p: u128);
| ^^^^ not FFI-safe
|
= note: 128-bit integers don't currently have a known stable ABI
-error: `extern` block uses type `dyn std::clone::Clone`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:54:26
+error: `extern` block uses type `dyn Bar`, which is not FFI-safe
+ --> $DIR/lint-ctypes.rs:57:26
|
-LL | pub fn trait_type(p: &dyn Clone);
- | ^^^^^^^^^^ not FFI-safe
+LL | pub fn trait_type(p: &dyn Bar);
+ | ^^^^^^^^ not FFI-safe
|
= note: trait objects have no C equivalent
error: `extern` block uses type `(i32, i32)`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:55:26
+ --> $DIR/lint-ctypes.rs:58:26
|
LL | pub fn tuple_type(p: (i32, i32));
| ^^^^^^^^^^ not FFI-safe
= note: tuples have unspecified layout
error: `extern` block uses type `(i32, i32)`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:56:27
+ --> $DIR/lint-ctypes.rs:59:27
|
LL | pub fn tuple_type2(p: I32Pair);
| ^^^^^^^ not FFI-safe
= note: tuples have unspecified layout
error: `extern` block uses type `ZeroSize`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:57:25
+ --> $DIR/lint-ctypes.rs:60:25
|
LL | pub fn zero_size(p: ZeroSize);
| ^^^^^^^^ not FFI-safe
= help: consider adding a member to this struct
= note: this struct has no fields
note: the type is defined here
- --> $DIR/lint-ctypes.rs:20:1
+ --> $DIR/lint-ctypes.rs:21:1
|
LL | pub struct ZeroSize;
| ^^^^^^^^^^^^^^^^^^^^
error: `extern` block uses type `ZeroSizeWithPhantomData`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:58:33
+ --> $DIR/lint-ctypes.rs:61:33
|
LL | pub fn zero_size_phantom(p: ZeroSizeWithPhantomData);
| ^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe
|
= note: composed only of `PhantomData`
note: the type is defined here
- --> $DIR/lint-ctypes.rs:43:1
+ --> $DIR/lint-ctypes.rs:44:1
|
LL | pub struct ZeroSizeWithPhantomData(::std::marker::PhantomData<i32>);
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
error: `extern` block uses type `std::marker::PhantomData<bool>`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:61:12
+ --> $DIR/lint-ctypes.rs:64:12
|
LL | -> ::std::marker::PhantomData<bool>;
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ not FFI-safe
= note: composed only of `PhantomData`
error: `extern` block uses type `fn()`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:62:23
+ --> $DIR/lint-ctypes.rs:65:23
|
LL | pub fn fn_type(p: RustFn);
| ^^^^^^ not FFI-safe
= note: this function pointer has Rust-specific calling convention
error: `extern` block uses type `fn()`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:63:24
+ --> $DIR/lint-ctypes.rs:66:24
|
LL | pub fn fn_type2(p: fn());
| ^^^^ not FFI-safe
= note: this function pointer has Rust-specific calling convention
error: `extern` block uses type `std::boxed::Box<u32>`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:64:28
+ --> $DIR/lint-ctypes.rs:67:28
|
LL | pub fn fn_contained(p: RustBadRet);
| ^^^^^^^^^^ not FFI-safe
= note: this struct has unspecified layout
error: `extern` block uses type `i128`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:65:32
+ --> $DIR/lint-ctypes.rs:68:32
|
LL | pub fn transparent_i128(p: TransparentI128);
| ^^^^^^^^^^^^^^^ not FFI-safe
= note: 128-bit integers don't currently have a known stable ABI
error: `extern` block uses type `str`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:66:31
+ --> $DIR/lint-ctypes.rs:69:31
|
LL | pub fn transparent_str(p: TransparentStr);
| ^^^^^^^^^^^^^^ not FFI-safe
= note: string slices have no C equivalent
error: `extern` block uses type `std::boxed::Box<u32>`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:67:30
+ --> $DIR/lint-ctypes.rs:70:30
|
LL | pub fn transparent_fn(p: TransparentBadFn);
| ^^^^^^^^^^^^^^^^ not FFI-safe
= note: this struct has unspecified layout
error: `extern` block uses type `[u8; 8]`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:68:27
+ --> $DIR/lint-ctypes.rs:71:27
|
LL | pub fn raw_array(arr: [u8; 8]);
| ^^^^^^^ not FFI-safe
= note: passing raw arrays by value is not FFI-safe
error: `extern` block uses type `u128`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:70:34
+ --> $DIR/lint-ctypes.rs:73:34
|
LL | pub static static_u128_type: u128;
| ^^^^ not FFI-safe
= note: 128-bit integers don't currently have a known stable ABI
error: `extern` block uses type `u128`, which is not FFI-safe
- --> $DIR/lint-ctypes.rs:71:40
+ --> $DIR/lint-ctypes.rs:74:40
|
LL | pub static static_u128_array_type: [u128; 16];
| ^^^^^^^^^^ not FFI-safe
|
= note: 128-bit integers don't currently have a known stable ABI
-error: aborting due to 23 previous errors
+error: aborting due to 24 previous errors
#[repr(transparent)]
pub(crate) struct NonBig(u64);
+/// A two-variant enum, thus needs a tag and may not remain uninitialized.
+enum Fruit {
+ Apple,
+ Banana,
+}
+
+/// Looks like two variants but really only has one.
+enum OneFruit {
+ Apple(!),
+ Banana,
+}
+
#[allow(unused)]
fn generic<T: 'static>() {
unsafe {
let _val: NonBig = mem::zeroed();
let _val: NonBig = mem::uninitialized(); //~ ERROR: does not permit being left uninitialized
+ let _val: Fruit = mem::zeroed();
+ let _val: Fruit = mem::uninitialized(); //~ ERROR: does not permit being left uninitialized
+
// Transmute-from-0
let _val: &'static i32 = mem::transmute(0usize); //~ ERROR: does not permit zero-initialization
let _val: &'static [i32] = mem::transmute((0usize, 0usize)); //~ ERROR: does not permit zero-initialization
let _val: MaybeUninit<&'static i32> = mem::zeroed();
let _val: i32 = mem::zeroed();
let _val: bool = MaybeUninit::zeroed().assume_init();
+ // Some things that happen to work due to rustc implementation details,
+ // but are not guaranteed to keep working.
+ let _val: i32 = mem::uninitialized();
+ let _val: OneFruit = mem::uninitialized();
}
}
error: the type `&T` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:29:32
+ --> $DIR/uninitialized-zeroed.rs:41:32
|
LL | let _val: &'static T = mem::zeroed();
| ^^^^^^^^^^^^^
= note: references must be non-null
error: the type `&T` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:30:32
+ --> $DIR/uninitialized-zeroed.rs:42:32
|
LL | let _val: &'static T = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
= note: references must be non-null
error: the type `Wrap<&T>` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:32:38
+ --> $DIR/uninitialized-zeroed.rs:44:38
|
LL | let _val: Wrap<&'static T> = mem::zeroed();
| ^^^^^^^^^^^^^
| ^^^^^^^^^^
error: the type `Wrap<&T>` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:33:38
+ --> $DIR/uninitialized-zeroed.rs:45:38
|
LL | let _val: Wrap<&'static T> = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^
error: the type `!` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:40:23
+ --> $DIR/uninitialized-zeroed.rs:52:23
|
LL | let _val: ! = mem::zeroed();
| ^^^^^^^^^^^^^
= note: the `!` type has no valid value
error: the type `!` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:41:23
+ --> $DIR/uninitialized-zeroed.rs:53:23
|
LL | let _val: ! = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
= note: the `!` type has no valid value
error: the type `(i32, !)` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:43:30
+ --> $DIR/uninitialized-zeroed.rs:55:30
|
LL | let _val: (i32, !) = mem::zeroed();
| ^^^^^^^^^^^^^
= note: the `!` type has no valid value
error: the type `(i32, !)` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:44:30
+ --> $DIR/uninitialized-zeroed.rs:56:30
|
LL | let _val: (i32, !) = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
= note: the `!` type has no valid value
error: the type `Void` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:46:26
+ --> $DIR/uninitialized-zeroed.rs:58:26
|
LL | let _val: Void = mem::zeroed();
| ^^^^^^^^^^^^^
= note: enums with no variants have no valid value
error: the type `Void` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:47:26
+ --> $DIR/uninitialized-zeroed.rs:59:26
|
LL | let _val: Void = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
= note: enums with no variants have no valid value
error: the type `&i32` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:49:34
+ --> $DIR/uninitialized-zeroed.rs:61:34
|
LL | let _val: &'static i32 = mem::zeroed();
| ^^^^^^^^^^^^^
= note: references must be non-null
error: the type `&i32` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:50:34
+ --> $DIR/uninitialized-zeroed.rs:62:34
|
LL | let _val: &'static i32 = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
= note: references must be non-null
error: the type `Ref` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:52:25
+ --> $DIR/uninitialized-zeroed.rs:64:25
|
LL | let _val: Ref = mem::zeroed();
| ^^^^^^^^^^^^^
| ^^^^^^^^^^^^
error: the type `Ref` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:53:25
+ --> $DIR/uninitialized-zeroed.rs:65:25
|
LL | let _val: Ref = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^
error: the type `fn()` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:55:26
+ --> $DIR/uninitialized-zeroed.rs:67:26
|
LL | let _val: fn() = mem::zeroed();
| ^^^^^^^^^^^^^
= note: function pointers must be non-null
error: the type `fn()` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:56:26
+ --> $DIR/uninitialized-zeroed.rs:68:26
|
LL | let _val: fn() = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
= note: function pointers must be non-null
error: the type `Wrap<fn()>` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:58:32
+ --> $DIR/uninitialized-zeroed.rs:70:32
|
LL | let _val: Wrap<fn()> = mem::zeroed();
| ^^^^^^^^^^^^^
| ^^^^^^^^^^
error: the type `Wrap<fn()>` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:59:32
+ --> $DIR/uninitialized-zeroed.rs:71:32
|
LL | let _val: Wrap<fn()> = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^
error: the type `WrapEnum<fn()>` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:61:36
+ --> $DIR/uninitialized-zeroed.rs:73:36
|
LL | let _val: WrapEnum<fn()> = mem::zeroed();
| ^^^^^^^^^^^^^
| ^
error: the type `WrapEnum<fn()>` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:62:36
+ --> $DIR/uninitialized-zeroed.rs:74:36
|
LL | let _val: WrapEnum<fn()> = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
| ^
error: the type `Wrap<(RefPair, i32)>` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:64:42
+ --> $DIR/uninitialized-zeroed.rs:76:42
|
LL | let _val: Wrap<(RefPair, i32)> = mem::zeroed();
| ^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^
error: the type `Wrap<(RefPair, i32)>` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:65:42
+ --> $DIR/uninitialized-zeroed.rs:77:42
|
LL | let _val: Wrap<(RefPair, i32)> = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^^^^^^^^^^
error: the type `std::ptr::NonNull<i32>` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:67:34
+ --> $DIR/uninitialized-zeroed.rs:79:34
|
LL | let _val: NonNull<i32> = mem::zeroed();
| ^^^^^^^^^^^^^
= note: `std::ptr::NonNull<i32>` must be non-null
error: the type `std::ptr::NonNull<i32>` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:68:34
+ --> $DIR/uninitialized-zeroed.rs:80:34
|
LL | let _val: NonNull<i32> = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
= note: `std::ptr::NonNull<i32>` must be non-null
error: the type `*const dyn std::marker::Send` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:70:37
+ --> $DIR/uninitialized-zeroed.rs:82:37
|
LL | let _val: *const dyn Send = mem::zeroed();
| ^^^^^^^^^^^^^
= note: the vtable of a wide raw pointer must be non-null
error: the type `*const dyn std::marker::Send` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:71:37
+ --> $DIR/uninitialized-zeroed.rs:83:37
|
LL | let _val: *const dyn Send = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
= note: the vtable of a wide raw pointer must be non-null
error: the type `bool` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:75:26
+ --> $DIR/uninitialized-zeroed.rs:87:26
|
LL | let _val: bool = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
= note: booleans must be either `true` or `false`
error: the type `Wrap<char>` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:78:32
+ --> $DIR/uninitialized-zeroed.rs:90:32
|
LL | let _val: Wrap<char> = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
| ^^^^^^^^^^
error: the type `NonBig` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:81:28
+ --> $DIR/uninitialized-zeroed.rs:93:28
|
LL | let _val: NonBig = mem::uninitialized();
| ^^^^^^^^^^^^^^^^^^^^
|
= note: `NonBig` must be initialized inside its custom valid range
+error: the type `Fruit` does not permit being left uninitialized
+ --> $DIR/uninitialized-zeroed.rs:96:27
+ |
+LL | let _val: Fruit = mem::uninitialized();
+ | ^^^^^^^^^^^^^^^^^^^^
+ | |
+ | this code causes undefined behavior when executed
+ | help: use `MaybeUninit<T>` instead, and only call `assume_init` after initialization is done
+ |
+note: enums have to be initialized to a variant
+ --> $DIR/uninitialized-zeroed.rs:27:1
+ |
+LL | / enum Fruit {
+LL | | Apple,
+LL | | Banana,
+LL | | }
+ | |_^
+
error: the type `&i32` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:84:34
+ --> $DIR/uninitialized-zeroed.rs:99:34
|
LL | let _val: &'static i32 = mem::transmute(0usize);
| ^^^^^^^^^^^^^^^^^^^^^^
= note: references must be non-null
error: the type `&[i32]` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:85:36
+ --> $DIR/uninitialized-zeroed.rs:100:36
|
LL | let _val: &'static [i32] = mem::transmute((0usize, 0usize));
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: references must be non-null
error: the type `std::num::NonZeroU32` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:86:32
+ --> $DIR/uninitialized-zeroed.rs:101:32
|
LL | let _val: NonZeroU32 = mem::transmute(0);
| ^^^^^^^^^^^^^^^^^
= note: `std::num::NonZeroU32` must be non-null
error: the type `std::ptr::NonNull<i32>` does not permit zero-initialization
- --> $DIR/uninitialized-zeroed.rs:89:34
+ --> $DIR/uninitialized-zeroed.rs:104:34
|
LL | let _val: NonNull<i32> = MaybeUninit::zeroed().assume_init();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: `std::ptr::NonNull<i32>` must be non-null
error: the type `std::ptr::NonNull<i32>` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:90:34
+ --> $DIR/uninitialized-zeroed.rs:105:34
|
LL | let _val: NonNull<i32> = MaybeUninit::uninit().assume_init();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
= note: `std::ptr::NonNull<i32>` must be non-null
error: the type `bool` does not permit being left uninitialized
- --> $DIR/uninitialized-zeroed.rs:91:26
+ --> $DIR/uninitialized-zeroed.rs:106:26
|
LL | let _val: bool = MaybeUninit::uninit().assume_init();
| ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
= note: booleans must be either `true` or `false`
-error: aborting due to 35 previous errors
+error: aborting due to 36 previous errors
// run-pass
// compile-flags:--test
// ignore-emscripten
-// ignore-sgx no thread sleep support
use std::sync::mpsc::channel;
use std::sync::mpsc::TryRecvError;
fn wait(self) {
self.shared.fetch_add(1, Ordering::SeqCst);
while self.shared.load(Ordering::SeqCst) != self.count {
+ #[cfg(target_env = "sgx")]
+ thread::yield_now();
}
}
}
note: the compiler unexpectedly panicked. this is a bug.
-note: we would appreciate a bug report: https://github.com/rust-lang/rust/blob/master/CONTRIBUTING.md#bug-reports
+note: we would appreciate a bug report: https://github.com/rust-lang/rust/issues/new?labels=C-bug%2C+I-ICE%2C+T-compiler&template=ice.md
note: rustc VERSION running on TARGET
print-type-size field `.0`: 12 bytes
print-type-size variant `None`: 0 bytes
print-type-size type: `EmbeddedDiscr`: 8 bytes, alignment: 4 bytes
+print-type-size discriminant: 1 bytes
print-type-size variant `Record`: 7 bytes
-print-type-size field `.val`: 4 bytes
-print-type-size field `.post`: 2 bytes
print-type-size field `.pre`: 1 bytes
+print-type-size field `.post`: 2 bytes
+print-type-size field `.val`: 4 bytes
print-type-size variant `None`: 0 bytes
-print-type-size end padding: 1 bytes
print-type-size type: `MyOption<Union1<std::num::NonZeroU32>>`: 8 bytes, alignment: 4 bytes
print-type-size discriminant: 4 bytes
print-type-size variant `Some`: 4 bytes
#![feature(non_ascii_idents)]
extern crate ьаг; //~ ERROR cannot load a crate with a non-ascii name `ьаг`
-//~| ERROR can't find crate for `ьаг`
fn main() {}
LL | extern crate ьаг;
| ^^^^^^^^^^^^^^^^^
-error[E0463]: can't find crate for `ьаг`
- --> $DIR/crate_name_nonascii_forbidden-1.rs:3:1
- |
-LL | extern crate ьаг;
- | ^^^^^^^^^^^^^^^^^ can't find crate
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
-For more information about this error, try `rustc --explain E0463`.
#![feature(non_ascii_idents)]
use му_сгате::baz; //~ ERROR cannot load a crate with a non-ascii name `му_сгате`
- //~| can't find crate for `му_сгате`
-
fn main() {}
LL | use му_сгате::baz;
| ^^^^^^^^
-error[E0463]: can't find crate for `му_сгате`
- --> $DIR/crate_name_nonascii_forbidden-2.rs:5:5
- |
-LL | use му_сгате::baz;
- | ^^^^^^^^ can't find crate
-
-error: aborting due to 2 previous errors
+error: aborting due to previous error
-For more information about this error, try `rustc --explain E0463`.
// run-pass
// ignore-emscripten no threads support
-// ignore-sgx no thread sleep support
use std::thread::{self, sleep};
use std::time::Duration;
--- /dev/null
+#![feature(min_specialization)]
+
+trait Trait {}
+impl Trait for NonExistent {}
+//~^ ERROR cannot find type `NonExistent` in this scope
+
+fn main() {}
--- /dev/null
+error[E0412]: cannot find type `NonExistent` in this scope
+ --> $DIR/impl-on-nonexisting.rs:4:16
+ |
+LL | impl Trait for NonExistent {}
+ | ^^^^^^^^^^^ not found in this scope
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0412`.
// ignore-emscripten no threads or sockets support
// ignore-netbsd system ulimit (Too many open files)
// ignore-openbsd system ulimit (Too many open files)
-// ignore-sgx no thread sleep support
use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};
--- /dev/null
+// Regression test for #72410, this should be used with debug assertion enabled.
+
+// should be fine
+pub trait Foo {
+ fn map()
+ where
+ Self: Sized,
+ for<'a> &'a mut [u8]: ;
+}
+
+// should fail
+pub trait Bar {
+ fn map()
+ where for<'a> &'a mut [dyn Bar]: ;
+ //~^ ERROR: the trait `Bar` cannot be made into an object
+}
+
+fn main() {}
--- /dev/null
+error[E0038]: the trait `Bar` cannot be made into an object
+ --> $DIR/issue-72410.rs:14:19
+ |
+LL | pub trait Bar {
+ | --- this trait cannot be made into an object...
+LL | fn map()
+ | --- ...because associated function `map` has no `self` parameter
+LL | where for<'a> &'a mut [dyn Bar]: ;
+ | ^^^^^^^^^^^^^^^^^ the trait `Bar` cannot be made into an object
+ |
+help: consider turning `map` into a method by giving it a `&self` argument or constraining it so it does not apply to trait objects
+ |
+LL | where for<'a> &'a mut [dyn Bar]:, Self: Sized ;
+ | ^^^^^^^^^^^^^
+
+error: aborting due to previous error
+
+For more information about this error, try `rustc --explain E0038`.
#![feature(never_type)]
use std::mem::size_of;
+use std::num::NonZeroU8;
struct t {a: u8, b: i8}
struct u {a: u8, b: i8, c: u8}
None
}
+// Two layouts are considered for `CanBeNicheFilledButShouldnt`:
+// Niche-filling:
+// { u32 (4 bytes), NonZeroU8 + tag in niche (1 byte), padding (3 bytes) }
+// Tagged:
+// { tag (1 byte), NonZeroU8 (1 byte), padding (2 bytes), u32 (4 bytes) }
+// Both are the same size (due to padding),
+// but the tagged layout is better as the tag creates a niche with 254 invalid values,
+// allowing types like `Option<Option<CanBeNicheFilledButShouldnt>>` to fit into 8 bytes.
+pub enum CanBeNicheFilledButShouldnt {
+ A(NonZeroU8, u32),
+ B
+}
+pub enum AlwaysTaggedBecauseItHasNoNiche {
+ A(u8, u32),
+ B
+}
+
pub fn main() {
assert_eq!(size_of::<u8>(), 1 as usize);
assert_eq!(size_of::<u32>(), 4 as usize);
assert_eq!(size_of::<Option<Option<(&(), bool)>>>(), size_of::<(bool, &())>());
assert_eq!(size_of::<Option<Option2<bool, &()>>>(), size_of::<(bool, &())>());
assert_eq!(size_of::<Option<Option2<&(), bool>>>(), size_of::<(bool, &())>());
+
+ assert_eq!(size_of::<CanBeNicheFilledButShouldnt>(), 8);
+ assert_eq!(size_of::<Option<CanBeNicheFilledButShouldnt>>(), 8);
+ assert_eq!(size_of::<Option<Option<CanBeNicheFilledButShouldnt>>>(), 8);
+ assert_eq!(size_of::<AlwaysTaggedBecauseItHasNoNiche>(), 8);
+ assert_eq!(size_of::<Option<AlwaysTaggedBecauseItHasNoNiche>>(), 8);
+ assert_eq!(size_of::<Option<Option<AlwaysTaggedBecauseItHasNoNiche>>>(), 8);
}
--- /dev/null
+pub trait Unsatisfied {}
+
+#[repr(transparent)]
+pub struct Bar<T: Unsatisfied>(T);
+
+pub trait Foo {
+ type Assoc;
+}
+
+extern "C" {
+ pub fn lint_me() -> <() as Foo>::Assoc;
+ //~^ ERROR: the trait bound `(): Foo` is not satisfied [E0277]
+
+ pub fn lint_me_aswell() -> Bar<u32>;
+ //~^ ERROR: the trait bound `u32: Unsatisfied` is not satisfied [E0277]
+}
+
+fn main() {}
--- /dev/null
+error[E0277]: the trait bound `(): Foo` is not satisfied
+ --> $DIR/wf-foreign-fn-decl-ret.rs:11:5
+ |
+LL | pub fn lint_me() -> <() as Foo>::Assoc;
+ | ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ the trait `Foo` is not implemented for `()`
+
+error[E0277]: the trait bound `u32: Unsatisfied` is not satisfied
+ --> $DIR/wf-foreign-fn-decl-ret.rs:14:32
+ |
+LL | pub struct Bar<T: Unsatisfied>(T);
+ | ----------- required by this bound in `Bar`
+...
+LL | pub fn lint_me_aswell() -> Bar<u32>;
+ | ^^^^^^^^ the trait `Unsatisfied` is not implemented for `u32`
+
+error: aborting due to 2 previous errors
+
+For more information about this error, try `rustc --explain E0277`.
"powerpc-unknown-linux-gnu",
"powerpc64-unknown-linux-gnu",
"powerpc64le-unknown-linux-gnu",
+ "riscv64gc-unknown-linux-gnu",
"s390x-unknown-linux-gnu",
"x86_64-apple-darwin",
"x86_64-pc-windows-gnu",
_ => (),
}
- let (l_ty, r_ty) = (cx.tables().expr_ty(l), cx.tables().expr_ty(r));
+ let (l_ty, r_ty) = (cx.typeck_results().expr_ty(l), cx.typeck_results().expr_ty(r));
if l_ty.peel_refs().is_integral() && r_ty.peel_refs().is_integral() {
span_lint(cx, INTEGER_ARITHMETIC, expr.span, "integer arithmetic detected");
self.expr_span = Some(expr.span);
}
},
hir::ExprKind::Unary(hir::UnOp::UnNeg, arg) => {
- let ty = cx.tables().expr_ty(arg);
- if constant_simple(cx, cx.tables(), expr).is_none() {
+ let ty = cx.typeck_results().expr_ty(arg);
+ if constant_simple(cx, cx.typeck_results(), expr).is_none() {
if ty.is_integral() {
span_lint(cx, INTEGER_ARITHMETIC, expr.span, "integer arithmetic detected");
self.expr_span = Some(expr.span);
}
if_chain! {
if let ExprKind::Unary(_, ref lit) = e.kind;
- if let Some((Constant::Bool(is_true), _)) = constant(cx, cx.tables(), lit);
+ if let Some((Constant::Bool(is_true), _)) = constant(cx, cx.typeck_results(), lit);
if is_true;
then {
lint_true(true);
if let ExprKind::DropTemps(ref expr) = expr.kind;
if let ExprKind::Unary(UnOp::UnNot, ref expr) = expr.kind;
// bind the first argument of the `assert!` macro
- if let Some((Constant::Bool(is_true), _)) = constant(cx, cx.tables(), expr);
+ if let Some((Constant::Bool(is_true), _)) = constant(cx, cx.typeck_results(), expr);
// arm 1 pattern
if let PatKind::Lit(ref lit_expr) = arms[0].pat.kind;
if let ExprKind::Lit(ref lit) = lit_expr.kind;
hir::ExprKind::Assign(assignee, e, _) => {
if let hir::ExprKind::Binary(op, l, r) = &e.kind {
let lint = |assignee: &hir::Expr<'_>, rhs: &hir::Expr<'_>| {
- let ty = cx.tables().expr_ty(assignee);
- let rty = cx.tables().expr_ty(rhs);
+ let ty = cx.typeck_results().expr_ty(assignee);
+ let rty = cx.typeck_results().expr_ty(rhs);
macro_rules! ops {
($op:expr,
$cx:expr,
// a = b commutative_op a
// Limited to primitive type as these ops are know to be commutative
if SpanlessEq::new(cx).ignore_fn().eq_expr(assignee, r)
- && cx.tables().expr_ty(assignee).is_primitive_ty()
+ && cx.typeck_results().expr_ty(assignee).is_primitive_ty()
{
match op.node {
hir::BinOpKind::Add
];
fn type_is_atomic(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- if let ty::Adt(&ty::AdtDef { did, .. }, _) = cx.tables().expr_ty(expr).kind {
+ if let ty::Adt(&ty::AdtDef { did, .. }, _) = cx.typeck_results().expr_ty(expr).kind {
ATOMIC_TYPES
.iter()
.any(|ty| match_def_path(cx, did, &["core", "sync", "atomic", ty]))
fn is_relevant_item(cx: &LateContext<'_>, item: &Item<'_>) -> bool {
if let ItemKind::Fn(_, _, eid) = item.kind {
- is_relevant_expr(cx, cx.tcx.body_tables(eid), &cx.tcx.hir().body(eid).value)
+ is_relevant_expr(cx, cx.tcx.typeck_body(eid), &cx.tcx.hir().body(eid).value)
} else {
true
}
fn is_relevant_impl(cx: &LateContext<'_>, item: &ImplItem<'_>) -> bool {
match item.kind {
- ImplItemKind::Fn(_, eid) => is_relevant_expr(cx, cx.tcx.body_tables(eid), &cx.tcx.hir().body(eid).value),
+ ImplItemKind::Fn(_, eid) => is_relevant_expr(cx, cx.tcx.typeck_body(eid), &cx.tcx.hir().body(eid).value),
_ => false,
}
}
match item.kind {
TraitItemKind::Fn(_, TraitFn::Required(_)) => true,
TraitItemKind::Fn(_, TraitFn::Provided(eid)) => {
- is_relevant_expr(cx, cx.tcx.body_tables(eid), &cx.tcx.hir().body(eid).value)
+ is_relevant_expr(cx, cx.tcx.typeck_body(eid), &cx.tcx.hir().body(eid).value)
},
_ => false,
}
}
-fn is_relevant_block(cx: &LateContext<'_>, tables: &ty::TypeckTables<'_>, block: &Block<'_>) -> bool {
+fn is_relevant_block(cx: &LateContext<'_>, typeck_results: &ty::TypeckResults<'_>, block: &Block<'_>) -> bool {
block.stmts.first().map_or(
- block.expr.as_ref().map_or(false, |e| is_relevant_expr(cx, tables, e)),
+ block
+ .expr
+ .as_ref()
+ .map_or(false, |e| is_relevant_expr(cx, typeck_results, e)),
|stmt| match &stmt.kind {
StmtKind::Local(_) => true,
- StmtKind::Expr(expr) | StmtKind::Semi(expr) => is_relevant_expr(cx, tables, expr),
+ StmtKind::Expr(expr) | StmtKind::Semi(expr) => is_relevant_expr(cx, typeck_results, expr),
_ => false,
},
)
}
-fn is_relevant_expr(cx: &LateContext<'_>, tables: &ty::TypeckTables<'_>, expr: &Expr<'_>) -> bool {
+fn is_relevant_expr(cx: &LateContext<'_>, typeck_results: &ty::TypeckResults<'_>, expr: &Expr<'_>) -> bool {
match &expr.kind {
- ExprKind::Block(block, _) => is_relevant_block(cx, tables, block),
- ExprKind::Ret(Some(e)) => is_relevant_expr(cx, tables, e),
+ ExprKind::Block(block, _) => is_relevant_block(cx, typeck_results, block),
+ ExprKind::Ret(Some(e)) => is_relevant_expr(cx, typeck_results, e),
ExprKind::Ret(None) | ExprKind::Break(_, None) => false,
ExprKind::Call(path_expr, _) => {
if let ExprKind::Path(qpath) = &path_expr.kind {
- tables
+ typeck_results
.qpath_res(qpath, path_expr.hir_id)
.opt_def_id()
.map_or(true, |fun_id| !match_def_path(cx, fun_id, &paths::BEGIN_PANIC))
hir_id: body.value.hir_id,
};
let def_id = cx.tcx.hir().body_owner_def_id(body_id);
- let tables = cx.tcx.typeck_tables_of(def_id);
- check_interior_types(cx, &tables.generator_interior_types, body.value.span);
+ let typeck_results = cx.tcx.typeck(def_id);
+ check_interior_types(cx, &typeck_results.generator_interior_types, body.value.span);
}
}
}
}
fn fetch_int_literal(cx: &LateContext<'_>, lit: &Expr<'_>) -> Option<u128> {
- match constant(cx, cx.tables(), lit)?.0 {
+ match constant(cx, cx.typeck_results(), lit)?.0 {
Constant::Int(n) => Some(n),
_ => None,
}
match &e.kind {
ExprKind::Unary(UnOp::UnNot, inner) => return Ok(Bool::Not(box self.run(inner)?)),
ExprKind::Binary(binop, lhs, rhs) => match &binop.node {
- BinOpKind::Or => return Ok(Bool::Or(self.extract(BinOpKind::Or, &[lhs, rhs], Vec::new())?)),
- BinOpKind::And => return Ok(Bool::And(self.extract(BinOpKind::And, &[lhs, rhs], Vec::new())?)),
+ BinOpKind::Or => {
+ return Ok(Bool::Or(self.extract(BinOpKind::Or, &[lhs, rhs], Vec::new())?));
+ },
+ BinOpKind::And => {
+ return Ok(Bool::And(self.extract(BinOpKind::And, &[lhs, rhs], Vec::new())?));
+ },
_ => (),
},
ExprKind::Lit(lit) => match lit.node {
})
},
ExprKind::MethodCall(path, _, args, _) if args.len() == 1 => {
- let type_of_receiver = cx.tables().expr_ty(&args[0]);
+ let type_of_receiver = cx.typeck_results().expr_ty(&args[0]);
if !is_type_diagnostic_item(cx, type_of_receiver, sym!(option_type))
&& !is_type_diagnostic_item(cx, type_of_receiver, sym!(result_type))
{
self.bool_expr(e)
},
ExprKind::Unary(UnOp::UnNot, inner) => {
- if self.cx.tables().node_types()[inner.hir_id].is_bool() {
+ if self.cx.typeck_results().node_types()[inner.hir_id].is_bool() {
self.bool_expr(e);
} else {
walk_expr(self, e);
}
fn implements_ord<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> bool {
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
get_trait_def_id(cx, &paths::ORD).map_or(false, |id| implements_trait(cx, ty, id, &[]))
}
if let ExprKind::Binary(ref op, ref l, ref r) = body.value.kind;
if op.node == BinOpKind::Eq;
if match_type(cx,
- walk_ptrs_ty(cx.tables().expr_ty(&filter_args[0])),
+ walk_ptrs_ty(cx.typeck_results().expr_ty(&filter_args[0])),
&paths::SLICE_ITER);
then {
let needle = match get_path_name(l) {
_ => { return; }
}
};
- if ty::Uint(UintTy::U8) != walk_ptrs_ty(cx.tables().expr_ty(needle)).kind {
+ if ty::Uint(UintTy::U8) != walk_ptrs_ty(cx.typeck_results().expr_ty(needle)).kind {
return;
}
let haystack = if let ExprKind::MethodCall(ref path, _, ref args, _) =
let mut helper = CCHelper { cc: 1, returns: 0 };
helper.visit_expr(expr);
let CCHelper { cc, returns } = helper;
- let ret_ty = cx.tables().node_type(expr.hir_id);
+ let ret_ty = cx.typeck_results().node_type(expr.hir_id);
let ret_adjust = if is_type_diagnostic_item(cx, ret_ty, sym!(result_type)) {
returns
} else {
}
// Check that the type being compared implements `core::cmp::Ord`
- let ty = cx.tables().expr_ty(lhs1);
+ let ty = cx.typeck_results().expr_ty(lhs1);
let is_ord = get_trait_def_id(cx, &paths::ORD).map_or(false, |id| implements_trait(cx, ty, id, &[]));
if !is_ord {
pub fn constant<'tcx>(
lcx: &LateContext<'tcx>,
- tables: &ty::TypeckTables<'tcx>,
+ typeck_results: &ty::TypeckResults<'tcx>,
e: &Expr<'_>,
) -> Option<(Constant, bool)> {
let mut cx = ConstEvalLateContext {
lcx,
- tables,
+ typeck_results,
param_env: lcx.param_env,
needed_resolution: false,
substs: lcx.tcx.intern_substs(&[]),
pub fn constant_simple<'tcx>(
lcx: &LateContext<'tcx>,
- tables: &ty::TypeckTables<'tcx>,
+ typeck_results: &ty::TypeckResults<'tcx>,
e: &Expr<'_>,
) -> Option<Constant> {
- constant(lcx, tables, e).and_then(|(cst, res)| if res { None } else { Some(cst) })
+ constant(lcx, typeck_results, e).and_then(|(cst, res)| if res { None } else { Some(cst) })
}
-/// Creates a `ConstEvalLateContext` from the given `LateContext` and `TypeckTables`.
+/// Creates a `ConstEvalLateContext` from the given `LateContext` and `TypeckResults`.
pub fn constant_context<'a, 'tcx>(
lcx: &'a LateContext<'tcx>,
- tables: &'a ty::TypeckTables<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
) -> ConstEvalLateContext<'a, 'tcx> {
ConstEvalLateContext {
lcx,
- tables,
+ typeck_results,
param_env: lcx.param_env,
needed_resolution: false,
substs: lcx.tcx.intern_substs(&[]),
pub struct ConstEvalLateContext<'a, 'tcx> {
lcx: &'a LateContext<'tcx>,
- tables: &'a ty::TypeckTables<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
param_env: ty::ParamEnv<'tcx>,
needed_resolution: bool,
substs: SubstsRef<'tcx>,
return self.ifthenelse(cond, then, otherwise);
}
match e.kind {
- ExprKind::Path(ref qpath) => self.fetch_path(qpath, e.hir_id, self.tables.expr_ty(e)),
+ ExprKind::Path(ref qpath) => self.fetch_path(qpath, e.hir_id, self.typeck_results.expr_ty(e)),
ExprKind::Block(ref block, _) => self.block(block),
- ExprKind::Lit(ref lit) => Some(lit_to_constant(&lit.node, self.tables.expr_ty_opt(e))),
+ ExprKind::Lit(ref lit) => Some(lit_to_constant(&lit.node, self.typeck_results.expr_ty_opt(e))),
ExprKind::Array(ref vec) => self.multi(vec).map(Constant::Vec),
ExprKind::Tup(ref tup) => self.multi(tup).map(Constant::Tuple),
ExprKind::Repeat(ref value, _) => {
- let n = match self.tables.expr_ty(e).kind {
+ let n = match self.typeck_results.expr_ty(e).kind {
ty::Array(_, n) => n.try_eval_usize(self.lcx.tcx, self.lcx.param_env)?,
_ => span_bug!(e.span, "typeck error"),
};
self.expr(value).map(|v| Constant::Repeat(Box::new(v), n))
},
ExprKind::Unary(op, ref operand) => self.expr(operand).and_then(|o| match op {
- UnOp::UnNot => self.constant_not(&o, self.tables.expr_ty(e)),
- UnOp::UnNeg => self.constant_negate(&o, self.tables.expr_ty(e)),
+ UnOp::UnNot => self.constant_not(&o, self.typeck_results.expr_ty(e)),
+ UnOp::UnNeg => self.constant_negate(&o, self.typeck_results.expr_ty(e)),
UnOp::UnDeref => Some(o),
}),
ExprKind::Binary(op, ref left, ref right) => self.binop(op, left, right),
if_chain! {
if args.is_empty();
if let ExprKind::Path(qpath) = &callee.kind;
- let res = self.tables.qpath_res(qpath, callee.hir_id);
+ let res = self.typeck_results.qpath_res(qpath, callee.hir_id);
if let Some(def_id) = res.opt_def_id();
let def_path: Vec<_> = self.lcx.get_def_path(def_id).into_iter().map(Symbol::as_str).collect();
let def_path: Vec<&str> = def_path.iter().take(4).map(|s| &**s).collect();
/// Lookup a possibly constant expression from a `ExprKind::Path`.
fn fetch_path(&mut self, qpath: &QPath<'_>, id: HirId, ty: Ty<'tcx>) -> Option<Constant> {
- let res = self.tables.qpath_res(qpath, id);
+ let res = self.typeck_results.qpath_res(qpath, id);
match res {
Res::Def(DefKind::Const | DefKind::AssocConst, def_id) => {
- let substs = self.tables.node_substs(id);
+ let substs = self.typeck_results.node_substs(id);
let substs = if self.substs.is_empty() {
substs
} else {
let result = self
.lcx
.tcx
- .const_eval_resolve(self.param_env, ty::WithOptConstParam::unknown(def_id), substs, None, None)
+ .const_eval_resolve(
+ self.param_env,
+ ty::WithOptConstParam::unknown(def_id),
+ substs,
+ None,
+ None,
+ )
.ok()
.map(|val| rustc_middle::ty::Const::from_value(self.lcx.tcx, val, ty))?;
let result = miri_to_const(&result);
let l = self.expr(left)?;
let r = self.expr(right);
match (l, r) {
- (Constant::Int(l), Some(Constant::Int(r))) => match self.tables.expr_ty_opt(left)?.kind {
+ (Constant::Int(l), Some(Constant::Int(r))) => match self.typeck_results.expr_ty_opt(left)?.kind {
ty::Int(ity) => {
let l = sext(self.lcx.tcx, l, ity);
let r = sext(self.lcx.tcx, r, ity);
pub fn miri_to_const(result: &ty::Const<'_>) -> Option<Constant> {
use rustc_middle::mir::interpret::{ConstValue, Scalar};
match result.val {
- ty::ConstKind::Value(ConstValue::Scalar(Scalar::Raw { data: d, .. })) => match result.ty.kind {
- ty::Bool => Some(Constant::Bool(d == 1)),
- ty::Uint(_) | ty::Int(_) => Some(Constant::Int(d)),
- ty::Float(FloatTy::F32) => Some(Constant::F32(f32::from_bits(
- d.try_into().expect("invalid f32 bit representation"),
- ))),
- ty::Float(FloatTy::F64) => Some(Constant::F64(f64::from_bits(
- d.try_into().expect("invalid f64 bit representation"),
- ))),
- ty::RawPtr(type_and_mut) => {
- if let ty::Uint(_) = type_and_mut.ty.kind {
- return Some(Constant::RawPtr(d));
- }
- None
- },
- // FIXME: implement other conversions.
- _ => None,
+ ty::ConstKind::Value(ConstValue::Scalar(Scalar::Raw { data: d, .. })) => {
+ match result.ty.kind {
+ ty::Bool => Some(Constant::Bool(d == 1)),
+ ty::Uint(_) | ty::Int(_) => Some(Constant::Int(d)),
+ ty::Float(FloatTy::F32) => Some(Constant::F32(f32::from_bits(
+ d.try_into().expect("invalid f32 bit representation"),
+ ))),
+ ty::Float(FloatTy::F64) => Some(Constant::F64(f64::from_bits(
+ d.try_into().expect("invalid f64 bit representation"),
+ ))),
+ ty::RawPtr(type_and_mut) => {
+ if let ty::Uint(_) = type_and_mut.ty.kind {
+ return Some(Constant::RawPtr(d));
+ }
+ None
+ },
+ // FIXME: implement other conversions.
+ _ => None,
+ }
},
ty::ConstKind::Value(ConstValue::Slice { data, start, end }) => match result.ty.kind {
ty::Ref(_, tam, _) => match tam.kind {
},
PatKind::Binding(.., ident, ref as_pat) => {
if let Entry::Vacant(v) = map.entry(ident.name) {
- v.insert(cx.tables().pat_ty(pat));
+ v.insert(cx.typeck_results().pat_ty(pat));
}
if let Some(ref as_pat) = *as_pat {
bindings_impl(cx, as_pat, map);
// TODO: Work out a way to put "whatever the imported way of referencing
// this type in this file" rather than a fully-qualified type.
- let expr_ty = cx.tables().expr_ty(expr);
+ let expr_ty = cx.typeck_results().expr_ty(expr);
if let ty::Adt(..) = expr_ty.kind {
let replacement = format!("{}::default()", expr_ty);
span_lint_and_sugg(
match method_name {
"deref" => {
let impls_deref_trait = cx.tcx.lang_items().deref_trait().map_or(false, |id| {
- implements_trait(cx, cx.tables().expr_ty(&call_expr), id, &[])
+ implements_trait(cx, cx.typeck_results().expr_ty(&call_expr), id, &[])
});
if impls_deref_trait {
span_lint_and_sugg(
},
"deref_mut" => {
let impls_deref_mut_trait = cx.tcx.lang_items().deref_mut_trait().map_or(false, |id| {
- implements_trait(cx, cx.tables().expr_ty(&call_expr), id, &[])
+ implements_trait(cx, cx.typeck_results().expr_ty(&call_expr), id, &[])
});
if impls_deref_mut_trait {
span_lint_and_sugg(
let lint;
let msg;
let arg = &args[0];
- let arg_ty = cx.tables().expr_ty(arg);
+ let arg_ty = cx.typeck_results().expr_ty(arg);
if let ty::Ref(..) = arg_ty.kind {
if match_def_path(cx, def_id, &paths::DROP) {
if_chain! {
if let ExprKind::Binary(Spanned { node: BinOpKind::Div, .. }, ref left, ref right) = expr.kind;
if let ExprKind::MethodCall(ref method_path, _ , ref args, _) = left.kind;
- if match_type(cx, walk_ptrs_ty(cx.tables().expr_ty(&args[0])), &paths::DURATION);
- if let Some((Constant::Int(divisor), _)) = constant(cx, cx.tables(), right);
+ if match_type(cx, walk_ptrs_ty(cx.typeck_results().expr_ty(&args[0])), &paths::DURATION);
+ if let Some((Constant::Int(divisor), _)) = constant(cx, cx.typeck_results(), right);
then {
let suggested_fn = match (method_path.ident.as_str().as_ref(), divisor) {
("subsec_micros", 1_000) | ("subsec_nanos", 1_000_000) => "subsec_millis",
if let ExprKind::AddrOf(BorrowKind::Ref, _, ref key) = params[1].kind;
then {
let map = ¶ms[0];
- let obj_ty = walk_ptrs_ty(cx.tables().expr_ty(map));
+ let obj_ty = walk_ptrs_ty(cx.typeck_results().expr_ty(map));
return if match_type(cx, obj_ty, &paths::BTREEMAP) {
Some(("BTreeMap", map, key))
(&ExprKind::Lit(..), _) | (_, &ExprKind::Lit(..)) => {},
// &foo == &bar
(&ExprKind::AddrOf(BorrowKind::Ref, _, ref l), &ExprKind::AddrOf(BorrowKind::Ref, _, ref r)) => {
- let lty = cx.tables().expr_ty(l);
- let rty = cx.tables().expr_ty(r);
+ let lty = cx.typeck_results().expr_ty(l);
+ let rty = cx.typeck_results().expr_ty(r);
let lcpy = is_copy(cx, lty);
let rcpy = is_copy(cx, rty);
// either operator autorefs or both args are copyable
)
} else if lcpy
&& !rcpy
- && implements_trait(cx, lty, trait_id, &[cx.tables().expr_ty(right).into()])
+ && implements_trait(cx, lty, trait_id, &[cx.typeck_results().expr_ty(right).into()])
{
span_lint_and_then(
cx,
)
} else if !lcpy
&& rcpy
- && implements_trait(cx, cx.tables().expr_ty(left), trait_id, &[rty.into()])
+ && implements_trait(cx, cx.typeck_results().expr_ty(left), trait_id, &[rty.into()])
{
span_lint_and_then(
cx,
},
// &foo == bar
(&ExprKind::AddrOf(BorrowKind::Ref, _, ref l), _) => {
- let lty = cx.tables().expr_ty(l);
+ let lty = cx.typeck_results().expr_ty(l);
let lcpy = is_copy(cx, lty);
if (requires_ref || lcpy)
- && implements_trait(cx, lty, trait_id, &[cx.tables().expr_ty(right).into()])
+ && implements_trait(cx, lty, trait_id, &[cx.typeck_results().expr_ty(right).into()])
{
span_lint_and_then(
cx,
},
// foo == &bar
(_, &ExprKind::AddrOf(BorrowKind::Ref, _, ref r)) => {
- let rty = cx.tables().expr_ty(r);
+ let rty = cx.typeck_results().expr_ty(r);
let rcpy = is_copy(cx, rty);
if (requires_ref || rcpy)
- && implements_trait(cx, cx.tables().expr_ty(left), trait_id, &[rty.into()])
+ && implements_trait(cx, cx.typeck_results().expr_ty(left), trait_id, &[rty.into()])
{
span_lint_and_then(cx, OP_REF, e.span, "taken reference of right operand", |diag| {
let rsnip = snippet(cx, r.span, "...").to_string();
}
fn check(cx: &LateContext<'_>, e: &Expr<'_>, span: Span) {
- if let Some(Constant::Int(0)) = constant_simple(cx, cx.tables(), e) {
+ if let Some(Constant::Int(0)) = constant_simple(cx, cx.typeck_results(), e) {
span_lint(
cx,
ERASING_OP,
let fn_def_id = cx.tcx.hir().local_def_id(hir_id);
cx.tcx.infer_ctxt().enter(|infcx| {
- ExprUseVisitor::new(&mut v, &infcx, fn_def_id, cx.param_env, cx.tables()).consume_body(body);
+ ExprUseVisitor::new(&mut v, &infcx, fn_def_id, cx.param_env, cx.typeck_results()).consume_body(body);
});
for node in v.set {
// Are the expression or the arguments type-adjusted? Then we need the closure
if !(is_adjusted(cx, ex) || args.iter().any(|arg| is_adjusted(cx, arg)));
- let fn_ty = cx.tables().expr_ty(caller);
+ let fn_ty = cx.typeck_results().expr_ty(caller);
if matches!(fn_ty.kind, ty::FnDef(_, _) | ty::FnPtr(_) | ty::Closure(_, _));
// Are the expression or the arguments type-adjusted? Then we need the closure
if !(is_adjusted(cx, ex) || args.iter().skip(1).any(|arg| is_adjusted(cx, arg)));
- let method_def_id = cx.tables().type_dependent_def_id(ex.hir_id).unwrap();
+ let method_def_id = cx.typeck_results().type_dependent_def_id(ex.hir_id).unwrap();
if !type_is_unsafe_function(cx, cx.tcx.type_of(method_def_id));
if compare_inputs(&mut iter_input_pats(decl, body), &mut args.iter());
/// Tries to determine the type for universal function call to be used instead of the closure
fn get_ufcs_type_name(cx: &LateContext<'_>, method_def_id: def_id::DefId, self_arg: &Expr<'_>) -> Option<String> {
let expected_type_of_self = &cx.tcx.fn_sig(method_def_id).inputs_and_output().skip_binder()[0];
- let actual_type_of_self = &cx.tables().node_type(self_arg.hir_id);
+ let actual_type_of_self = &cx.typeck_results().node_type(self_arg.hir_id);
if let Some(trait_id) = cx.tcx.trait_of_item(method_def_id) {
if match_borrow_depth(expected_type_of_self, &actual_type_of_self)
match e.kind {
ExprKind::Continue(_) | ExprKind::Break(_, _) | ExprKind::Ret(_) => self.report_diverging_sub_expr(e),
ExprKind::Call(ref func, _) => {
- let typ = self.cx.tables().expr_ty(func);
+ let typ = self.cx.typeck_results().expr_ty(func);
match typ.kind {
ty::FnDef(..) | ty::FnPtr(_) => {
let sig = typ.fn_sig(self.cx.tcx);
}
},
ExprKind::MethodCall(..) => {
- let borrowed_table = self.cx.tables();
+ let borrowed_table = self.cx.typeck_results();
if borrowed_table.expr_ty(e).is_never() {
self.report_diverging_sub_expr(e);
}
struct FindPanicUnwrap<'a, 'tcx> {
lcx: &'a LateContext<'tcx>,
- tables: &'tcx ty::TypeckTables<'tcx>,
+ typeck_results: &'tcx ty::TypeckResults<'tcx>,
result: Vec<Span>,
}
// check for `unwrap`
if let Some(arglists) = method_chain_args(expr, &["unwrap"]) {
- let reciever_ty = walk_ptrs_ty(self.tables.expr_ty(&arglists[0][0]));
+ let reciever_ty = walk_ptrs_ty(self.typeck_results.expr_ty(&arglists[0][0]));
if is_type_diagnostic_item(self.lcx, reciever_ty, sym!(option_type))
|| is_type_diagnostic_item(self.lcx, reciever_ty, sym!(result_type))
{
let impl_item_def_id = cx.tcx.hir().local_def_id(impl_item.id.hir_id);
let mut fpu = FindPanicUnwrap {
lcx: cx,
- tables: cx.tcx.typeck_tables_of(impl_item_def_id),
+ typeck_results: cx.tcx.typeck(impl_item_def_id),
result: Vec::new(),
};
fpu.visit_expr(&body.value);
impl<'tcx> LateLintPass<'tcx> for FloatLiteral {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>) {
if_chain! {
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
if let ty::Float(fty) = ty.kind;
if let hir::ExprKind::Lit(ref lit) = expr.kind;
if let LitKind::Float(sym, lit_float_ty) = lit.node;
// Returns the specialized log method for a given base if base is constant
// and is one of 2, 10 and e
fn get_specialized_log_method(cx: &LateContext<'_>, base: &Expr<'_>) -> Option<&'static str> {
- if let Some((value, _)) = constant(cx, cx.tables(), base) {
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), base) {
if F32(2.0) == value || F64(2.0) == value {
return Some("log2");
} else if F32(10.0) == value || F64(10.0) == value {
if_chain! {
// if the expression is a float literal and it is unsuffixed then
// add a suffix so the suggestion is valid and unambiguous
- if let ty::Float(float_ty) = cx.tables().expr_ty(expr).kind;
+ if let ty::Float(float_ty) = cx.typeck_results().expr_ty(expr).kind;
if let ExprKind::Lit(lit) = &expr.kind;
if let ast::LitKind::Float(sym, ast::LitFloatType::Unsuffixed) = lit.node;
then {
rhs,
) = &args[0].kind
{
- let recv = match (constant(cx, cx.tables(), lhs), constant(cx, cx.tables(), rhs)) {
+ let recv = match (
+ constant(cx, cx.typeck_results(), lhs),
+ constant(cx, cx.typeck_results(), rhs),
+ ) {
(Some((value, _)), _) if F32(1.0) == value || F64(1.0) == value => rhs,
(_, Some((value, _))) if F32(1.0) == value || F64(1.0) == value => lhs,
_ => return,
fn check_powf(cx: &LateContext<'_>, expr: &Expr<'_>, args: &[Expr<'_>]) {
// Check receiver
- if let Some((value, _)) = constant(cx, cx.tables(), &args[0]) {
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), &args[0]) {
let method = if F32(f32_consts::E) == value || F64(f64_consts::E) == value {
"exp"
} else if F32(2.0) == value || F64(2.0) == value {
}
// Check argument
- if let Some((value, _)) = constant(cx, cx.tables(), &args[1]) {
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), &args[1]) {
let (lint, help, suggestion) = if F32(1.0 / 2.0) == value || F64(1.0 / 2.0) == value {
(
SUBOPTIMAL_FLOPS,
}
fn check_powi(cx: &LateContext<'_>, expr: &Expr<'_>, args: &[Expr<'_>]) {
- if let Some((value, _)) = constant(cx, cx.tables(), &args[1]) {
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), &args[1]) {
if value == Int(2) {
if let Some(parent) = get_parent_expr(cx, expr) {
if let Some(grandparent) = get_parent_expr(cx, parent) {
_
) = add_rhs.kind;
if lmethod_name.as_str() == "powi" && rmethod_name.as_str() == "powi";
- if let Some((lvalue, _)) = constant(cx, cx.tables(), &largs[1]);
- if let Some((rvalue, _)) = constant(cx, cx.tables(), &rargs[1]);
+ if let Some((lvalue, _)) = constant(cx, cx.typeck_results(), &largs[1]);
+ if let Some((rvalue, _)) = constant(cx, cx.typeck_results(), &rargs[1]);
if Int(2) == lvalue && Int(2) == rvalue;
then {
return Some(format!("{}.hypot({})", Sugg::hir(cx, &largs[0], ".."), Sugg::hir(cx, &rargs[0], "..")));
fn check_expm1(cx: &LateContext<'_>, expr: &Expr<'_>) {
if_chain! {
if let ExprKind::Binary(Spanned { node: BinOpKind::Sub, .. }, ref lhs, ref rhs) = expr.kind;
- if cx.tables().expr_ty(lhs).is_floating_point();
- if let Some((value, _)) = constant(cx, cx.tables(), rhs);
+ if cx.typeck_results().expr_ty(lhs).is_floating_point();
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), rhs);
if F32(1.0) == value || F64(1.0) == value;
if let ExprKind::MethodCall(ref path, _, ref method_args, _) = lhs.kind;
- if cx.tables().expr_ty(&method_args[0]).is_floating_point();
+ if cx.typeck_results().expr_ty(&method_args[0]).is_floating_point();
if path.ident.name.as_str() == "exp";
then {
span_lint_and_sugg(
fn is_float_mul_expr<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<(&'a Expr<'a>, &'a Expr<'a>)> {
if_chain! {
if let ExprKind::Binary(Spanned { node: BinOpKind::Mul, .. }, ref lhs, ref rhs) = &expr.kind;
- if cx.tables().expr_ty(lhs).is_floating_point();
- if cx.tables().expr_ty(rhs).is_floating_point();
+ if cx.typeck_results().expr_ty(lhs).is_floating_point();
+ if cx.typeck_results().expr_ty(rhs).is_floating_point();
then {
return Some((lhs, rhs));
}
/// Returns true iff expr is some zero literal
fn is_zero(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- match constant_simple(cx, cx.tables(), expr) {
+ match constant_simple(cx, cx.typeck_results(), expr) {
Some(Constant::Int(i)) => i == 0,
Some(Constant::F32(f)) => f == 0.0,
Some(Constant::F64(f)) => f == 0.0,
mul_lhs,
mul_rhs,
) = &div_lhs.kind;
- if let Some((rvalue, _)) = constant(cx, cx.tables(), div_rhs);
- if let Some((lvalue, _)) = constant(cx, cx.tables(), mul_rhs);
+ if let Some((rvalue, _)) = constant(cx, cx.typeck_results(), div_rhs);
+ if let Some((lvalue, _)) = constant(cx, cx.typeck_results(), mul_rhs);
then {
// TODO: also check for constant values near PI/180 or 180/PI
if (F32(f32_consts::PI) == rvalue || F64(f64_consts::PI) == rvalue) &&
impl<'tcx> LateLintPass<'tcx> for FloatingPointArithmetic {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if let ExprKind::MethodCall(ref path, _, args, _) = &expr.kind {
- let recv_ty = cx.tables().expr_ty(&args[0]);
+ let recv_ty = cx.typeck_results().expr_ty(&args[0]);
if recv_ty.is_floating_point() {
match &*path.ident.name.as_str() {
if let PatKind::Tuple(ref pats, None) = arms[0].pat.kind;
if pats.len() == 1;
then {
- let ty = walk_ptrs_ty(cx.tables().pat_ty(&pats[0]));
+ let ty = walk_ptrs_ty(cx.typeck_results().pat_ty(&pats[0]));
if ty.kind != rustc_middle::ty::Str && !is_type_diagnostic_item(cx, ty, sym!(string_type)) {
return None;
}
.collect::<FxHashSet<_>>();
if !raw_ptrs.is_empty() {
- let tables = cx.tcx.body_tables(body.id());
+ let typeck_results = cx.tcx.typeck_body(body.id());
let mut v = DerefVisitor {
cx,
ptrs: raw_ptrs,
- tables,
+ typeck_results,
};
intravisit::walk_expr(&mut v, expr);
return false; // ignore `_` patterns
}
let def_id = pat.hir_id.owner.to_def_id();
- if cx.tcx.has_typeck_tables(def_id) {
- is_mutable_ty(
- cx,
- &cx.tcx.typeck_tables_of(def_id.expect_local()).pat_ty(pat),
- pat.span,
- tys,
- )
+ if cx.tcx.has_typeck_results(def_id) {
+ is_mutable_ty(cx, &cx.tcx.typeck(def_id.expect_local()).pat_ty(pat), pat.span, tys)
} else {
false
}
struct DerefVisitor<'a, 'tcx> {
cx: &'a LateContext<'tcx>,
ptrs: FxHashSet<hir::HirId>,
- tables: &'a ty::TypeckTables<'tcx>,
+ typeck_results: &'a ty::TypeckResults<'tcx>,
}
impl<'a, 'tcx> intravisit::Visitor<'tcx> for DerefVisitor<'a, 'tcx> {
fn visit_expr(&mut self, expr: &'tcx hir::Expr<'_>) {
match expr.kind {
hir::ExprKind::Call(ref f, args) => {
- let ty = self.tables.expr_ty(f);
+ let ty = self.typeck_results.expr_ty(f);
if type_is_unsafe_function(self.cx, ty) {
for arg in args {
}
},
hir::ExprKind::MethodCall(_, _, args, _) => {
- let def_id = self.tables.type_dependent_def_id(expr.hir_id).unwrap();
+ let def_id = self.typeck_results.type_dependent_def_id(expr.hir_id).unwrap();
let base_type = self.cx.tcx.type_of(def_id);
if type_is_unsafe_function(self.cx, base_type) {
let mut tys = FxHashSet::default();
for arg in args {
let def_id = arg.hir_id.owner.to_def_id();
- if self.cx.tcx.has_typeck_tables(def_id)
+ if self.cx.tcx.has_typeck_results(def_id)
&& is_mutable_ty(
self.cx,
- self.cx.tcx.typeck_tables_of(def_id.expect_local()).expr_ty(arg),
+ self.cx.tcx.typeck(def_id.expect_local()).expr_ty(arg),
arg.span,
&mut tys,
)
// Argument 0 (the struct we're calling the method on) is a vector
if let Some(struct_calling_on) = args.get(0);
- let struct_ty = cx.tables().expr_ty(struct_calling_on);
+ let struct_ty = cx.typeck_results().expr_ty(struct_calling_on);
if is_type_diagnostic_item(cx, struct_ty, sym!(vec_type));
// Argument to "get" is a subtraction
// `1 << 0` is a common pattern in bit manipulation code
if_chain! {
if let BinOpKind::Shl = cmp.node;
- if let Some(Constant::Int(0)) = constant_simple(cx, cx.tables(), right);
- if let Some(Constant::Int(1)) = constant_simple(cx, cx.tables(), left);
+ if let Some(Constant::Int(0)) = constant_simple(cx, cx.typeck_results(), right);
+ if let Some(Constant::Int(1)) = constant_simple(cx, cx.typeck_results(), left);
then {
return true;
}
#[allow(clippy::cast_possible_wrap)]
fn check(cx: &LateContext<'_>, e: &Expr<'_>, m: i8, span: Span, arg: Span) {
- if let Some(Constant::Int(v)) = constant_simple(cx, cx.tables(), e) {
- let check = match cx.tables().expr_ty(e).kind {
+ if let Some(Constant::Int(v)) = constant_simple(cx, cx.typeck_results(), e) {
+ let check = match cx.typeck_results().expr_ty(e).kind {
ty::Int(ity) => unsext(cx.tcx, -1_i128, ity),
ty::Uint(uty) => clip(cx.tcx, !0, uty),
_ => return,
if_chain! {
if let ExprKind::MethodCall(path, _span, args, _) = &expr.kind;
if path.ident.to_string() == "lock";
- let ty = cx.tables().expr_ty(&args[0]);
+ let ty = cx.typeck_results().expr_ty(&args[0]);
if is_type_diagnostic_item(cx, ty, sym!(mutex_type));
then {
Some(&args[0])
if let ExprKind::MethodCall(_, ok_span, ref result_types, _) = op.kind; //check is expr.ok() has type Result<T,E>.ok(, _)
if let PatKind::TupleStruct(QPath::Resolved(_, ref x), ref y, _) = body[0].pat.kind; //get operation
if method_chain_args(op, &["ok"]).is_some(); //test to see if using ok() methoduse std::marker::Sized;
- if is_type_diagnostic_item(cx, cx.tables().expr_ty(&result_types[0]), sym!(result_type));
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&result_types[0]), sym!(result_type));
if rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| s.print_path(x, false)) == "Some";
then {
};
// Check if the variable in the condition statement is an integer
- if !cx.tables().expr_ty(cond_var).is_integral() {
+ if !cx.typeck_results().expr_ty(cond_var).is_integral() {
return;
}
ExprKind::Lit(ref cond_lit) => {
// Check if the constant is zero
if let LitKind::Int(0, _) = cond_lit.node {
- if cx.tables().expr_ty(cond_left).is_signed() {
+ if cx.typeck_results().expr_ty(cond_left).is_signed() {
} else {
print_lint_and_sugg(cx, &var_name, expr);
};
impl<'tcx> LateLintPass<'tcx> for IndexingSlicing {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if let ExprKind::Index(ref array, ref index) = &expr.kind {
- let ty = cx.tables().expr_ty(array);
+ let ty = cx.typeck_results().expr_ty(array);
if let Some(range) = higher::range(cx, index) {
// Ranged indexes, i.e., &x[n..m], &x[n..], &x[..n] and &x[..]
if let ty::Array(_, s) = ty.kind {
// Catchall non-range index, i.e., [n] or [n << m]
if let ty::Array(..) = ty.kind {
// Index is a constant uint.
- if let Some(..) = constant(cx, cx.tables(), index) {
+ if let Some(..) = constant(cx, cx.typeck_results(), index) {
// Let rustc's `const_err` lint handle constant `usize` indexing on arrays.
return;
}
range: higher::Range<'_>,
array_size: u128,
) -> (Option<u128>, Option<u128>) {
- let s = range.start.map(|expr| constant(cx, cx.tables(), expr).map(|(c, _)| c));
+ let s = range
+ .start
+ .map(|expr| constant(cx, cx.typeck_results(), expr).map(|(c, _)| c));
let start = match s {
Some(Some(Constant::Int(x))) => Some(x),
Some(_) => None,
None => Some(0),
};
- let e = range.end.map(|expr| constant(cx, cx.tables(), expr).map(|(c, _)| c));
+ let e = range
+ .end
+ .map(|expr| constant(cx, cx.typeck_results(), expr).map(|(c, _)| c));
let end = match e {
Some(Some(Constant::Int(x))) => {
if range.limits == RangeLimits::Closed {
}
if method.ident.name == sym!(last) && args.len() == 1 {
let not_double_ended = get_trait_def_id(cx, &paths::DOUBLE_ENDED_ITERATOR).map_or(false, |id| {
- !implements_trait(cx, cx.tables().expr_ty(&args[0]), id, &[])
+ !implements_trait(cx, cx.typeck_results().expr_ty(&args[0]), id, &[])
});
if not_double_ended {
return is_infinite(cx, &args[0]);
}
} else if method.ident.name == sym!(collect) {
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
if INFINITE_COLLECTORS.iter().any(|path| match_type(cx, ty, path)) {
return is_infinite(cx, &args[0]);
}
if let hir::ExprKind::Binary(binop, left, right) = &expr.kind;
if let hir::BinOpKind::Div = &binop.node;
then {
- let (left_ty, right_ty) = (cx.tables().expr_ty(left), cx.tables().expr_ty(right));
+ let (left_ty, right_ty) = (cx.typeck_results().expr_ty(left), cx.typeck_results().expr_ty(right));
return left_ty.is_integral() && right_ty.is_integral();
}
}
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
if_chain! {
if let ExprKind::Repeat(_, _) = expr.kind;
- if let ty::Array(element_type, cst) = cx.tables().expr_ty(expr).kind;
+ if let ty::Array(element_type, cst) = cx.typeck_results().expr_ty(expr).kind;
if let ConstKind::Value(val) = cst.val;
if let ConstValue::Scalar(element_count) = val;
if let Ok(element_count) = element_count.to_machine_usize(&cx.tcx);
return false;
}
- let ty = &walk_ptrs_ty(cx.tables().expr_ty(expr));
+ let ty = &walk_ptrs_ty(cx.typeck_results().expr_ty(expr));
match ty.kind {
ty::Dynamic(ref tt, ..) => tt.principal().map_or(false, |principal| {
cx.tcx
then {
let span = stmt.span.to(if_.span);
- let has_interior_mutability = !cx.tables().node_type(canonical_id).is_freeze(
+ let has_interior_mutability = !cx.typeck_results().node_type(canonical_id).is_freeze(
cx.tcx.at(span),
cx.param_env,
);
if let PatKind::Wild = local.pat.kind;
if let Some(ref init) = local.init;
then {
- let init_ty = cx.tables().expr_ty(init);
+ let init_ty = cx.typeck_results().expr_ty(init);
let contains_sync_guard = init_ty.walk().any(|inner| match inner.unpack() {
GenericArgKind::Type(inner_ty) => {
SYNC_GUARD_PATHS.iter().any(|path| match_type(cx, inner_ty, path))
"consider using an underscore-prefixed named \
binding or dropping explicitly with `std::mem::drop`"
)
- } else if is_must_use_ty(cx, cx.tables().expr_ty(init)) {
+ } else if is_must_use_ty(cx, cx.typeck_results().expr_ty(init)) {
span_lint_and_help(
cx,
LET_UNDERSCORE_MUST_USE,
if_chain! {
if let ExprKind::MethodCall(..) | ExprKind::Call(..) = iter_expr.kind;
if let Some(iter_def_id) = get_trait_def_id(cx, &paths::ITERATOR);
- if implements_trait(cx, cx.tables().expr_ty(iter_expr), iter_def_id, &[]);
+ if implements_trait(cx, cx.typeck_results().expr_ty(iter_expr), iter_def_id, &[]);
then {
return;
}
if_chain! {
if let ExprKind::Index(seqexpr_left, idx_left) = lhs.kind;
if let ExprKind::Index(seqexpr_right, idx_right) = rhs.kind;
- if is_slice_like(cx, cx.tables().expr_ty(seqexpr_left))
- && is_slice_like(cx, cx.tables().expr_ty(seqexpr_right));
+ if is_slice_like(cx, cx.typeck_results().expr_ty(seqexpr_left))
+ && is_slice_like(cx, cx.typeck_results().expr_ty(seqexpr_right));
if let Some(offset_left) = get_offset(cx, &idx_left, canonical_id);
if let Some(offset_right) = get_offset(cx, &idx_right, canonical_id);
lint_iter_method(cx, args, arg, method_name);
}
} else if method_name == "into_iter" && match_trait_method(cx, arg, &paths::INTO_ITERATOR) {
- let receiver_ty = cx.tables().expr_ty(&args[0]);
- let receiver_ty_adjusted = cx.tables().expr_ty_adjusted(&args[0]);
+ let receiver_ty = cx.typeck_results().expr_ty(&args[0]);
+ let receiver_ty_adjusted = cx.typeck_results().expr_ty_adjusted(&args[0]);
if TyS::same_type(receiver_ty, receiver_ty_adjusted) {
let mut applicability = Applicability::MachineApplicable;
let object = snippet_with_applicability(cx, args[0].span, "_", &mut applicability);
/// Checks for `for` loops over `Option`s and `Result`s.
fn check_arg_type(cx: &LateContext<'_>, pat: &Pat<'_>, arg: &Expr<'_>) {
- let ty = cx.tables().expr_ty(arg);
+ let ty = cx.typeck_results().expr_ty(arg);
if is_type_diagnostic_item(cx, ty, sym!(option_type)) {
span_lint_and_help(
cx,
/// If `arg` was the argument to a `for` loop, return the "cleanest" way of writing the
/// actual `Iterator` that the loop uses.
fn make_iterator_snippet(cx: &LateContext<'_>, arg: &Expr<'_>, applic_ref: &mut Applicability) -> String {
- let impls_iterator = get_trait_def_id(cx, &paths::ITERATOR)
- .map_or(false, |id| implements_trait(cx, cx.tables().expr_ty(arg), id, &[]));
+ let impls_iterator = get_trait_def_id(cx, &paths::ITERATOR).map_or(false, |id| {
+ implements_trait(cx, cx.typeck_results().expr_ty(arg), id, &[])
+ });
if impls_iterator {
format!(
"{}",
// (&mut x).into_iter() ==> x.iter_mut()
match &arg.kind {
ExprKind::AddrOf(BorrowKind::Ref, mutability, arg_inner)
- if has_iter_method(cx, cx.tables().expr_ty(&arg_inner)).is_some() =>
+ if has_iter_method(cx, cx.typeck_results().expr_ty(&arg_inner)).is_some() =>
{
let meth_name = match mutability {
Mutability::Mut => "iter_mut",
if let PatKind::Tuple(ref pat, _) = pat.kind {
if pat.len() == 2 {
let arg_span = arg.span;
- let (new_pat_span, kind, ty, mutbl) = match cx.tables().expr_ty(arg).kind {
+ let (new_pat_span, kind, ty, mutbl) = match cx.typeck_results().expr_ty(arg).kind {
ty::Ref(_, ty, mutbl) => match (&pat[0].kind, &pat[1].kind) {
(key, _) if pat_is_wild(key, body) => (pat[1].span, "value", ty, mutbl),
(_, value) if pat_is_wild(value, body) => (pat[0].span, "key", ty, Mutability::Not),
};
let def_id = body.hir_id.owner.to_def_id();
cx.tcx.infer_ctxt().enter(|infcx| {
- ExprUseVisitor::new(&mut delegate, &infcx, def_id.expect_local(), cx.param_env, cx.tables()).walk_expr(body);
+ ExprUseVisitor::new(
+ &mut delegate,
+ &infcx,
+ def_id.expect_local(),
+ cx.param_env,
+ cx.typeck_results(),
+ )
+ .walk_expr(body);
});
delegate.mutation_span()
}
if index_used_directly {
self.indexed_directly.insert(
seqvar.segments[0].ident.name,
- (Some(extent), self.cx.tables().node_type(seqexpr.hir_id)),
+ (Some(extent), self.cx.typeck_results().node_type(seqexpr.hir_id)),
);
}
return false; // no need to walk further *on the variable*
if index_used_directly {
self.indexed_directly.insert(
seqvar.segments[0].ident.name,
- (None, self.cx.tables().node_type(seqexpr.hir_id)),
+ (None, self.cx.typeck_results().node_type(seqexpr.hir_id)),
);
}
return false; // no need to walk further *on the variable*
ExprKind::Call(ref f, args) => {
self.visit_expr(f);
for expr in args {
- let ty = self.cx.tables().expr_ty_adjusted(expr);
+ let ty = self.cx.typeck_results().expr_ty_adjusted(expr);
self.prefer_mutable = false;
if let ty::Ref(_, _, mutbl) = ty.kind {
if mutbl == Mutability::Mut {
}
},
ExprKind::MethodCall(_, _, args, _) => {
- let def_id = self.cx.tables().type_dependent_def_id(expr.hir_id).unwrap();
+ let def_id = self.cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap();
for (ty, expr) in self.cx.tcx.fn_sig(def_id).inputs().skip_binder().iter().zip(args) {
self.prefer_mutable = false;
if let ty::Ref(_, _, mutbl) = ty.kind {
fn is_ref_iterable_type(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
// no walk_ptrs_ty: calling iter() on a reference can make sense because it
// will allow further borrows afterwards
- let ty = cx.tables().expr_ty(e);
+ let ty = cx.typeck_results().expr_ty(e);
is_iterable_array(ty, cx) ||
is_type_diagnostic_item(cx, ty, sym!(vec_type)) ||
match_type(cx, ty, &paths::LINKED_LIST) ||
}
fn check_infinite_loop<'tcx>(cx: &LateContext<'tcx>, cond: &'tcx Expr<'_>, expr: &'tcx Expr<'_>) {
- if constant(cx, cx.tables(), cond).is_some() {
+ if constant(cx, cx.typeck_results(), cond).is_some() {
// A pure constant condition (e.g., `while false`) is not linted.
return;
}
if let Some(ref generic_args) = chain_method.args;
if let Some(GenericArg::Type(ref ty)) = generic_args.args.get(0);
then {
- let ty = cx.tables().node_type(ty.hir_id);
+ let ty = cx.typeck_results().node_type(ty.hir_id);
if is_type_diagnostic_item(cx, ty, sym!(vec_type)) ||
is_type_diagnostic_item(cx, ty, sym!(vecdeque_type)) ||
match_type(cx, ty, &paths::BTREEMAP) ||
if let hir::ExprKind::MethodCall(ref method, _, ref args, _) = e.kind;
if args.len() == 2;
if method.ident.as_str() == "map";
- let ty = cx.tables().expr_ty(&args[0]);
+ let ty = cx.typeck_results().expr_ty(&args[0]);
if is_type_diagnostic_item(cx, ty, sym!(option_type)) || match_trait_method(cx, e, &paths::ITERATOR);
if let hir::ExprKind::Closure(_, _, body_id, _, _) = args[1].kind;
let closure_body = cx.tcx.hir().body(body_id);
match closure_expr.kind {
hir::ExprKind::Unary(hir::UnOp::UnDeref, ref inner) => {
if ident_eq(name, inner) {
- if let ty::Ref(.., Mutability::Not) = cx.tables().expr_ty(inner).kind {
+ if let ty::Ref(.., Mutability::Not) = cx.typeck_results().expr_ty(inner).kind {
lint(cx, e.span, args[0].span, true);
}
}
if ident_eq(name, &obj[0]) && method.ident.as_str() == "clone"
&& match_trait_method(cx, closure_expr, &paths::CLONE_TRAIT) {
- let obj_ty = cx.tables().expr_ty(&obj[0]);
+ let obj_ty = cx.typeck_results().expr_ty(&obj[0]);
if let ty::Ref(_, ty, _) = obj_ty.kind {
let copy = is_copy(cx, ty);
lint(cx, e.span, args[0].span, copy);
if_chain! {
if let ExprKind::MethodCall(ref method, _, ref args, _) = expr.kind;
if args.len() == 2 && method.ident.as_str() == "map";
- let caller_ty = cx.tables().expr_ty(&args[0]);
+ let caller_ty = cx.typeck_results().expr_ty(&args[0]);
if match_trait_method(cx, expr, &paths::ITERATOR)
|| is_type_diagnostic_item(cx, caller_ty, sym!(result_type))
|| is_type_diagnostic_item(cx, caller_ty, sym!(option_type));
/// Returns true iff an expression returns the same thing as a parameter's pattern
fn match_expr_param(cx: &LateContext<'_>, expr: &Expr<'_>, pat: &Pat<'_>) -> bool {
if let PatKind::Binding(_, _, ident, _) = pat.kind {
- match_var(expr, ident.name) && !(cx.tables().hir_owner == expr.hir_id.owner && is_adjusted(cx, expr))
+ match_var(expr, ident.name) && !(cx.typeck_results().hir_owner == expr.hir_id.owner && is_adjusted(cx, expr))
} else {
false
}
}
fn is_unit_function(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
if let ty::FnDef(id, _) = ty.kind {
if let Some(fn_type) = cx.tcx.fn_sig(id).no_bound_vars() {
}
fn is_unit_expression(cx: &LateContext<'_>, expr: &hir::Expr<'_>) -> bool {
- is_unit_type(cx.tables().expr_ty(expr))
+ is_unit_type(cx.typeck_results().expr_ty(expr))
}
/// The expression inside a closure may or may not have surrounding braces and
fn lint_map_unit_fn(cx: &LateContext<'_>, stmt: &hir::Stmt<'_>, expr: &hir::Expr<'_>, map_args: &[hir::Expr<'_>]) {
let var_arg = &map_args[0];
- let (map_type, variant, lint) = if is_type_diagnostic_item(cx, cx.tables().expr_ty(var_arg), sym!(option_type)) {
- ("Option", "Some", OPTION_MAP_UNIT_FN)
- } else if is_type_diagnostic_item(cx, cx.tables().expr_ty(var_arg), sym!(result_type)) {
- ("Result", "Ok", RESULT_MAP_UNIT_FN)
- } else {
- return;
- };
+ let (map_type, variant, lint) =
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(var_arg), sym!(option_type)) {
+ ("Option", "Some", OPTION_MAP_UNIT_FN)
+ } else if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(var_arg), sym!(result_type)) {
+ ("Result", "Ok", RESULT_MAP_UNIT_FN)
+ } else {
+ return;
+ };
let fn_arg = &map_args[1];
if is_unit_function(cx, fn_arg) {
}
fn is_vector(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
let ty = walk_ptrs_ty(ty);
is_type_diagnostic_item(cx, ty, sym!(vec_type))
}
fn is_full_range(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
let ty = walk_ptrs_ty(ty);
match_type(cx, ty, &utils::paths::RANGE_FULL)
}
return;
};
- let ty = cx.tables().expr_ty(ex);
+ let ty = cx.typeck_results().expr_ty(ex);
if ty.kind != ty::Bool || is_allowed(cx, MATCH_BOOL, ex.hir_id) {
check_single_match_single_pattern(cx, ex, arms, expr, els);
check_single_match_opt_like(cx, ex, arms, expr, ty, els);
fn check_match_bool(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>) {
// Type of expression is `bool`.
- if cx.tables().expr_ty(ex).kind == ty::Bool {
+ if cx.typeck_results().expr_ty(ex).kind == ty::Bool {
span_lint_and_then(
cx,
MATCH_BOOL,
}
fn check_overlapping_arms<'tcx>(cx: &LateContext<'tcx>, ex: &'tcx Expr<'_>, arms: &'tcx [Arm<'_>]) {
- if arms.len() >= 2 && cx.tables().expr_ty(ex).is_integral() {
- let ranges = all_ranges(cx, arms, cx.tables().expr_ty(ex));
+ if arms.len() >= 2 && cx.typeck_results().expr_ty(ex).is_integral() {
+ let ranges = all_ranges(cx, arms, cx.typeck_results().expr_ty(ex));
let type_ranges = type_ranges(&ranges);
if !type_ranges.is_empty() {
if let Some((start, end)) = overlapping(&type_ranges) {
}
fn check_wild_err_arm(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) {
- let ex_ty = walk_ptrs_ty(cx.tables().expr_ty(ex));
+ let ex_ty = walk_ptrs_ty(cx.typeck_results().expr_ty(ex));
if is_type_diagnostic_item(cx, ex_ty, sym!(result_type)) {
for arm in arms {
if let PatKind::TupleStruct(ref path, ref inner, _) = arm.pat.kind {
}
fn check_wild_enum_match(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>]) {
- let ty = cx.tables().expr_ty(ex);
+ let ty = cx.typeck_results().expr_ty(ex);
if !ty.is_enum() {
// If there isn't a nice closed set of possible values that can be conveniently enumerated,
// don't complain about not enumerating the mall.
"as_mut"
};
- let output_ty = cx.tables().expr_ty(expr);
- let input_ty = cx.tables().expr_ty(ex);
+ let output_ty = cx.typeck_results().expr_ty(expr);
+ let input_ty = cx.typeck_results().expr_ty(ex);
let cast = if_chain! {
if let ty::Adt(_, substs) = input_ty.kind;
fn find_matches_sugg(cx: &LateContext<'_>, ex: &Expr<'_>, arms: &[Arm<'_>], expr: &Expr<'_>, desugared: bool) {
if_chain! {
if arms.len() == 2;
- if cx.tables().expr_ty(expr).is_bool();
+ if cx.typeck_results().expr_ty(expr).is_bool();
if is_wild(&arms[1].pat);
if let Some(first) = find_bool_lit(&arms[0].body.kind, desugared);
if let Some(second) = find_bool_lit(&arms[1].body.kind, desugared);
match match_body.kind {
ExprKind::Block(block, _) => {
// macro + expr_ty(body) == ()
- if block.span.from_expansion() && cx.tables().expr_ty(&match_body).is_unit() {
+ if block.span.from_expansion() && cx.typeck_results().expr_ty(&match_body).is_unit() {
snippet_body.push(';');
}
},
_ => {
// expr_ty(body) == ()
- if cx.tables().expr_ty(&match_body).is_unit() {
+ if cx.typeck_results().expr_ty(&match_body).is_unit() {
snippet_body.push(';');
}
},
{
if let PatKind::Range(ref lhs, ref rhs, range_end) = pat.kind {
let lhs = match lhs {
- Some(lhs) => constant(cx, cx.tables(), lhs)?.0,
+ Some(lhs) => constant(cx, cx.typeck_results(), lhs)?.0,
None => miri_to_const(ty.numeric_min_val(cx.tcx)?)?,
};
let rhs = match rhs {
- Some(rhs) => constant(cx, cx.tables(), rhs)?.0,
+ Some(rhs) => constant(cx, cx.typeck_results(), rhs)?.0,
None => miri_to_const(ty.numeric_max_val(cx.tcx)?)?,
};
let rhs = match range_end {
}
if let PatKind::Lit(ref value) = pat.kind {
- let value = constant(cx, cx.tables(), value)?.0;
+ let value = constant(cx, cx.typeck_results(), value)?.0;
return Some(SpannedRange {
span: pat.span,
node: (value.clone(), Bound::Included(value)),
if let Some(def_id) = cx.qpath_res(func_qpath, func.hir_id).opt_def_id();
if match_def_path(cx, def_id, &paths::MEM_DISCRIMINANT);
// type is non-enum
- let ty_param = cx.tables().node_substs(func.hir_id).type_at(0);
+ let ty_param = cx.typeck_results().node_substs(func.hir_id).type_at(0);
if !ty_param.is_enum();
then {
if let ExprKind::Path(ref qpath) = path_expr.kind {
if let Some(def_id) = qpath_res(cx, qpath, path_expr.hir_id).opt_def_id() {
if match_def_path(cx, def_id, &paths::MEM_FORGET) {
- let forgot_ty = cx.tables().expr_ty(&args[0]);
+ let forgot_ty = cx.typeck_results().expr_ty(&args[0]);
if forgot_ty.ty_adt_def().map_or(false, |def| def.has_dtor(cx.tcx)) {
span_lint(cx, MEM_FORGET, e.span, "usage of `mem::forget` on `Drop` type");
fn check_replace_with_uninit(cx: &LateContext<'_>, src: &Expr<'_>, dest: &Expr<'_>, expr_span: Span) {
if_chain! {
// check if replacement is mem::MaybeUninit::uninit().assume_init()
- if let Some(method_def_id) = cx.tables().type_dependent_def_id(src.hir_id);
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(src.hir_id);
if cx.tcx.is_diagnostic_item(sym::assume_init, method_def_id);
then {
let mut applicability = Applicability::MachineApplicable;
applicability,
);
} else if cx.tcx.is_diagnostic_item(sym::mem_zeroed, repl_def_id) &&
- !cx.tables().expr_ty(src).is_primitive() {
+ !cx.typeck_results().expr_ty(src).is_primitive() {
span_lint_and_help(
cx,
MEM_REPLACE_WITH_UNINIT,
/// Lint use of `_.and_then(|x| Some(y))` for `Option`s
fn lint(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Expr<'_>]) {
- if !match_type(cx, cx.tables().expr_ty(&args[0]), Self::TYPE_QPATH) {
+ if !match_type(cx, cx.typeck_results().expr_ty(&args[0]), Self::TYPE_QPATH) {
return;
}
/// Checks for the `INEFFICIENT_TO_STRING` lint
pub fn lint<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, arg: &hir::Expr<'_>, arg_ty: Ty<'tcx>) {
if_chain! {
- if let Some(to_string_meth_did) = cx.tables().type_dependent_def_id(expr.hir_id);
+ if let Some(to_string_meth_did) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
if match_def_path(cx, to_string_meth_did, &paths::TO_STRING_METHOD);
- if let Some(substs) = cx.tables().node_substs_opt(expr.hir_id);
+ if let Some(substs) = cx.typeck_results().node_substs_opt(expr.hir_id);
let self_ty = substs.type_at(0);
let (deref_self_ty, deref_count) = walk_ptrs_ty_depth(self_ty);
if deref_count >= 1;
let arith_lhs = &args[1][0];
let arith_rhs = &args[1][1];
- let ty = cx.tables().expr_ty(arith_lhs);
+ let ty = cx.typeck_results().expr_ty(arith_lhs);
if !ty.is_integral() {
return;
}
}
}
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
let ty_str = ty.to_string();
// `std::T::MAX` `std::T::MIN` constants
lint_or_fun_call(cx, expr, *method_span, &method_call.ident.as_str(), args);
lint_expect_fun_call(cx, expr, *method_span, &method_call.ident.as_str(), args);
- let self_ty = cx.tables().expr_ty_adjusted(&args[0]);
+ let self_ty = cx.typeck_results().expr_ty_adjusted(&args[0]);
if args.len() == 1 && method_call.ident.name == sym!(clone) {
lint_clone_on_copy(cx, expr, &args[0], self_ty);
lint_clone_on_ref_ptr(cx, expr, &args[0]);
if let hir::ExprKind::Path(ref qpath) = fun.kind;
let path = &*last_path_segment(qpath).ident.as_str();
if ["default", "new"].contains(&path);
- let arg_ty = cx.tables().expr_ty(arg);
+ let arg_ty = cx.typeck_results().expr_ty(arg);
if let Some(default_trait_id) = get_trait_def_id(cx, &paths::DEFAULT_TRAIT);
if implements_trait(cx, arg_ty, default_trait_id, &[]);
) {
if let hir::ExprKind::MethodCall(ref path, _, ref args, _) = &arg.kind {
if path.ident.as_str() == "len" {
- let ty = walk_ptrs_ty(cx.tables().expr_ty(&args[0]));
+ let ty = walk_ptrs_ty(cx.typeck_results().expr_ty(&args[0]));
match ty.kind {
ty::Slice(_) | ty::Array(_, _) => return,
if { finder.visit_expr(&arg); finder.found };
if !contains_return(&arg);
- let self_ty = cx.tables().expr_ty(self_expr);
+ let self_ty = cx.typeck_results().expr_ty(self_expr);
if let Some(&(_, fn_has_arguments, poss, suffix)) =
know_types.iter().find(|&&i| match_type(cx, self_ty, i.0));
if call_args.len() == 1
&& (method_name.ident.name == sym!(as_str) || method_name.ident.name == sym!(as_ref))
&& {
- let arg_type = cx.tables().expr_ty(&call_args[0]);
+ let arg_type = cx.typeck_results().expr_ty(&call_args[0]);
let base_type = walk_ptrs_ty(arg_type);
base_type.kind == ty::Str || is_type_diagnostic_item(cx, base_type, sym!(string_type))
}
// Only `&'static str` or `String` can be used directly in the `panic!`. Other types should be
// converted to string.
fn requires_to_string(cx: &LateContext<'_>, arg: &hir::Expr<'_>) -> bool {
- let arg_ty = cx.tables().expr_ty(arg);
+ let arg_ty = cx.typeck_results().expr_ty(arg);
if is_type_diagnostic_item(cx, arg_ty, sym!(string_type)) {
return false;
}
false
}
},
- hir::ExprKind::MethodCall(..) => cx
- .tables()
- .type_dependent_def_id(arg.hir_id)
- .map_or(false, |method_id| {
- matches!(
- cx.tcx.fn_sig(method_id).output().skip_binder().kind,
- ty::Ref(ty::ReStatic, ..)
- )
- }),
+ hir::ExprKind::MethodCall(..) => {
+ cx.typeck_results()
+ .type_dependent_def_id(arg.hir_id)
+ .map_or(false, |method_id| {
+ matches!(
+ cx.tcx.fn_sig(method_id).output().skip_binder().kind,
+ ty::Ref(ty::ReStatic, ..)
+ )
+ })
+ },
hir::ExprKind::Path(ref p) => matches!(
cx.qpath_res(p, arg.hir_id),
hir::def::Res::Def(hir::def::DefKind::Const | hir::def::DefKind::Static, _)
return;
}
- let receiver_type = cx.tables().expr_ty_adjusted(&args[0]);
+ let receiver_type = cx.typeck_results().expr_ty_adjusted(&args[0]);
let closure_args = if is_type_diagnostic_item(cx, receiver_type, sym!(option_type)) {
"||"
} else if is_type_diagnostic_item(cx, receiver_type, sym!(result_type)) {
/// Checks for the `CLONE_ON_COPY` lint.
fn lint_clone_on_copy(cx: &LateContext<'_>, expr: &hir::Expr<'_>, arg: &hir::Expr<'_>, arg_ty: Ty<'_>) {
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
if let ty::Ref(_, inner, _) = arg_ty.kind {
if let ty::Ref(_, innermost, _) = inner.kind {
span_lint_and_then(
// &*x is a nop, &x.clone() is not
hir::ExprKind::AddrOf(..) => return,
// (*x).func() is useless, x.clone().func() can work in case func borrows mutably
- hir::ExprKind::MethodCall(_, _, parent_args, _) if expr.hir_id == parent_args[0].hir_id => return,
+ hir::ExprKind::MethodCall(_, _, parent_args, _) if expr.hir_id == parent_args[0].hir_id => {
+ return;
+ },
_ => {},
},
}
// x.clone() might have dereferenced x, possibly through Deref impls
- if cx.tables().expr_ty(arg) == ty {
+ if cx.typeck_results().expr_ty(arg) == ty {
snip = Some(("try removing the `clone` call", format!("{}", snippet)));
} else {
let deref_count = cx
- .tables()
+ .typeck_results()
.expr_adjustments(arg)
.iter()
.filter(|adj| matches!(adj.kind, ty::adjustment::Adjust::Deref(_)))
}
fn lint_clone_on_ref_ptr(cx: &LateContext<'_>, expr: &hir::Expr<'_>, arg: &hir::Expr<'_>) {
- let obj_ty = walk_ptrs_ty(cx.tables().expr_ty(arg));
+ let obj_ty = walk_ptrs_ty(cx.typeck_results().expr_ty(arg));
if let ty::Adt(_, subst) = obj_ty.kind {
let caller_type = if is_type_diagnostic_item(cx, obj_ty, sym::Rc) {
let arg = &args[1];
if let Some(arglists) = method_chain_args(arg, &["chars"]) {
let target = &arglists[0][0];
- let self_ty = walk_ptrs_ty(cx.tables().expr_ty(target));
+ let self_ty = walk_ptrs_ty(cx.typeck_results().expr_ty(target));
let ref_str = if self_ty.kind == ty::Str {
""
} else if is_type_diagnostic_item(cx, self_ty, sym!(string_type)) {
}
fn lint_extend(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Expr<'_>]) {
- let obj_ty = walk_ptrs_ty(cx.tables().expr_ty(&args[0]));
+ let obj_ty = walk_ptrs_ty(cx.typeck_results().expr_ty(&args[0]));
if is_type_diagnostic_item(cx, obj_ty, sym!(string_type)) {
lint_string_extend(cx, expr, args);
}
fn lint_cstring_as_ptr(cx: &LateContext<'_>, expr: &hir::Expr<'_>, source: &hir::Expr<'_>, unwrap: &hir::Expr<'_>) {
if_chain! {
- let source_type = cx.tables().expr_ty(source);
+ let source_type = cx.typeck_results().expr_ty(source);
if let ty::Adt(def, substs) = source_type.kind;
if cx.tcx.is_diagnostic_item(sym!(result_type), def.did);
if match_type(cx, substs.type_at(0), &paths::CSTRING);
fn lint_iter_cloned_collect<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, iter_args: &'tcx [hir::Expr<'_>]) {
if_chain! {
- if is_type_diagnostic_item(cx, cx.tables().expr_ty(expr), sym!(vec_type));
- if let Some(slice) = derefs_to_slice(cx, &iter_args[0], cx.tables().expr_ty(&iter_args[0]));
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(expr), sym!(vec_type));
+ if let Some(slice) = derefs_to_slice(cx, &iter_args[0], cx.typeck_results().expr_ty(&iter_args[0]));
if let Some(to_replace) = expr.span.trim_start(slice.span.source_callsite());
then {
fn lint_step_by<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, args: &'tcx [hir::Expr<'_>]) {
if match_trait_method(cx, expr, &paths::ITERATOR) {
- if let Some((Constant::Int(0), _)) = constant(cx, cx.tables(), &args[1]) {
+ if let Some((Constant::Int(0), _)) = constant(cx, cx.typeck_results(), &args[1]) {
span_lint(
cx,
ITERATOR_STEP_BY_ZERO,
parent_expr_opt = get_parent_expr(cx, parent_expr);
}
- if derefs_to_slice(cx, caller_expr, cx.tables().expr_ty(caller_expr)).is_some() {
+ if derefs_to_slice(cx, caller_expr, cx.typeck_results().expr_ty(caller_expr)).is_some() {
// caller is a Slice
if_chain! {
if let hir::ExprKind::Index(ref caller_var, ref index_expr) = &caller_expr.kind;
);
}
}
- } else if is_type_diagnostic_item(cx, cx.tables().expr_ty(caller_expr), sym!(vec_type))
- || matches!(&walk_ptrs_ty(cx.tables().expr_ty(caller_expr)).kind, ty::Array(_, _))
+ } else if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(caller_expr), sym!(vec_type))
+ || matches!(
+ &walk_ptrs_ty(cx.typeck_results().expr_ty(caller_expr)).kind,
+ ty::Array(_, _)
+ )
{
// caller is a Vec or an Array
let mut applicability = Applicability::MachineApplicable;
) {
let iter_args = nth_and_iter_args[1];
let mut_str = if is_mut { "_mut" } else { "" };
- let caller_type = if derefs_to_slice(cx, &iter_args[0], cx.tables().expr_ty(&iter_args[0])).is_some() {
+ let caller_type = if derefs_to_slice(cx, &iter_args[0], cx.typeck_results().expr_ty(&iter_args[0])).is_some() {
"slice"
- } else if is_type_diagnostic_item(cx, cx.tables().expr_ty(&iter_args[0]), sym!(vec_type)) {
+ } else if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&iter_args[0]), sym!(vec_type)) {
"Vec"
- } else if is_type_diagnostic_item(cx, cx.tables().expr_ty(&iter_args[0]), sym!(vecdeque_type)) {
+ } else if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&iter_args[0]), sym!(vecdeque_type)) {
"VecDeque"
} else {
let nth_args = nth_and_iter_args[0];
fn lint_iter_nth_zero<'tcx>(cx: &LateContext<'tcx>, expr: &hir::Expr<'_>, nth_args: &'tcx [hir::Expr<'_>]) {
if_chain! {
if match_trait_method(cx, expr, &paths::ITERATOR);
- if let Some((Constant::Int(0), _)) = constant(cx, cx.tables(), &nth_args[1]);
+ if let Some((Constant::Int(0), _)) = constant(cx, cx.typeck_results(), &nth_args[1]);
then {
let mut applicability = Applicability::MachineApplicable;
span_lint_and_sugg(
// Note: we don't want to lint `get_mut().unwrap` for `HashMap` or `BTreeMap`,
// because they do not implement `IndexMut`
let mut applicability = Applicability::MachineApplicable;
- let expr_ty = cx.tables().expr_ty(&get_args[0]);
+ let expr_ty = cx.typeck_results().expr_ty(&get_args[0]);
let get_args_str = if get_args.len() > 1 {
snippet_with_applicability(cx, get_args[1].span, "_", &mut applicability)
} else {
}
if let hir::ExprKind::MethodCall(ref path, _, ref args, _) = expr.kind {
- if path.ident.name == sym!(iter) && may_slice(cx, cx.tables().expr_ty(&args[0])) {
+ if path.ident.name == sym!(iter) && may_slice(cx, cx.typeck_results().expr_ty(&args[0])) {
Some(&args[0])
} else {
None
/// lint use of `unwrap()` for `Option`s and `Result`s
fn lint_unwrap(cx: &LateContext<'_>, expr: &hir::Expr<'_>, unwrap_args: &[hir::Expr<'_>]) {
- let obj_ty = walk_ptrs_ty(cx.tables().expr_ty(&unwrap_args[0]));
+ let obj_ty = walk_ptrs_ty(cx.typeck_results().expr_ty(&unwrap_args[0]));
let mess = if is_type_diagnostic_item(cx, obj_ty, sym!(option_type)) {
Some((UNWRAP_USED, "an Option", "None"))
/// lint use of `expect()` for `Option`s and `Result`s
fn lint_expect(cx: &LateContext<'_>, expr: &hir::Expr<'_>, expect_args: &[hir::Expr<'_>]) {
- let obj_ty = walk_ptrs_ty(cx.tables().expr_ty(&expect_args[0]));
+ let obj_ty = walk_ptrs_ty(cx.typeck_results().expr_ty(&expect_args[0]));
let mess = if is_type_diagnostic_item(cx, obj_ty, sym!(option_type)) {
Some((EXPECT_USED, "an Option", "None"))
fn lint_ok_expect(cx: &LateContext<'_>, expr: &hir::Expr<'_>, ok_args: &[hir::Expr<'_>]) {
if_chain! {
// lint if the caller of `ok()` is a `Result`
- if is_type_diagnostic_item(cx, cx.tables().expr_ty(&ok_args[0]), sym!(result_type));
- let result_type = cx.tables().expr_ty(&ok_args[0]);
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&ok_args[0]), sym!(result_type));
+ let result_type = cx.typeck_results().expr_ty(&ok_args[0]);
if let Some(error_type) = get_error_type(cx, result_type);
if has_debug_impl(error_type, cx);
}
// lint if caller of `.map().flatten()` is an Option
- if is_type_diagnostic_item(cx, cx.tables().expr_ty(&map_args[0]), sym!(option_type)) {
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&map_args[0]), sym!(option_type)) {
let msg = "called `map(..).flatten()` on an `Option`. \
This is more succinctly expressed by calling `.and_then(..)`";
let self_snippet = snippet(cx, map_args[0].span, "..");
unwrap_args: &'tcx [hir::Expr<'_>],
) {
// lint if the caller of `map()` is an `Option`
- let is_option = is_type_diagnostic_item(cx, cx.tables().expr_ty(&map_args[0]), sym!(option_type));
- let is_result = is_type_diagnostic_item(cx, cx.tables().expr_ty(&map_args[0]), sym!(result_type));
+ let is_option = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&map_args[0]), sym!(option_type));
+ let is_result = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&map_args[0]), sym!(result_type));
if is_option || is_result {
// Don't make a suggestion that may fail to compile due to mutably borrowing
/// lint use of `_.map_or(None, _)` for `Option`s and `Result`s
fn lint_map_or_none<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx hir::Expr<'_>, map_or_args: &'tcx [hir::Expr<'_>]) {
- let is_option = is_type_diagnostic_item(cx, cx.tables().expr_ty(&map_or_args[0]), sym!(option_type));
- let is_result = is_type_diagnostic_item(cx, cx.tables().expr_ty(&map_or_args[0]), sym!(result_type));
+ let is_option = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&map_or_args[0]), sym!(option_type));
+ let is_result = is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&map_or_args[0]), sym!(result_type));
// There are two variants of this `map_or` lint:
// (1) using `map_or` as an adapter from `Result<T,E>` to `Option<T>`
if segment.ident.name == sym!(Some);
then {
let mut applicability = Applicability::MachineApplicable;
- let self_ty = walk_ptrs_ty(cx.tables().expr_ty_adjusted(&args[0][0]));
+ let self_ty = walk_ptrs_ty(cx.typeck_results().expr_ty_adjusted(&args[0][0]));
if self_ty.kind != ty::Str {
return false;
if match_trait_method(cx, expr, &paths::ASREF_TRAIT) || match_trait_method(cx, expr, &paths::ASMUT_TRAIT) {
// check if the type after `as_ref` or `as_mut` is the same as before
let recvr = &as_ref_args[0];
- let rcv_ty = cx.tables().expr_ty(recvr);
- let res_ty = cx.tables().expr_ty(expr);
+ let rcv_ty = cx.typeck_results().expr_ty(recvr);
+ let res_ty = cx.typeck_results().expr_ty(expr);
let (base_res_ty, res_depth) = walk_ptrs_ty_depth(res_ty);
let (base_rcv_ty, rcv_depth) = walk_ptrs_ty_depth(rcv_ty);
if base_rcv_ty == base_res_ty && rcv_depth >= res_depth {
if args.is_empty();
if let hir::ExprKind::Path(ref path) = callee.kind;
if match_qpath(path, &paths::MEM_MAYBEUNINIT_UNINIT);
- if !is_maybe_uninit_ty_valid(cx, cx.tables().expr_ty_adjusted(outer));
+ if !is_maybe_uninit_ty_valid(cx, cx.typeck_results().expr_ty_adjusted(outer));
then {
span_lint(
cx,
) {
let same_mutability = |m| (is_mut && m == &hir::Mutability::Mut) || (!is_mut && m == &hir::Mutability::Not);
- let option_ty = cx.tables().expr_ty(&as_ref_args[0]);
+ let option_ty = cx.typeck_results().expr_ty(&as_ref_args[0]);
if !is_type_diagnostic_item(cx, option_ty, sym!(option_type)) {
return;
}
if let hir::ExprKind::Path(qpath) = &args[0].kind;
if let hir::def::Res::Local(local_id) = cx.qpath_res(qpath, args[0].hir_id);
if closure_body.params[0].pat.hir_id == local_id;
- let adj = cx.tables().expr_adjustments(&args[0]).iter().map(|x| &x.kind).collect::<Box<[_]>>();
+ let adj = cx.typeck_results().expr_adjustments(&args[0]).iter().map(|x| &x.kind).collect::<Box<[_]>>();
if let [ty::adjustment::Adjust::Deref(None), ty::adjustment::Adjust::Borrow(_)] = *adj;
then {
- let method_did = cx.tables().type_dependent_def_id(closure_expr.hir_id).unwrap();
+ let method_did = cx.typeck_results().type_dependent_def_id(closure_expr.hir_id).unwrap();
deref_aliases.iter().any(|path| match_def_path(cx, method_did, path))
} else {
false
fn check_pointer_offset(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Expr<'_>]) {
if_chain! {
if args.len() == 2;
- if let ty::RawPtr(ty::TypeAndMut { ref ty, .. }) = cx.tables().expr_ty(&args[0]).kind;
+ if let ty::RawPtr(ty::TypeAndMut { ref ty, .. }) = cx.typeck_results().expr_ty(&args[0]).kind;
if let Ok(layout) = cx.tcx.layout_of(cx.param_env.and(ty));
if layout.is_zst();
then {
}
fn lint_filetype_is_file(cx: &LateContext<'_>, expr: &hir::Expr<'_>, args: &[hir::Expr<'_>]) {
- let ty = cx.tables().expr_ty(&args[0]);
+ let ty = cx.typeck_results().expr_ty(&args[0]);
if !match_type(cx, ty, &paths::FILE_TYPE) {
return;
map_span: Span,
) {
// lint if the caller of `map()` is an `Option`
- if is_type_diagnostic_item(cx, cx.tables().expr_ty(&map_args[0]), sym!(option_type)) {
- if !is_copy(cx, cx.tables().expr_ty(&unwrap_args[1])) {
+ if is_type_diagnostic_item(cx, cx.typeck_results().expr_ty(&map_args[0]), sym!(option_type)) {
+ if !is_copy(cx, cx.typeck_results().expr_ty(&unwrap_args[1])) {
// Do not lint if the `map` argument uses identifiers in the `map`
// argument that are also used in the `unwrap_or` argument
}
match (
outer_max,
- Constant::partial_cmp(cx.tcx, cx.tables().expr_ty(ie), &outer_c, &inner_c),
+ Constant::partial_cmp(cx.tcx, cx.typeck_results().expr_ty(ie), &outer_c, &inner_c),
) {
(_, None) | (MinMax::Max, Some(Ordering::Less)) | (MinMax::Min, Some(Ordering::Greater)) => (),
_ => {
fn min_max<'a>(cx: &LateContext<'_>, expr: &'a Expr<'a>) -> Option<(MinMax, Constant, &'a Expr<'a>)> {
if let ExprKind::Call(ref path, ref args) = expr.kind {
if let ExprKind::Path(ref qpath) = path.kind {
- cx.tables()
+ cx.typeck_results()
.qpath_res(qpath, path.hir_id)
.opt_def_id()
.and_then(|def_id| {
if args.len() != 2 {
return None;
}
- constant_simple(cx, cx.tables(), &args[0]).map_or_else(
- || constant_simple(cx, cx.tables(), &args[1]).map(|c| (m, c, &args[0])),
+ constant_simple(cx, cx.typeck_results(), &args[0]).map_or_else(
+ || constant_simple(cx, cx.typeck_results(), &args[1]).map(|c| (m, c, &args[0])),
|c| {
- if constant_simple(cx, cx.tables(), &args[1]).is_none() {
+ if constant_simple(cx, cx.typeck_results(), &args[1]).is_none() {
// otherwise ignore
Some((m, c, &args[1]))
} else {
fn check_nan(cx: &LateContext<'_>, expr: &Expr<'_>, cmp_expr: &Expr<'_>) {
if_chain! {
if !in_constant(cx, cmp_expr.hir_id);
- if let Some((value, _)) = constant(cx, cx.tables(), expr);
+ if let Some((value, _)) = constant(cx, cx.typeck_results(), expr);
then {
let needs_lint = match value {
Constant::F32(num) => num.is_nan(),
}
fn is_named_constant<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
- if let Some((_, res)) = constant(cx, cx.tables(), expr) {
+ if let Some((_, res)) = constant(cx, cx.typeck_results(), expr) {
res
} else {
false
}
fn is_allowed<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> bool {
- match constant(cx, cx.tables(), expr) {
+ match constant(cx, cx.typeck_results(), expr) {
Some((Constant::F32(f), _)) => f == 0.0 || f.is_infinite(),
Some((Constant::F64(f), _)) => f == 0.0 || f.is_infinite(),
Some((Constant::Vec(vec), _)) => vec.iter().all(|f| match f {
}
fn is_float(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- let value = &walk_ptrs_ty(cx.tables().expr_ty(expr)).kind;
+ let value = &walk_ptrs_ty(cx.typeck_results().expr_ty(expr)).kind;
if let ty::Array(arr_ty, _) = value {
return matches!(arr_ty.kind, ty::Float(_));
}
fn is_array(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- matches!(&walk_ptrs_ty(cx.tables().expr_ty(expr)).kind, ty::Array(_, _))
+ matches!(&walk_ptrs_ty(cx.typeck_results().expr_ty(expr)).kind, ty::Array(_, _))
}
fn check_to_owned(cx: &LateContext<'_>, expr: &Expr<'_>, other: &Expr<'_>, left: bool) {
let (arg_ty, snip) = match expr.kind {
ExprKind::MethodCall(.., ref args, _) if args.len() == 1 => {
if match_trait_method(cx, expr, &paths::TO_STRING) || match_trait_method(cx, expr, &paths::TO_OWNED) {
- (cx.tables().expr_ty(&args[0]), snippet(cx, args[0].span, ".."))
+ (cx.typeck_results().expr_ty(&args[0]), snippet(cx, args[0].span, ".."))
} else {
return;
}
ExprKind::Call(ref path, ref v) if v.len() == 1 => {
if let ExprKind::Path(ref path) = path.kind {
if match_qpath(path, &["String", "from_str"]) || match_qpath(path, &["String", "from"]) {
- (cx.tables().expr_ty(&v[0]), snippet(cx, v[0].span, ".."))
+ (cx.typeck_results().expr_ty(&v[0]), snippet(cx, v[0].span, ".."))
} else {
return;
}
_ => return,
};
- let other_ty = cx.tables().expr_ty(other);
+ let other_ty = cx.typeck_results().expr_ty(other);
let without_deref = symmetric_partial_eq(cx, arg_ty, other_ty).unwrap_or_default();
let with_deref = arg_ty
}
fn analyze_operand(operand: &Expr<'_>, cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<OperandInfo> {
- match constant(cx, cx.tables(), operand) {
- Some((Constant::Int(v), _)) => match cx.tables().expr_ty(expr).kind {
+ match constant(cx, cx.typeck_results(), operand) {
+ Some((Constant::Int(v), _)) => match cx.typeck_results().expr_ty(expr).kind {
ty::Int(ity) => {
let value = sext(cx.tcx, v, ity);
return Some(OperandInfo {
}
fn check_non_const_operands<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>, operand: &Expr<'_>) {
- let operand_type = cx.tables().expr_ty(operand);
+ let operand_type = cx.typeck_results().expr_ty(operand);
if might_have_negative_value(operand_type) {
span_lint_and_then(
cx,
if let hir::PatKind::Wild = local.pat.kind {
return;
}
- check_ty(cx, local.span, cx.tables().pat_ty(&*local.pat));
+ check_ty(cx, local.span, cx.typeck_results().pat_ty(&*local.pat));
}
}
expr.span,
"generally you want to avoid `&mut &mut _` if possible",
);
- } else if let ty::Ref(_, _, hir::Mutability::Mut) = self.cx.tables().expr_ty(e).kind {
+ } else if let ty::Ref(_, _, hir::Mutability::Mut) = self.cx.typeck_results().expr_ty(e).kind {
span_lint(
self.cx,
MUT_MUT,
check_arguments(
cx,
arguments,
- cx.tables().expr_ty(fn_expr),
+ cx.typeck_results().expr_ty(fn_expr),
&rustc_hir_pretty::to_string(rustc_hir_pretty::NO_ANN, |s| s.print_qpath(path, false)),
);
}
},
ExprKind::MethodCall(ref path, _, ref arguments, _) => {
- let def_id = cx.tables().type_dependent_def_id(e.hir_id).unwrap();
- let substs = cx.tables().node_substs(e.hir_id);
+ let def_id = cx.typeck_results().type_dependent_def_id(e.hir_id).unwrap();
+ let substs = cx.typeck_results().node_substs(e.hir_id);
let method_type = cx.tcx.type_of(def_id).subst(cx.tcx, substs);
check_arguments(cx, arguments, method_type, &path.ident.as_str())
},
return;
},
ExprKind::Path(_) => {
- if let Some(adj) = self.cx.tables().adjustments().get(expr.hir_id) {
+ if let Some(adj) = self.cx.typeck_results().adjustments().get(expr.hir_id) {
if adj
.iter()
.any(|a| matches!(a.target.kind, ty::Ref(_, _, Mutability::Mut)))
impl<'tcx> LateLintPass<'tcx> for Mutex {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
if let ty::Adt(_, subst) = ty.kind {
if is_type_diagnostic_item(cx, ty, sym!(mutex_type)) {
let mutex_param = subst.type_at(0);
use self::Expression::{Bool, Other};
if let ExprKind::Binary(op, ref left_side, ref right_side) = e.kind {
- let (l_ty, r_ty) = (cx.tables().expr_ty(left_side), cx.tables().expr_ty(right_side));
+ let (l_ty, r_ty) = (
+ cx.typeck_results().expr_ty(left_side),
+ cx.typeck_results().expr_ty(right_side),
+ );
if l_ty.is_bool() && r_ty.is_bool() {
let mut applicability = Applicability::MachineApplicable;
return;
}
if let ExprKind::AddrOf(BorrowKind::Ref, Mutability::Not, ref inner) = e.kind {
- if let ty::Ref(..) = cx.tables().expr_ty(inner).kind {
- for adj3 in cx.tables().expr_adjustments(e).windows(3) {
+ if let ty::Ref(..) = cx.typeck_results().expr_ty(inner).kind {
+ for adj3 in cx.typeck_results().expr_adjustments(e).windows(3) {
if let [Adjustment {
kind: Adjust::Deref(_), ..
}, Adjustment {
}
if_chain! {
if let PatKind::Binding(BindingAnnotation::Ref, .., name, _) = pat.kind;
- if let ty::Ref(_, tam, mutbl) = cx.tables().pat_ty(pat).kind;
+ if let ty::Ref(_, tam, mutbl) = cx.typeck_results().pat_ty(pat).kind;
if mutbl == Mutability::Not;
if let ty::Ref(_, _, mutbl) = tam.kind;
// only lint immutable refs, because borrowed `&mut T` cannot be moved out
} = {
let mut ctx = MovedVariablesCtxt::default();
cx.tcx.infer_ctxt().enter(|infcx| {
- euv::ExprUseVisitor::new(&mut ctx, &infcx, fn_def_id, cx.param_env, cx.tables()).consume_body(body);
+ euv::ExprUseVisitor::new(&mut ctx, &infcx, fn_def_id, cx.param_env, cx.typeck_results())
+ .consume_body(body);
});
ctx
};
impl<'tcx> LateLintPass<'tcx> for NeedlessUpdate {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if let ExprKind::Struct(_, ref fields, Some(ref base)) = expr.kind {
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
if let ty::Adt(def, _) = ty.kind {
if fields.len() == def.non_enum_variant().fields.len() {
span_lint(
then {
- let ty = cx.tables().expr_ty(left);
+ let ty = cx.typeck_results().expr_ty(left);
let implements_ord = {
if let Some(id) = utils::get_trait_def_id(cx, &paths::ORD) {
fn check_mul(cx: &LateContext<'_>, span: Span, lit: &Expr<'_>, exp: &Expr<'_>) {
if_chain! {
if let ExprKind::Lit(ref l) = lit.kind;
- if let Constant::Int(1) = consts::lit_to_constant(&l.node, cx.tables().expr_ty_opt(lit));
- if cx.tables().expr_ty(exp).is_integral();
+ if let Constant::Int(1) = consts::lit_to_constant(&l.node, cx.typeck_results().expr_ty_opt(lit));
+ if cx.typeck_results().expr_ty(exp).is_integral();
then {
span_lint(cx, NEG_MULTIPLY, span, "Negation by multiplying with `-1`");
}
}
match expr.kind {
ExprKind::Lit(..) | ExprKind::Closure(..) => true,
- ExprKind::Path(..) => !has_drop(cx, cx.tables().expr_ty(expr)),
+ ExprKind::Path(..) => !has_drop(cx, cx.typeck_results().expr_ty(expr)),
ExprKind::Index(ref a, ref b) | ExprKind::Binary(_, ref a, ref b) => {
has_no_effect(cx, a) && has_no_effect(cx, b)
},
| ExprKind::AddrOf(_, _, ref inner)
| ExprKind::Box(ref inner) => has_no_effect(cx, inner),
ExprKind::Struct(_, ref fields, ref base) => {
- !has_drop(cx, cx.tables().expr_ty(expr))
+ !has_drop(cx, cx.typeck_results().expr_ty(expr))
&& fields.iter().all(|field| has_no_effect(cx, &field.expr))
&& base.as_ref().map_or(true, |base| has_no_effect(cx, base))
},
let res = qpath_res(cx, qpath, callee.hir_id);
match res {
Res::Def(DefKind::Struct | DefKind::Variant | DefKind::Ctor(..), ..) => {
- !has_drop(cx, cx.tables().expr_ty(expr)) && args.iter().all(|arg| has_no_effect(cx, arg))
+ !has_drop(cx, cx.typeck_results().expr_ty(expr))
+ && args.iter().all(|arg| has_no_effect(cx, arg))
},
_ => false,
}
| ExprKind::AddrOf(_, _, ref inner)
| ExprKind::Box(ref inner) => reduce_expression(cx, inner).or_else(|| Some(vec![inner])),
ExprKind::Struct(_, ref fields, ref base) => {
- if has_drop(cx, cx.tables().expr_ty(expr)) {
+ if has_drop(cx, cx.typeck_results().expr_ty(expr)) {
None
} else {
Some(fields.iter().map(|f| &f.expr).chain(base).map(Deref::deref).collect())
let res = qpath_res(cx, qpath, callee.hir_id);
match res {
Res::Def(DefKind::Struct | DefKind::Variant | DefKind::Ctor(..), ..)
- if !has_drop(cx, cx.tables().expr_ty(expr)) =>
+ if !has_drop(cx, cx.typeck_results().expr_ty(expr)) =>
{
Some(args.iter().collect())
},
}
let ty = if needs_check_adjustment {
- let adjustments = cx.tables().expr_adjustments(dereferenced_expr);
+ let adjustments = cx.typeck_results().expr_adjustments(dereferenced_expr);
if let Some(i) = adjustments
.iter()
.position(|adj| matches!(adj.kind, Adjust::Borrow(_) | Adjust::Deref(_)))
{
if i == 0 {
- cx.tables().expr_ty(dereferenced_expr)
+ cx.typeck_results().expr_ty(dereferenced_expr)
} else {
adjustments[i - 1].target
}
return;
}
} else {
- cx.tables().expr_ty(dereferenced_expr)
+ cx.typeck_results().expr_ty(dereferenced_expr)
};
verify_ty_bound(cx, ty, Source::Expr { expr: expr.span });
use rustc_middle::lint::in_external_macro;
use rustc_session::{declare_tool_lint, impl_lint_pass};
use rustc_span::source_map::Span;
-use rustc_span::symbol::{Ident, SymbolStr};
+use rustc_span::symbol::{Ident, Symbol};
use std::cmp::Ordering;
declare_clippy_lint! {
impl_lint_pass!(NonExpressiveNames => [SIMILAR_NAMES, MANY_SINGLE_CHAR_NAMES, JUST_UNDERSCORES_AND_DIGITS]);
struct ExistingName {
- interned: SymbolStr,
+ interned: Symbol,
span: Span,
len: usize,
exemptions: &'static [&'static str],
let mut split_at = None;
match existing_name.len.cmp(&count) {
Ordering::Greater => {
- if existing_name.len - count != 1 || levenstein_not_1(&interned_name, &existing_name.interned) {
+ if existing_name.len - count != 1 || levenstein_not_1(&interned_name, &existing_name.interned.as_str()) {
continue;
}
},
Ordering::Less => {
- if count - existing_name.len != 1 || levenstein_not_1(&existing_name.interned, &interned_name) {
+ if count - existing_name.len != 1 || levenstein_not_1(&existing_name.interned.as_str(), &interned_name) {
continue;
}
},
Ordering::Equal => {
let mut interned_chars = interned_name.chars();
- let mut existing_chars = existing_name.interned.chars();
+ let interned_str = existing_name.interned.as_str();
+ let mut existing_chars = interned_str.chars();
let first_i = interned_chars.next().expect("we know we have at least one char");
let first_e = existing_chars.next().expect("we know we have at least one char");
let eq_or_numeric = |(a, b): (char, char)| a == b || a.is_numeric() && b.is_numeric();
}
self.0.names.push(ExistingName {
exemptions: get_exemptions(&interned_name).unwrap_or(&[]),
- interned: interned_name,
+ interned: ident.name,
span: ident.span,
len: count,
});
impl<'tcx> LateLintPass<'tcx> for OpenOptions {
fn check_expr(&mut self, cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) {
if let ExprKind::MethodCall(ref path, _, ref arguments, _) = e.kind {
- let obj_ty = walk_ptrs_ty(cx.tables().expr_ty(&arguments[0]));
+ let obj_ty = walk_ptrs_ty(cx.typeck_results().expr_ty(&arguments[0]));
if path.ident.name == sym!(open) && match_type(cx, obj_ty, &paths::OPEN_OPTIONS) {
let mut options = Vec::new();
get_open_options(cx, &arguments[0], &mut options);
fn get_open_options(cx: &LateContext<'_>, argument: &Expr<'_>, options: &mut Vec<(OpenOption, Argument)>) {
if let ExprKind::MethodCall(ref path, _, ref arguments, _) = argument.kind {
- let obj_ty = walk_ptrs_ty(cx.tables().expr_ty(&arguments[0]));
+ let obj_ty = walk_ptrs_ty(cx.typeck_results().expr_ty(&arguments[0]));
// Only proceed if this is a call on some object of type std::fs::OpenOptions
if match_type(cx, obj_ty, &paths::OPEN_OPTIONS) && arguments.len() >= 2 {
/// Returns true iff the given expression is the result of calling `Result::ok`
fn is_result_ok(cx: &LateContext<'_>, expr: &'_ Expr<'_>) -> bool {
if let ExprKind::MethodCall(ref path, _, &[ref receiver], _) = &expr.kind {
- path.ident.name.to_ident_string() == "ok" && match_type(cx, &cx.tables().expr_ty(&receiver), &paths::RESULT)
+ path.ident.name.to_ident_string() == "ok"
+ && match_type(cx, &cx.typeck_results().expr_ty(&receiver), &paths::RESULT)
} else {
false
}
if let ExprKind::Path(QPath::Resolved(_, ref path2)) = ident2.kind;
if let ExprKind::Path(QPath::Resolved(_, ref path3)) = second.kind;
if eq(&path1.segments[0], &path3.segments[0]) || eq(&path2.segments[0], &path3.segments[0]);
- if cx.tables().expr_ty(ident1).is_integral();
- if cx.tables().expr_ty(ident2).is_integral();
+ if cx.typeck_results().expr_ty(ident1).is_integral();
+ if cx.typeck_results().expr_ty(ident2).is_integral();
then {
if let BinOpKind::Lt = op.node {
if let BinOpKind::Add = op2.node {
if let ExprKind::Path(QPath::Resolved(_, ref path2)) = ident2.kind;
if let ExprKind::Path(QPath::Resolved(_, ref path3)) = first.kind;
if eq(&path1.segments[0], &path3.segments[0]) || eq(&path2.segments[0], &path3.segments[0]);
- if cx.tables().expr_ty(ident1).is_integral();
- if cx.tables().expr_ty(ident2).is_integral();
+ if cx.typeck_results().expr_ty(ident1).is_integral();
+ if cx.typeck_results().expr_ty(ident2).is_integral();
then {
if let BinOpKind::Gt = op.node {
if let BinOpKind::Add = op2.node {
if let ExprKind::MethodCall(ref path, _, ref args, _) = expr.kind;
if path.ident.name == sym!(push);
if args.len() == 2;
- if match_type(cx, walk_ptrs_ty(cx.tables().expr_ty(&args[0])), &paths::PATH_BUF);
+ if match_type(cx, walk_ptrs_ty(cx.typeck_results().expr_ty(&args[0])), &paths::PATH_BUF);
if let Some(get_index_arg) = args.get(1);
if let ExprKind::Lit(ref lit) = get_index_arg.kind;
if let LitKind::Str(ref path_lit, _) = lit.node;
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
if let StmtKind::Local(ref local) = stmt.kind {
if let Some(init) = &local.init {
- if let Some(init_ty) = cx.tables().node_type_opt(init.hir_id) {
+ if let Some(init_ty) = cx.typeck_results().node_type_opt(init.hir_id) {
let pat = &local.pat;
if in_external_macro(cx.sess(), pat.span) {
return;
if let ExprKind::Match(ref expr, arms, source) = expr.kind {
match source {
MatchSource::Normal | MatchSource::IfLetDesugar { .. } | MatchSource::WhileLetDesugar => {
- if let Some(expr_ty) = cx.tables().node_type_opt(expr.hir_id) {
+ if let Some(expr_ty) = cx.typeck_results().node_type_opt(expr.hir_id) {
'pattern_checks: for arm in arms {
let pat = &arm.pat;
if in_external_macro(cx.sess(), pat.span) {
_: Span,
hir_id: HirId,
) {
- if let Some(fn_sig) = cx.tables().liberated_fn_sigs().get(hir_id) {
+ if let Some(fn_sig) = cx.typeck_results().liberated_fn_sigs().get(hir_id) {
for (param, ty) in body.params.iter().zip(fn_sig.inputs().iter()) {
apply_lint(cx, ¶m.pat, ty, DerefPossible::Impossible);
}
// Is the type of the expression a usize?
fn is_expr_ty_usize<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> bool {
- cx.tables().expr_ty(expr) == cx.tcx.types.usize
+ cx.typeck_results().expr_ty(expr) == cx.tcx.types.usize
}
// Is the type of the expression a raw pointer?
fn is_expr_ty_raw_ptr<'tcx>(cx: &LateContext<'tcx>, expr: &Expr<'_>) -> bool {
- cx.tables().expr_ty(expr).is_unsafe_ptr()
+ cx.typeck_results().expr_ty(expr).is_unsafe_ptr()
}
fn build_suggestion<'tcx>(
}
fn moves_by_default(cx: &LateContext<'_>, expression: &Expr<'_>) -> bool {
- let expr_ty = cx.tables().expr_ty(expression);
+ let expr_ty = cx.typeck_results().expr_ty(expression);
!expr_ty.is_copy_modulo_regions(cx.tcx.at(expression.span), cx.param_env)
}
fn is_option(cx: &LateContext<'_>, expression: &Expr<'_>) -> bool {
- let expr_ty = cx.tables().expr_ty(expression);
+ let expr_ty = cx.typeck_results().expr_ty(expression);
is_type_diagnostic_item(cx, expr_ty, sym!(option_type))
}
if_chain! {
if let Some(higher::Range { start: Some(start), end: Some(end), limits }) = higher::range(cx, expr);
- let ty = cx.tables().expr_ty(start);
+ let ty = cx.typeck_results().expr_ty(start);
if let ty::Int(_) | ty::Uint(_) = ty.kind;
- if let Some((start_idx, _)) = constant(cx, cx.tables(), start);
- if let Some((end_idx, _)) = constant(cx, cx.tables(), end);
+ if let Some((start_idx, _)) = constant(cx, cx.typeck_results(), start);
+ if let Some((end_idx, _)) = constant(cx, cx.typeck_results(), end);
if let Some(ordering) = Constant::partial_cmp(cx.tcx, ty, &start_idx, &end_idx);
if is_empty_range(limits, ordering);
then {
}
fn const_str<'tcx>(cx: &LateContext<'tcx>, e: &'tcx Expr<'_>) -> Option<String> {
- constant(cx, cx.tables(), e).and_then(|(c, _)| match c {
+ constant(cx, cx.typeck_results(), e).and_then(|(c, _)| match c {
Constant::Str(s) => Some(s),
_ => None,
})
if_chain! {
if let ExprKind::MethodCall(ref path, _, ref args, _) = expr.kind;
if path.ident.name == sym!(repeat);
- if let Some(Constant::Int(1)) = constant_context(cx, cx.tables()).expr(&args[1]);
+ if let Some(Constant::Int(1)) = constant_context(cx, cx.typeck_results()).expr(&args[1]);
if !in_macro(args[0].span);
then {
- let ty = walk_ptrs_ty(cx.tables().expr_ty(&args[0]));
+ let ty = walk_ptrs_ty(cx.typeck_results().expr_ty(&args[0]));
if ty.is_str() {
span_lint_and_sugg(
cx,
}
fn is_binding(cx: &LateContext<'_>, pat_id: HirId) -> bool {
- let var_ty = cx.tables().node_type_opt(pat_id);
+ let var_ty = cx.typeck_results().node_type_opt(pat_id);
var_ty.map_or(false, |var_ty| !matches!(var_ty.kind, ty::Adt(..)))
}
}
fn is_string(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
- is_type_diagnostic_item(cx, walk_ptrs_ty(cx.tables().expr_ty(e)), sym!(string_type))
+ is_type_diagnostic_item(cx, walk_ptrs_ty(cx.typeck_results().expr_ty(e)), sym!(string_type))
}
fn is_add(cx: &LateContext<'_>, src: &Expr<'_>, target: &Expr<'_>) -> bool {
if let ExprKind::Index(ref lhs1, ref idx1) = lhs1.kind {
if let ExprKind::Index(ref lhs2, ref idx2) = lhs2.kind {
if SpanlessEq::new(cx).ignore_fn().eq_expr(lhs1, lhs2) {
- let ty = walk_ptrs_ty(cx.tables().expr_ty(lhs1));
+ let ty = walk_ptrs_ty(cx.typeck_results().expr_ty(lhs1));
if matches!(ty.kind, ty::Slice(_))
|| matches!(ty.kind, ty::Array(_, _))
if_chain! {
if let [char_arg, radix_arg] = &**to_digit_args;
if to_digits_path.ident.name.as_str() == "to_digit";
- let char_arg_ty = cx.tables().expr_ty_adjusted(char_arg);
+ let char_arg_ty = cx.typeck_results().expr_ty_adjusted(char_arg);
if char_arg_ty.kind == ty::Char;
then {
Some((true, char_arg, radix_arg))
if let Some(def_id) = cx.qpath_res(qpath, path_expr.hir_id).opt_def_id();
if match_def_path(cx, def_id, &paths::TRANSMUTE);
then {
- let from_ty = cx.tables().expr_ty(&args[0]);
- let to_ty = cx.tables().expr_ty(e);
+ let from_ty = cx.typeck_results().expr_ty(&args[0]);
+ let to_ty = cx.typeck_results().expr_ty(e);
match (&from_ty.kind, &to_ty.kind) {
_ if from_ty == to_ty => span_lint(
then {
// Catching transmute over constants that resolve to `null`.
- let mut const_eval_context = constant_context(cx, cx.tables());
+ let mut const_eval_context = constant_context(cx, cx.typeck_results());
if_chain! {
if let ExprKind::Path(ref _qpath) = args[0].kind;
let x = const_eval_context.expr(&args[0]);
if let Some(return_type) = find_err_return_type(cx, &expr.kind);
then {
- let err_type = cx.tables().expr_ty(err_arg);
+ let err_type = cx.typeck_results().expr_ty(err_arg);
let origin_snippet = if err_arg.span.from_expansion() {
snippet_with_macro_callsite(cx, err_arg.span, "_")
} else {
if match_qpath(from_error_fn, &paths::TRY_FROM_ERROR);
if let Some(from_error_arg) = from_error_args.get(0);
then {
- Some(cx.tables().expr_ty(from_error_arg))
+ Some(cx.typeck_results().expr_ty(from_error_arg))
} else {
None
}
use rustc_lint::{LateContext, LateLintPass, LintContext};
use rustc_middle::hir::map::Map;
use rustc_middle::lint::in_external_macro;
-use rustc_middle::ty::{self, InferTy, Ty, TyCtxt, TyS, TypeckTables};
+use rustc_middle::ty::{self, InferTy, Ty, TyCtxt, TyS, TypeckResults};
use rustc_session::{declare_lint_pass, declare_tool_lint, impl_lint_pass};
use rustc_span::hygiene::{ExpnKind, MacroKind};
use rustc_span::source_map::Span;
impl<'tcx> LateLintPass<'tcx> for LetUnitValue {
fn check_stmt(&mut self, cx: &LateContext<'tcx>, stmt: &'tcx Stmt<'_>) {
if let StmtKind::Local(ref local) = stmt.kind {
- if is_unit(cx.tables().pat_ty(&local.pat)) {
+ if is_unit(cx.typeck_results().pat_ty(&local.pat)) {
if in_external_macro(cx.sess(), stmt.span) || local.pat.span.from_expansion() {
return;
}
if let ExpnKind::Macro(MacroKind::Bang, symbol) = callee.kind {
if let ExprKind::Binary(ref cmp, ref left, _) = expr.kind {
let op = cmp.node;
- if op.is_comparison() && is_unit(cx.tables().expr_ty(left)) {
+ if op.is_comparison() && is_unit(cx.typeck_results().expr_ty(left)) {
let result = match &*symbol.as_str() {
"assert_eq" | "debug_assert_eq" => "succeed",
"assert_ne" | "debug_assert_ne" => "fail",
}
if let ExprKind::Binary(ref cmp, ref left, _) = expr.kind {
let op = cmp.node;
- if op.is_comparison() && is_unit(cx.tables().expr_ty(left)) {
+ if op.is_comparison() && is_unit(cx.typeck_results().expr_ty(left)) {
let result = match op {
BinOpKind::Eq | BinOpKind::Le | BinOpKind::Ge => "true",
_ => "false",
let args_to_recover = args
.iter()
.filter(|arg| {
- if is_unit(cx.tables().expr_ty(arg)) && !is_unit_literal(arg) {
+ if is_unit(cx.typeck_results().expr_ty(arg)) && !is_unit_literal(arg) {
!matches!(&arg.kind, ExprKind::Match(.., MatchSource::TryDesugar))
} else {
false
}
// don't lint for positive constants
- let const_val = constant(cx, &cx.tables(), op);
+ let const_val = constant(cx, &cx.typeck_results(), op);
if_chain! {
if let Some((const_val, _)) = const_val;
if let Constant::Int(n) = const_val;
return;
}
if let ExprKind::Cast(ref ex, _) = expr.kind {
- let (cast_from, cast_to) = (cx.tables().expr_ty(ex), cx.tables().expr_ty(expr));
+ let (cast_from, cast_to) = (cx.typeck_results().expr_ty(ex), cx.typeck_results().expr_ty(expr));
lint_fn_to_numeric_cast(cx, expr, ex, cast_from, cast_to);
if let ExprKind::Lit(ref lit) = ex.kind {
if_chain! {
if let ExprKind::Cast(e, _) = &expr.kind;
if let ExprKind::Lit(l) = &e.kind;
if let LitKind::Char(c) = l.node;
- if ty::Uint(UintTy::U8) == cx.tables().expr_ty(expr).kind;
+ if ty::Uint(UintTy::U8) == cx.typeck_results().expr_ty(expr).kind;
then {
let mut applicability = Applicability::MachineApplicable;
let snippet = snippet_with_applicability(cx, e.span, "'x'", &mut applicability);
fn is_cast_between_fixed_and_target<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'tcx>) -> bool {
if let ExprKind::Cast(ref cast_exp, _) = expr.kind {
- let precast_ty = cx.tables().expr_ty(cast_exp);
- let cast_ty = cx.tables().expr_ty(expr);
+ let precast_ty = cx.typeck_results().expr_ty(cast_exp);
+ let cast_ty = cx.typeck_results().expr_ty(expr);
return is_isize_or_usize(precast_ty) != is_isize_or_usize(cast_ty);
}
// absurd comparison only makes sense on primitive types
// primitive types don't implement comparison operators with each other
- if cx.tables().expr_ty(lhs) != cx.tables().expr_ty(rhs) {
+ if cx.typeck_results().expr_ty(lhs) != cx.typeck_results().expr_ty(rhs) {
return None;
}
fn detect_extreme_expr<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<ExtremeExpr<'tcx>> {
use crate::types::ExtremeType::{Maximum, Minimum};
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
- let cv = constant(cx, cx.tables(), expr)?.0;
+ let cv = constant(cx, cx.typeck_results(), expr)?.0;
let which = match (&ty.kind, cv) {
(&ty::Bool, Constant::Bool(false)) | (&ty::Uint(_), Constant::Int(0)) => Minimum,
fn numeric_cast_precast_bounds<'a>(cx: &LateContext<'_>, expr: &'a Expr<'_>) -> Option<(FullInt, FullInt)> {
if let ExprKind::Cast(ref cast_exp, _) = expr.kind {
- let pre_cast_ty = cx.tables().expr_ty(cast_exp);
- let cast_ty = cx.tables().expr_ty(expr);
+ let pre_cast_ty = cx.typeck_results().expr_ty(cast_exp);
+ let cast_ty = cx.typeck_results().expr_ty(expr);
// if it's a cast from i32 to u32 wrapping will invalidate all these checks
if cx.layout_of(pre_cast_ty).ok().map(|l| l.size) == cx.layout_of(cast_ty).ok().map(|l| l.size) {
return None;
}
fn node_as_const_fullint<'tcx>(cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) -> Option<FullInt> {
- let val = constant(cx, cx.tables(), expr)?.0;
+ let val = constant(cx, cx.typeck_results(), expr)?.0;
if let Constant::Int(const_int) = val {
- match cx.tables().expr_ty(expr).kind {
+ match cx.typeck_results().expr_ty(expr).kind {
ty::Int(ity) => Some(FullInt::S(sext(cx.tcx, const_int, ity))),
ty::Uint(_) => Some(FullInt::U(const_int)),
_ => None,
/// Looks for default-hasher-dependent constructors like `HashMap::new`.
struct ImplicitHasherConstructorVisitor<'a, 'b, 'tcx> {
cx: &'a LateContext<'tcx>,
- maybe_typeck_tables: Option<&'tcx TypeckTables<'tcx>>,
+ maybe_typeck_results: Option<&'tcx TypeckResults<'tcx>>,
target: &'b ImplicitHasherType<'tcx>,
suggestions: BTreeMap<Span, String>,
}
fn new(cx: &'a LateContext<'tcx>, target: &'b ImplicitHasherType<'tcx>) -> Self {
Self {
cx,
- maybe_typeck_tables: cx.maybe_typeck_tables(),
+ maybe_typeck_results: cx.maybe_typeck_results(),
target,
suggestions: BTreeMap::new(),
}
type Map = Map<'tcx>;
fn visit_body(&mut self, body: &'tcx Body<'_>) {
- let old_maybe_typeck_tables = self.maybe_typeck_tables.replace(self.cx.tcx.body_tables(body.id()));
+ let old_maybe_typeck_results = self.maybe_typeck_results.replace(self.cx.tcx.typeck_body(body.id()));
walk_body(self, body);
- self.maybe_typeck_tables = old_maybe_typeck_tables;
+ self.maybe_typeck_results = old_maybe_typeck_results;
}
fn visit_expr(&mut self, e: &'tcx Expr<'_>) {
if let ExprKind::Path(QPath::TypeRelative(ref ty, ref method)) = fun.kind;
if let TyKind::Path(QPath::Resolved(None, ty_path)) = ty.kind;
then {
- if !TyS::same_type(self.target.ty(), self.maybe_typeck_tables.unwrap().expr_ty(e)) {
+ if !TyS::same_type(self.target.ty(), self.maybe_typeck_results.unwrap().expr_ty(e)) {
return;
}
if let TyKind::Ptr(MutTy { mutbl: Mutability::Mut, .. }) = t.kind;
if let ExprKind::Cast(e, t) = &e.kind;
if let TyKind::Ptr(MutTy { mutbl: Mutability::Not, .. }) = t.kind;
- if let ty::Ref(..) = cx.tables().node_type(e.hir_id).kind;
+ if let ty::Ref(..) = cx.typeck_results().node_type(e.hir_id).kind;
then {
span_lint(
cx,
}
fn is_trait_ptr(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- match cx.tables().expr_ty_adjusted(expr).kind {
+ match cx.typeck_results().expr_ty_adjusted(expr).kind {
ty::RawPtr(ty::TypeAndMut { ty, .. }) => ty.is_trait(),
_ => false,
}
}
fn is_fn_def(cx: &LateContext<'_>, expr: &Expr<'_>) -> bool {
- matches!(cx.tables().expr_ty(expr).kind, ty::FnDef(..))
+ matches!(cx.typeck_results().expr_ty(expr).kind, ty::FnDef(..))
}
if_chain! {
if match_def_path(cx, def_id, &paths::PTR_EQ) ||
match_def_path(cx, def_id, &paths::RC_PTR_EQ) ||
match_def_path(cx, def_id, &paths::ARC_PTR_EQ);
- let ty_param = cx.tables().node_substs(func.hir_id).type_at(0);
+ let ty_param = cx.typeck_results().node_substs(func.hir_id).type_at(0);
if ty_param.is_trait();
then {
span_lint_and_help(
if_chain! {
if let ExprKind::Binary(binop, ref left, ref right) = expr.kind;
if is_comparison(binop.node);
- if cx.tables().expr_ty_adjusted(left).is_fn_ptr() &&
- cx.tables().expr_ty_adjusted(right).is_fn_ptr();
+ if cx.typeck_results().expr_ty_adjusted(left).is_fn_ptr() &&
+ cx.typeck_results().expr_ty_adjusted(right).is_fn_ptr();
if is_fn_def(cx, left) || is_fn_def(cx, right);
then {
span_lint(
if let name = name_ident.ident.name.to_ident_string();
if name == "sort_by" || name == "sort_unstable_by";
if let [vec, Expr { kind: ExprKind::Closure(_, _, closure_body_id, _, _), .. }] = args;
- if utils::match_type(cx, &cx.tables().expr_ty(vec), &paths::VEC);
+ if utils::match_type(cx, &cx.typeck_results().expr_ty(vec), &paths::VEC);
if let closure_body = cx.tcx.hir().body(*closure_body_id);
if let &[
Param { pat: Pat { kind: PatKind::Binding(_, _, left_ident, _), .. }, ..},
use rustc_lint::{EarlyContext, EarlyLintPass};
use rustc_session::{declare_lint_pass, declare_tool_lint};
use rustc_span::source_map::Span;
-use rustc_span::symbol::{Ident, SymbolStr};
+use rustc_span::symbol::Ident;
declare_clippy_lint! {
/// **What it does:** Checks for imports that remove "unsafe" from an item's
}
#[must_use]
-fn contains_unsafe(name: &SymbolStr) -> bool {
+fn contains_unsafe(name: &str) -> bool {
name.contains("Unsafe") || name.contains("unsafe")
}
if_chain! {
if let ExprKind::MethodCall(method_name, _, args, _) = &expr.kind;
if let ExprKind::Path(QPath::Resolved(None, path)) = &args[0].kind;
- let ty = cx.tables().expr_ty(&args[0]);
+ let ty = cx.typeck_results().expr_ty(&args[0]);
let name = method_name.ident.as_str();
if is_relevant_option_call(cx, ty, &name) || is_relevant_result_call(cx, ty, &name);
then {
ExprKind::MethodCall(ref name, .., ref args, _) => {
if match_trait_method(cx, e, &paths::INTO) && &*name.ident.as_str() == "into" {
- let a = cx.tables().expr_ty(e);
- let b = cx.tables().expr_ty(&args[0]);
+ let a = cx.typeck_results().expr_ty(e);
+ let b = cx.typeck_results().expr_ty(&args[0]);
if TyS::same_type(a, b) {
let sugg = snippet_with_macro_callsite(cx, args[0].span, "<expr>").to_string();
span_lint_and_sugg(
}
}
if match_trait_method(cx, e, &paths::INTO_ITERATOR) && &*name.ident.as_str() == "into_iter" {
- let a = cx.tables().expr_ty(e);
- let b = cx.tables().expr_ty(&args[0]);
+ let a = cx.typeck_results().expr_ty(e);
+ let b = cx.typeck_results().expr_ty(&args[0]);
if TyS::same_type(a, b) {
let sugg = snippet(cx, args[0].span, "<expr>").into_owned();
span_lint_and_sugg(
}
if match_trait_method(cx, e, &paths::TRY_INTO_TRAIT) && &*name.ident.as_str() == "try_into" {
if_chain! {
- let a = cx.tables().expr_ty(e);
- let b = cx.tables().expr_ty(&args[0]);
+ let a = cx.typeck_results().expr_ty(e);
+ let b = cx.typeck_results().expr_ty(&args[0]);
if is_type_diagnostic_item(cx, a, sym!(result_type));
if let ty::Adt(_, substs) = a.kind;
if let Some(a_type) = substs.types().next();
if args.len() == 1;
if let ExprKind::Path(ref qpath) = path.kind;
if let Some(def_id) = cx.qpath_res(qpath, path.hir_id).opt_def_id();
- let a = cx.tables().expr_ty(e);
- let b = cx.tables().expr_ty(&args[0]);
+ let a = cx.typeck_results().expr_ty(e);
+ let b = cx.typeck_results().expr_ty(&args[0]);
then {
if_chain! {
Some(expr)
}
- let def_path = match cx.tables().expr_ty(expr).kind {
+ let def_path = match cx.typeck_results().expr_ty(expr).kind {
ty::Adt(def, _) => cx.tcx.def_path(def.did),
_ => return None,
};
};
use rustc_lint::LateContext;
use rustc_middle::ich::StableHashingContextProvider;
-use rustc_middle::ty::TypeckTables;
+use rustc_middle::ty::TypeckResults;
use rustc_span::Symbol;
use std::hash::Hash;
pub struct SpanlessEq<'a, 'tcx> {
/// Context used to evaluate constant expressions.
cx: &'a LateContext<'tcx>,
- maybe_typeck_tables: Option<&'tcx TypeckTables<'tcx>>,
+ maybe_typeck_results: Option<&'tcx TypeckResults<'tcx>>,
/// If is true, never consider as equal expressions containing function
/// calls.
ignore_fn: bool,
pub fn new(cx: &'a LateContext<'tcx>) -> Self {
Self {
cx,
- maybe_typeck_tables: cx.maybe_typeck_tables(),
+ maybe_typeck_results: cx.maybe_typeck_results(),
ignore_fn: false,
}
}
return false;
}
- if let Some(tables) = self.maybe_typeck_tables {
+ if let Some(typeck_results) = self.maybe_typeck_results {
if let (Some(l), Some(r)) = (
- constant_simple(self.cx, tables, left),
- constant_simple(self.cx, tables, right),
+ constant_simple(self.cx, typeck_results, left),
+ constant_simple(self.cx, typeck_results, right),
) {
if l == r {
return true;
!self.ignore_fn && self.eq_path_segment(l_path, r_path) && self.eq_exprs(l_args, r_args)
},
(&ExprKind::Repeat(ref le, ref ll_id), &ExprKind::Repeat(ref re, ref rl_id)) => {
- let mut celcx = constant_context(self.cx, self.cx.tcx.body_tables(ll_id.body));
+ let mut celcx = constant_context(self.cx, self.cx.tcx.typeck_body(ll_id.body));
let ll = celcx.expr(&self.cx.tcx.hir().body(ll_id.body).value);
- let mut celcx = constant_context(self.cx, self.cx.tcx.body_tables(rl_id.body));
+ let mut celcx = constant_context(self.cx, self.cx.tcx.typeck_body(rl_id.body));
let rl = celcx.expr(&self.cx.tcx.hir().body(rl_id.body).value);
self.eq_expr(le, re) && ll == rl
match (left, right) {
(&TyKind::Slice(ref l_vec), &TyKind::Slice(ref r_vec)) => self.eq_ty(l_vec, r_vec),
(&TyKind::Array(ref lt, ref ll_id), &TyKind::Array(ref rt, ref rl_id)) => {
- let old_maybe_typeck_tables = self.maybe_typeck_tables;
+ let old_maybe_typeck_results = self.maybe_typeck_results;
- let mut celcx = constant_context(self.cx, self.cx.tcx.body_tables(ll_id.body));
- self.maybe_typeck_tables = Some(self.cx.tcx.body_tables(ll_id.body));
+ let mut celcx = constant_context(self.cx, self.cx.tcx.typeck_body(ll_id.body));
+ self.maybe_typeck_results = Some(self.cx.tcx.typeck_body(ll_id.body));
let ll = celcx.expr(&self.cx.tcx.hir().body(ll_id.body).value);
- let mut celcx = constant_context(self.cx, self.cx.tcx.body_tables(rl_id.body));
- self.maybe_typeck_tables = Some(self.cx.tcx.body_tables(rl_id.body));
+ let mut celcx = constant_context(self.cx, self.cx.tcx.typeck_body(rl_id.body));
+ self.maybe_typeck_results = Some(self.cx.tcx.typeck_body(rl_id.body));
let rl = celcx.expr(&self.cx.tcx.hir().body(rl_id.body).value);
let eq_ty = self.eq_ty(lt, rt);
- self.maybe_typeck_tables = old_maybe_typeck_tables;
+ self.maybe_typeck_results = old_maybe_typeck_results;
eq_ty && ll == rl
},
(&TyKind::Ptr(ref l_mut), &TyKind::Ptr(ref r_mut)) => {
pub struct SpanlessHash<'a, 'tcx> {
/// Context used to evaluate constant expressions.
cx: &'a LateContext<'tcx>,
- maybe_typeck_tables: Option<&'tcx TypeckTables<'tcx>>,
+ maybe_typeck_results: Option<&'tcx TypeckResults<'tcx>>,
s: StableHasher,
}
pub fn new(cx: &'a LateContext<'tcx>) -> Self {
Self {
cx,
- maybe_typeck_tables: cx.maybe_typeck_tables(),
+ maybe_typeck_results: cx.maybe_typeck_results(),
s: StableHasher::new(),
}
}
#[allow(clippy::many_single_char_names, clippy::too_many_lines)]
pub fn hash_expr(&mut self, e: &Expr<'_>) {
let simple_const = self
- .maybe_typeck_tables
- .and_then(|tables| constant_simple(self.cx, tables, e));
+ .maybe_typeck_results
+ .and_then(|typeck_results| constant_simple(self.cx, typeck_results, e));
// const hashing may result in the same hash as some unrelated node, so add a sort of
// discriminant depending on which path we're choosing next
CaptureBy::Ref => 1,
}
.hash(&mut self.s);
- // closures inherit TypeckTables
+ // closures inherit TypeckResults
self.hash_expr(&self.cx.tcx.hir().body(eid).value);
},
ExprKind::Field(ref e, ref f) => {
self.hash_name(path.ident.name);
},
}
- // self.maybe_typeck_tables.unwrap().qpath_res(p, id).hash(&mut self.s);
+ // self.maybe_typeck_results.unwrap().qpath_res(p, id).hash(&mut self.s);
}
pub fn hash_path(&mut self, p: &Path<'_>) {
}
pub fn hash_body(&mut self, body_id: BodyId) {
- // swap out TypeckTables when hashing a body
- let old_maybe_typeck_tables = self.maybe_typeck_tables.replace(self.cx.tcx.body_tables(body_id));
+ // swap out TypeckResults when hashing a body
+ let old_maybe_typeck_results = self.maybe_typeck_results.replace(self.cx.tcx.typeck_body(body_id));
self.hash_expr(&self.cx.tcx.hir().body(body_id).value);
- self.maybe_typeck_tables = old_maybe_typeck_tables;
+ self.maybe_typeck_results = old_maybe_typeck_results;
}
fn hash_generic_args(&mut self, arg_list: &[GenericArg<'_>]) {
}
match stmt.kind {
hir::StmtKind::Local(ref local) => {
- println!("local variable of type {}", cx.tables().node_type(local.hir_id));
+ println!("local variable of type {}", cx.typeck_results().node_type(local.hir_id));
println!("pattern:");
print_pat(cx, &local.pat, 0);
if let Some(ref e) = local.init {
fn print_expr(cx: &LateContext<'_>, expr: &hir::Expr<'_>, indent: usize) {
let ind = " ".repeat(indent);
println!("{}+", ind);
- println!("{}ty: {}", ind, cx.tables().expr_ty(expr));
- println!("{}adjustments: {:?}", ind, cx.tables().adjustments().get(expr.hir_id));
+ println!("{}ty: {}", ind, cx.typeck_results().expr_ty(expr));
+ println!(
+ "{}adjustments: {:?}",
+ ind,
+ cx.typeck_results().adjustments().get(expr.hir_id)
+ );
match expr.kind {
hir::ExprKind::Box(ref e) => {
println!("{}Box", ind);
if let ExprKind::MethodCall(ref path, _, ref args, _) = expr.kind;
let fn_name = path.ident;
if let Some(sugg) = self.map.get(&*fn_name.as_str());
- let ty = walk_ptrs_ty(cx.tables().expr_ty(&args[0]));
+ let ty = walk_ptrs_ty(cx.typeck_results().expr_ty(&args[0]));
if match_type(cx, ty, &paths::EARLY_CONTEXT)
|| match_type(cx, ty, &paths::LATE_CONTEXT);
then {
let args = arg_lists[1];
if args.len() == 1;
let self_arg = &args[0];
- let self_ty = walk_ptrs_ty(cx.tables().expr_ty(self_arg));
+ let self_ty = walk_ptrs_ty(cx.typeck_results().expr_ty(self_arg));
if match_type(cx, self_ty, &paths::SYNTAX_CONTEXT);
then {
span_lint_and_sugg(
/// Checks if the method call given in `expr` belongs to the given trait.
pub fn match_trait_method(cx: &LateContext<'_>, expr: &Expr<'_>, path: &[&str]) -> bool {
- let def_id = cx.tables().type_dependent_def_id(expr.hir_id).unwrap();
+ let def_id = cx.typeck_results().type_dependent_def_id(expr.hir_id).unwrap();
let trt_id = cx.tcx.trait_of_item(def_id);
trt_id.map_or(false, |trt_id| match_def_path(cx, trt_id, path))
}
match qpath {
hir::QPath::Resolved(_, path) => path.res,
hir::QPath::TypeRelative(..) => {
- if cx.tcx.has_typeck_tables(id.owner.to_def_id()) {
- cx.tcx
- .typeck_tables_of(id.owner.to_def_id().expect_local())
- .qpath_res(qpath, id)
+ if cx.tcx.has_typeck_results(id.owner.to_def_id()) {
+ cx.tcx.typeck(id.owner.to_def_id().expect_local()).qpath_res(qpath, id)
} else {
Res::Err
}
let parent_item = map.get_parent_item(e.hir_id);
if let Some((Constant::Int(v), _)) = map
.maybe_body_owned_by(parent_item)
- .and_then(|body_id| constant(cx, cx.tcx.body_tables(body_id), e))
+ .and_then(|body_id| constant(cx, cx.tcx.typeck_body(body_id), e))
{
value == v
} else {
/// See `rustc_middle::ty::adjustment::Adjustment` and `rustc_typeck::check::coercion` for more
/// information on adjustments and coercions.
pub fn is_adjusted(cx: &LateContext<'_>, e: &Expr<'_>) -> bool {
- cx.tables().adjustments().get(e.hir_id).is_some()
+ cx.typeck_results().adjustments().get(e.hir_id).is_some()
}
/// Returns the pre-expansion span if is this comes from an expansion of the
is_enum_variant(cx, qpath, pat.hir_id) || are_refutable(cx, pats.iter().map(|pat| &**pat))
},
PatKind::Slice(ref head, ref middle, ref tail) => {
- match &cx.tables().node_type(pat.hir_id).kind {
+ match &cx.typeck_results().node_type(pat.hir_id).kind {
ty::Slice(..) => {
// [..] is the only irrefutable slice pattern.
!head.is_empty() || middle.is_none() || !tail.is_empty()
None
}
},
- ExprKind::MethodCall(_, _, _, _) => cx.tables().type_dependent_def_id(expr.hir_id),
+ ExprKind::MethodCall(_, _, _, _) => cx.typeck_results().type_dependent_def_id(expr.hir_id),
_ => None,
};
/// Returns the `DefId` of the callee if the given expression is a function or method call.
pub fn fn_def_id(cx: &LateContext<'_>, expr: &Expr<'_>) -> Option<DefId> {
match &expr.kind {
- ExprKind::MethodCall(..) => cx.tables().type_dependent_def_id(expr.hir_id),
+ ExprKind::MethodCall(..) => cx.typeck_results().type_dependent_def_id(expr.hir_id),
ExprKind::Call(
Expr {
kind: ExprKind::Path(qpath),
..
},
..,
- ) => cx.tables().qpath_res(qpath, expr.hir_id).opt_def_id(),
+ ) => cx.typeck_results().qpath_res(qpath, expr.hir_id).opt_def_id(),
_ => None,
}
}
};
let def_id = expr.hir_id.owner.to_def_id();
cx.tcx.infer_ctxt().enter(|infcx| {
- ExprUseVisitor::new(&mut delegate, &infcx, def_id.expect_local(), cx.param_env, cx.tables()).walk_expr(expr);
+ ExprUseVisitor::new(
+ &mut delegate,
+ &infcx,
+ def_id.expect_local(),
+ cx.param_env,
+ cx.typeck_results(),
+ )
+ .walk_expr(expr);
});
if delegate.skip {
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
// search for `&vec![_]` expressions where the adjusted type is `&[_]`
if_chain! {
- if let ty::Ref(_, ty, _) = cx.tables().expr_ty_adjusted(expr).kind;
+ if let ty::Ref(_, ty, _) = cx.typeck_results().expr_ty_adjusted(expr).kind;
if let ty::Slice(..) = ty.kind;
if let ExprKind::AddrOf(BorrowKind::Ref, _, ref addressee) = expr.kind;
if let Some(vec_args) = higher::vec_macro(cx, addressee);
if_chain! {
if let Some((_, arg, _)) = higher::for_loop(expr);
if let Some(vec_args) = higher::vec_macro(cx, arg);
- if is_copy(cx, vec_type(cx.tables().expr_ty_adjusted(arg)));
+ if is_copy(cx, vec_type(cx.typeck_results().expr_ty_adjusted(arg)));
then {
// report the error around the `vec!` not inside `<std macros>:`
let span = arg.span
let mut applicability = Applicability::MachineApplicable;
let snippet = match *vec_args {
higher::VecArgs::Repeat(elem, len) => {
- if constant(cx, cx.tables(), len).is_some() {
+ if constant(cx, cx.typeck_results(), len).is_some() {
format!(
"&[{}; {}]",
snippet_with_applicability(cx, elem.span, "elem", &mut applicability),
fn check_expr(&mut self, cx: &LateContext<'tcx>, expr: &'tcx Expr<'_>) {
if_chain! {
if let hir::ExprKind::MethodCall(path_segment, _, ref args, _) = expr.kind;
- if let Some(method_def_id) = cx.tables().type_dependent_def_id(expr.hir_id);
+ if let Some(method_def_id) = cx.typeck_results().type_dependent_def_id(expr.hir_id);
if match_def_path(cx, method_def_id, &paths::VEC_RESIZE) && args.len() == 3;
if let ExprKind::Lit(Spanned { node: LitKind::Int(0, _), .. }) = args[1].kind;
if let ExprKind::Lit(Spanned { node: LitKind::Int(..), .. }) = args[2].kind;
if let ExprKind::MethodCall(method_name, _, exprs, _) = expr.kind;
if method_name.ident.as_str() == "read_to_end";
if let ExprKind::Path(QPath::Resolved(None, _)) = &exprs[0].kind;
- let ty = cx.tables().expr_ty(&exprs[0]);
+ let ty = cx.typeck_results().expr_ty(&exprs[0]);
if match_type(cx, ty, &paths::FILE);
then {
return true
if let ExprKind::MethodCall(method_name, _, exprs, _) = expr.kind;
if method_name.ident.as_str() == "read_to_string";
if let ExprKind::Path(QPath::Resolved(None, _)) = &exprs[0].kind;
- let ty = cx.tables().expr_ty(&exprs[0]);
+ let ty = cx.typeck_results().expr_ty(&exprs[0]);
if match_type(cx, ty, &paths::FILE);
then {
return true
// TODO - constant_simple does not fold many operations involving floats.
// That's probably fine for this lint - it's pretty unlikely that someone would
// do something like 0.0/(2.0 - 2.0), but it would be nice to warn on that case too.
- if let Some(lhs_value) = constant_simple(cx, cx.tables(), left);
- if let Some(rhs_value) = constant_simple(cx, cx.tables(), right);
+ if let Some(lhs_value) = constant_simple(cx, cx.typeck_results(), left);
+ if let Some(rhs_value) = constant_simple(cx, cx.typeck_results(), right);
if Constant::F32(0.0) == lhs_value || Constant::F64(0.0) == lhs_value;
if Constant::F32(0.0) == rhs_value || Constant::F64(0.0) == rhs_value;
then {
- is it a primitive type?
- does it implement a trait?
-This operation is performed using the [`expr_ty()`][expr_ty] method from the [`TypeckTables`][TypeckTables] struct,
+This operation is performed using the [`expr_ty()`][expr_ty] method from the [`TypeckResults`][TypeckResults] struct,
that gives you access to the underlying structure [`TyS`][TyS].
Example of use:
impl LateLintPass<'_> for MyStructLint {
fn check_expr(&mut self, cx: &LateContext<'_>, expr: &Expr<'_>) {
// Get type of `expr`
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
// Match its kind to enter its type
match ty.kind {
ty::Adt(adt_def, _) if adt_def.is_struct() => println!("Our `expr` is a struct!"),
}
```
-Similarly in [`TypeckTables`][TypeckTables] methods, you have the [`pat_ty()`][pat_ty] method
+Similarly in [`TypeckResults`][TypeckResults] methods, you have the [`pat_ty()`][pat_ty] method
to retrieve a type from a pattern.
Two noticeable items here:
- `cx` is the lint context [`LateContext`][LateContext].
The two most useful data structures in this context are `tcx` and `tables`,
allowing us to jump to type definitions and other compilation stages such as HIR.
-- `tables` is [`TypeckTables`][TypeckTables] and is created by type checking step,
+- `tables` is [`TypeckResults`][TypeckResults] and is created by type checking step,
it includes useful information such as types of expressions, ways to resolve methods and so on.
# Checking if an expr is calling a specific method
}
// 2. Using type context `TyCtxt`
- let ty = cx.tables().expr_ty(expr);
+ let ty = cx.typeck_results().expr_ty(expr);
if cx.tcx.lang_items()
// we are looking for the `DefId` of `Drop` trait in lang items
.drop_trait()
[TyS]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TyS.html
[TyKind]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/enum.TyKind.html
-[TypeckTables]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypeckTables.html
-[expr_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypeckTables.html#method.expr_ty
+[TypeckResults]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypeckResults.html
+[expr_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/struct.TypeckResults.html#method.expr_ty
[LateContext]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_lint/struct.LateContext.html
[TyCtxt]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TyCtxt.html
-[pat_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TypeckTables.html#method.pat_ty
+[pat_ty]: https://doc.rust-lang.org/nightly/nightly-rustc/rustc_middle/ty/context/struct.TypeckResults.html#method.pat_ty
[paths]: ../clippy_lints/src/utils/paths.rs
/// The rustdoc executable.
pub rustdoc_path: Option<PathBuf>,
+ /// The rust-demangler executable.
+ pub rust_demangler_path: Option<PathBuf>,
+
/// The Python executable to use for LLDB.
pub lldb_python: String,
&self.target == name || // triple
util::matches_os(&self.target, name) || // target
util::matches_env(&self.target, name) || // env
+ self.target.ends_with(name) || // target and env
name == util::get_arch(&self.target) || // architecture
name == util::get_pointer_width(&self.target) || // pointer width
name == self.stage_id.split('-').next().unwrap() || // stage
.reqopt("", "run-lib-path", "path to target shared libraries", "PATH")
.reqopt("", "rustc-path", "path to rustc to use for compiling", "PATH")
.optopt("", "rustdoc-path", "path to rustdoc to use for compiling", "PATH")
+ .optopt("", "rust-demangler-path", "path to rust-demangler to use in tests", "PATH")
.reqopt("", "lldb-python", "path to python to use for doc tests", "PATH")
.reqopt("", "docck-python", "path to python to use for doc tests", "PATH")
.optopt("", "valgrind-path", "path to Valgrind executable for Valgrind tests", "PROGRAM")
run_lib_path: make_absolute(opt_path(matches, "run-lib-path")),
rustc_path: opt_path(matches, "rustc-path"),
rustdoc_path: matches.opt_str("rustdoc-path").map(PathBuf::from),
+ rust_demangler_path: matches.opt_str("rust-demangler-path").map(PathBuf::from),
lldb_python: matches.opt_str("lldb-python").unwrap(),
docck_python: matches.opt_str("docck-python").unwrap(),
valgrind_path: matches.opt_str("valgrind-path"),
logv(c, format!("run_lib_path: {:?}", config.run_lib_path));
logv(c, format!("rustc_path: {:?}", config.rustc_path.display()));
logv(c, format!("rustdoc_path: {:?}", config.rustdoc_path));
+ logv(c, format!("rust_demangler_path: {:?}", config.rust_demangler_path));
logv(c, format!("src_base: {:?}", config.src_base.display()));
logv(c, format!("build_base: {:?}", config.build_base.display()));
logv(c, format!("stage_id: {}", config.stage_id));
stamp.add_path(&rustdoc_path);
stamp.add_path(&rust_src_dir.join("src/etc/htmldocck.py"));
}
+    // FIXME(richkadel): Do I need to add an `if let Some(rust_demangler_path)` contribution to
+    // the stamp here as well?
// Compiletest itself.
stamp.add_dir(&rust_src_dir.join("src/tools/compiletest/"));
cmd.env("RUSTDOC", cwd.join(rustdoc));
}
+ if let Some(ref rust_demangler) = self.config.rust_demangler_path {
+ cmd.env("RUST_DEMANGLER", cwd.join(rust_demangler));
+ }
+
if let Some(ref node) = self.config.nodejs {
cmd.env("NODE", node);
}
DEFAULT_EDITION,
&Some(playground)
)
- .to_string()
+ .into_string()
)?
}
None => write!(output, "<p>No description.</p>\n")?,
use crate::Redirect::*;
+// Add linkcheck exceptions here.
+// If at all possible you should use intra-doc links to avoid linkcheck issues; these
+// are cases where that does not work.
+// Format: [(generated_documentation_page, &[broken_links])]
+const LINKCHECK_EXCEPTIONS: &[(&str, &[&str])] = &[
+ // These are methods on slice, and `Self` does not work on primitive impls
+ // in intra-doc links (primitive impls are weird)
+ // https://github.com/rust-lang/rust/issues/62834 is necessary to be
+ // able to link to slices
+ (
+ "std/io/struct.IoSlice.html",
+ &[
+ "#method.as_mut_ptr",
+ "#method.sort_by_key",
+ "#method.make_ascii_uppercase",
+ "#method.make_ascii_lowercase",
+ ],
+ ),
+ // These try to link to std::collections, but are defined in alloc
+ // https://github.com/rust-lang/rust/issues/74481
+ ("std/collections/btree_map/struct.BTreeMap.html", &["#insert-and-complex-keys"]),
+ ("std/collections/btree_set/struct.BTreeSet.html", &["#insert-and-complex-keys"]),
+ ("alloc/collections/btree_map/struct.BTreeMap.html", &["#insert-and-complex-keys"]),
+ ("alloc/collections/btree_set/struct.BTreeSet.html", &["#insert-and-complex-keys"]),
+];
+
macro_rules! t {
($e:expr) => {
match $e {
}
}
+// Returns `true` when the (generated HTML file, link) pair is a known
+// exception listed in `LINKCHECK_EXCEPTIONS`; files are matched by path
+// suffix, links by exact string.
+fn is_exception(file: &Path, link: &str) -> bool {
+    if let Some(entry) = LINKCHECK_EXCEPTIONS.iter().find(|&(f, _)| file.ends_with(f)) {
+        entry.1.contains(&link)
+    } else {
+        false
+    }
+}
+
fn check(cache: &mut Cache, root: &Path, file: &Path, errors: &mut bool) -> Option<PathBuf> {
// Ignore non-HTML files.
if file.extension().and_then(|s| s.to_str()) != Some("html") {
return None;
}
- // Unfortunately we're not 100% full of valid links today to we need a few
- // exceptions to get this past `make check` today.
- // FIXME(#32129)
- if file.ends_with("std/io/struct.IoSlice.html")
- || file.ends_with("std/string/struct.String.html")
- {
- return None;
- }
- // FIXME(#32553)
- if file.ends_with("alloc/string/struct.String.html") {
- return None;
- }
- // FIXME(#32130)
- if file.ends_with("alloc/collections/btree_map/struct.BTreeMap.html")
- || file.ends_with("alloc/collections/btree_set/struct.BTreeSet.html")
- || file.ends_with("std/collections/btree_map/struct.BTreeMap.html")
- || file.ends_with("std/collections/btree_set/struct.BTreeSet.html")
- || file.ends_with("std/collections/hash_map/struct.HashMap.html")
- || file.ends_with("std/collections/hash_set/struct.HashSet.html")
- {
- return None;
- }
-
let res = load_file(cache, root, file, SkipRedirect);
let (pretty_file, contents) = match res {
Ok(res) => res,
let entry = &mut cache.get_mut(&pretty_path).unwrap();
entry.parse_ids(&pretty_path, &contents, errors);
- if !entry.ids.contains(*fragment) {
+ if !entry.ids.contains(*fragment) && !is_exception(file, &format!("#{}", fragment))
+ {
*errors = true;
print!("{}:{}: broken link fragment ", pretty_file.display(), i + 1);
println!("`#{}` pointing to `{}`", fragment, pretty_path.display());
};
}
} else {
- *errors = true;
- print!("{}:{}: broken link - ", pretty_file.display(), i + 1);
let pretty_path = path.strip_prefix(root).unwrap_or(&path);
- println!("{}", pretty_path.display());
+ if !is_exception(file, pretty_path.to_str().unwrap()) {
+ *errors = true;
+ print!("{}:{}: broken link - ", pretty_file.display(), i + 1);
+ println!("{}", pretty_path.display());
+ }
}
});
Some(pretty_file)
-Subproject commit eee22ffddab20f51e1866bcbe4c5a69a90bdd260
+Subproject commit 515287f114b546a72d4a1fe8ffe1dbc20dedf13d
MAINTAINERS = {
'miri': {'oli-obk', 'RalfJung', 'eddyb'},
'rls': {'Xanewok'},
- 'rustfmt': {'topecongiro'},
+ 'rustfmt': {'topecongiro', 'calebcartwright'},
'book': {'carols10cents', 'steveklabnik'},
'nomicon': {'frewsxcv', 'Gankra'},
'reference': {'steveklabnik', 'Havvy', 'matthewjasper', 'ehuss'},
-Subproject commit 8b0983e89ad9a28b142eccf3755a8c9aaeb37852
+Subproject commit c9c518e5e9761bf35d466c47c57c3a1358b56b3c
--- /dev/null
+[package]
+authors = ["The Rust Project Developers"]
+name = "rust-demangler"
+version = "0.0.0"
+edition = "2018"
+
+[dependencies]
+rustc-demangle = "0.1"
+
+[[bin]]
+name = "rust-demangler"
+path = "main.rs"
--- /dev/null
+//! Demangles rustc mangled names.
+//!
+//! This tool uses <https://crates.io/crates/rustc-demangle> to convert an input buffer of
+//! newline-separated mangled names into their demangled translations.
+//!
+//! This tool can be leveraged by other applications that support third-party demanglers.
+//! It takes a list of mangled names (one per line) on standard input, and prints a corresponding
+//! list of demangled names. The tool is designed to support other programs that can leverage a
+//! third-party demangler, such as `llvm-cov`, via the `-Xdemangler=<path-to-demangler>` option.
+//!
+//! To use `rust-demangler`, first build the tool with:
+//!
+//! ```shell
+//! $ ./x.py build rust-demangler
+//! ```
+//!
+//! Then, with `llvm-cov` for example, add the `-Xdemangler=...` option:
+//!
+//! ```shell
+//! $ TARGET="${PWD}/build/x86_64-unknown-linux-gnu"
+//! $ "${TARGET}"/llvm/bin/llvm-cov show -Xdemangler="${TARGET}"/stage0-tools-bin/rust-demangler \
+//! --instr-profile=main.profdata ./main --show-line-counts-or-regions
+//! ```
+
+use rustc_demangle::demangle;
+use std::io::{self, Read, Write};
+
+fn main() -> io::Result<()> {
+    // Read all of stdin up front; input is a newline-separated list of mangled names.
+    let mut buffer = String::new();
+    io::stdin().read_to_string(&mut buffer)?;
+    let lines = buffer.lines();
+    let mut demangled = Vec::new();
+    for mangled in lines {
+        // Unrecognized names pass through rustc-demangle unchanged.
+        demangled.push(demangle(mangled).to_string());
+    }
+    // Push an empty final entry so the joined output ends with a trailing newline.
+    demangled.push("".to_string());
+    io::stdout().write_all(demangled.join("\n").as_bytes())?;
+    Ok(())
+}
+#![feature(restricted_std)]
pub use std::*;
"MIT",
"Unlicense/MIT",
"Unlicense OR MIT",
+ "0BSD OR MIT OR Apache-2.0", // adler license
];
/// These are exceptions to Rust's permissive licensing policy, and
("ryu", "Apache-2.0 OR BSL-1.0"), // rls/cargo/... (because of serde)
("bytesize", "Apache-2.0"), // cargo
("im-rc", "MPL-2.0+"), // cargo
- ("adler32", "BSD-3-Clause AND Zlib"), // cargo dep that isn't used
("constant_time_eq", "CC0-1.0"), // rustfmt
("sized-chunks", "MPL-2.0+"), // cargo via im-rc
("bitmaps", "MPL-2.0+"), // cargo via im-rc
/// This list is here to provide a speed-bump to adding a new dependency to
/// rustc. Please check with the compiler team before adding an entry.
const PERMITTED_DEPENDENCIES: &[&str] = &[
- "adler32",
+ "addr2line",
+ "adler",
"aho-corasick",
"annotate-snippets",
"ansi_term",
"atty",
"autocfg",
"backtrace",
- "backtrace-sys",
"bitflags",
"block-buffer",
"block-padding",
"generic-array",
"getopts",
"getrandom",
+ "gimli",
"hashbrown",
"hermit-abi",
"humantime",
"miniz_oxide",
"nodrop",
"num_cpus",
+ "object",
"once_cell",
"opaque-debug",
"parking_lot",
"parking_lot_core",
+ "pathdiff",
"pkg-config",
"polonius-engine",
"ppv-lite86",
// A few of those error codes can't be tested but all the others can and *should* be tested!
const EXEMPTED_FROM_TEST: &[&str] = &[
- "E0183", "E0227", "E0279", "E0280", "E0311", "E0313", "E0314", "E0315", "E0377", "E0456",
- "E0461", "E0462", "E0464", "E0465", "E0472", "E0473", "E0474", "E0475", "E0476", "E0479",
- "E0480", "E0481", "E0482", "E0483", "E0484", "E0485", "E0486", "E0487", "E0488", "E0489",
- "E0514", "E0519", "E0523", "E0553", "E0554", "E0570", "E0629", "E0630", "E0640", "E0717",
- "E0727", "E0729",
+ "E0183", "E0227", "E0279", "E0280", "E0311", "E0313", "E0314", "E0315", "E0377", "E0461",
+ "E0462", "E0464", "E0465", "E0472", "E0473", "E0474", "E0475", "E0476", "E0479", "E0480",
+ "E0481", "E0482", "E0483", "E0484", "E0485", "E0486", "E0487", "E0488", "E0489", "E0514",
+ "E0519", "E0523", "E0553", "E0554", "E0570", "E0629", "E0630", "E0640", "E0717", "E0727",
+ "E0729",
];
// Some error codes don't have any tests apparently...
"src/tools/rust-installer",
"src/tools/rustfmt",
"src/doc/book",
+ "src/backtrace",
// Filter RLS output directories
"target/rls",
];
// Update `can_contain` when changing this
if contents.contains(&format!("// ignore-tidy-{}", check))
|| contents.contains(&format!("# ignore-tidy-{}", check))
+ || contents.contains(&format!("/* ignore-tidy-{} */", check))
{
Directive::Ignore(false)
} else {
};
}
+/// Returns `true` if `full_path` has an ancestor directory named
+/// `folder_to_find` whose own parent is named `parent_folder_to_find`
+/// (e.g. a path somewhere under `.../src/librustdoc/...`). Recurses up
+/// the path one component at a time.
+pub fn is_in(full_path: &Path, parent_folder_to_find: &str, folder_to_find: &str) -> bool {
+    if let Some(parent) = full_path.parent() {
+        if parent.file_name().map_or_else(
+            || false,
+            |f| {
+                // NOTE(review): `map_or(false, ...)` would be the simpler idiom here —
+                // confirm against the file's prevailing style.
+                f.to_string_lossy() == folder_to_find
+                    && parent
+                        .parent()
+                        .and_then(|f| f.file_name())
+                        .map_or_else(|| false, |f| f == parent_folder_to_find)
+            },
+        ) {
+            true
+        } else {
+            // Not a match at this level; walk up one path component and retry.
+            is_in(parent, parent_folder_to_find, folder_to_find)
+        }
+    } else {
+        // Ran out of ancestors without finding the folder pair.
+        false
+    }
+}
+
pub fn check(path: &Path, bad: &mut bool) {
super::walk(path, &mut super::filter_dirs, &mut |entry, contents| {
let file = entry.path();
let filename = file.file_name().unwrap().to_string_lossy();
- let extensions = [".rs", ".py", ".js", ".sh", ".c", ".cpp", ".h", ".md"];
+ let extensions = [".rs", ".py", ".js", ".sh", ".c", ".cpp", ".h", ".md", ".css"];
if extensions.iter().all(|e| !filename.ends_with(e)) || filename.starts_with(".#") {
return;
}
+ let is_style_file = filename.ends_with(".css");
let under_rustfmt = filename.ends_with(".rs") &&
// This list should ideally be sourced from rustfmt.toml but we don't want to add a toml
// parser to tidy.
// currently), just the long error code explanation ones.
return;
}
+ if is_style_file && !is_in(file, "src", "librustdoc") {
+ // We only check CSS files in rustdoc.
+ return;
+ }
if contents.is_empty() {
tidy_error!(bad, "{}: empty file", file.display());
COLS
};
- let can_contain =
- contents.contains("// ignore-tidy-") || contents.contains("# ignore-tidy-");
+ let can_contain = contents.contains("// ignore-tidy-")
+ || contents.contains("# ignore-tidy-")
+ || contents.contains("/* ignore-tidy-");
// Enable testing ICE's that require specific (untidy)
// file formats easily eg. `issue-1234-ignore-tidy.rs`
if filename.contains("ignore-tidy") {
&format!("line longer than {} chars", max_columns)
);
}
- if line.contains('\t') {
+ if !is_style_file && line.contains('\t') {
suppressible_tidy_err!(err, skip_tab, "tab character");
}
if line.ends_with(' ') || line.ends_with('\t') {
suppressible_tidy_err!(err, skip_end_whitespace, "trailing whitespace");
}
+ if is_style_file && line.starts_with(' ') {
+ err("CSS files use tabs for indent");
+ }
if line.contains('\r') {
suppressible_tidy_err!(err, skip_cr, "CR character");
}