3. Build and install:
```sh
- $ ./x.py build && sudo ./x.py dist --install
+ $ ./x.py build && sudo ./x.py install
```
> ***Note:*** Install locations can be adjusted by copying the config file
> and adjusting the `prefix` option under `[install]`. Various other options are
> also supported, and are documented in the config file.
- When complete, `sudo ./x.py dist --install` will place several programs into
+ When complete, `sudo ./x.py install` will place several programs into
`/usr/local/bin`: `rustc`, the Rust compiler, and `rustdoc`, the
API-documentation tool. This install does not include [Cargo],
Rust's package manager, which you may also want to build.
4. Navigate to Rust's source code (or clone it), then build it:
```sh
- $ ./x.py build && ./x.py dist --install
+ $ ./x.py build && ./x.py install
```
#### MSVC
"hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
"log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
"miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
"num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
"openssl 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
"psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
"rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "semver 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
"serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
dependencies = [
"bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
"libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
"openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
"openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
"url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "libgit2-sys"
-version = "0.6.10"
+version = "0.6.11"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
[[package]]
name = "semver"
-version = "0.6.0"
+version = "0.7.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
dependencies = [
"semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
]
[[package]]
"checksum languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97c2985bfcbbcb0189cfa25e1c10c1ac7111df2b6214b652c690127aefdf4e5b"
"checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf"
"checksum libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)" = "babb8281da88cba992fa1f4ddec7d63ed96280a1a53ec9b919fd37b53d71e502"
-"checksum libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "dd89dd7196d5fa35b659c3eaf3c1b14b9bd961bfd1a07dfca49adeb8a6aa3763"
+"checksum libgit2-sys 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d9dc31ee90fb179b706d35fb672e91d0b74e950d7fb4ea7eae3c0f5ecbf2d3d3"
"checksum libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0db4ec23611747ef772db1c4d650f8bd762f07b461727ec998f953c614024b75"
"checksum libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e5ee912a45d686d393d5ac87fac15ba0ba18daae14e8e7543c63ebf7fb7e970c"
"checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
"checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
"checksum rustfmt 0.8.4 (git+https://github.com/rust-lang-nursery/rustfmt)" = "<none>"
"checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7"
-"checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537"
+"checksum semver 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3fdd61b85a0fa777f7fb7c454b9189b2941b110d1385ce84d7f76efdf1606a85"
"checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
"checksum serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34b623917345a631dc9608d5194cc206b3fe6c3554cd1c75b937e55e285254af"
"checksum serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "38a3db3a5757f68069aba764b793823ea9fb9717c42c016f8903f8add50f508a"
# Note that this address should not contain a trailing slash as file names will
# be appended to it.
#upload-addr = "https://example.com/folder"
+
+# Whether to build a plain source tarball to upload
+# We disable this on Windows so as not to override the one already uploaded to S3,
+# as the one built on Windows would contain backslashes in paths, causing problems
+# on Linux
+#src-tarball = true
use channel;
use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe};
-fn pkgname(build: &Build, component: &str) -> String {
+pub fn pkgname(build: &Build, component: &str) -> String {
if component == "cargo" {
format!("{}-{}", component, build.cargo_package_vers())
} else if component == "rls" {
- format!("{}-{}", component, build.package_vers(&build.release_num("rls")))
+ format!("{}-{}", component, build.rls_package_vers())
} else {
assert!(component.starts_with("rust"));
format!("{}-{}", component, build.rust_package_vers())
t!(fs::remove_dir_all(&image));
}
-const CARGO_VENDOR_VERSION: &'static str = "0.1.4";
-
-/// Creates the `rust-src` installer component and the plain source tarball
-pub fn rust_src(build: &Build) {
- if !build.config.rust_dist_src {
- return
- }
-
- println!("Dist src");
-
- // Make sure that the root folder of tarball has the correct name
- let plain_name = format!("rustc-{}-src", build.rust_package_vers());
- let plain_dst_src = tmpdir(build).join(&plain_name);
- let _ = fs::remove_dir_all(&plain_dst_src);
- t!(fs::create_dir_all(&plain_dst_src));
-
- // This is the set of root paths which will become part of the source package
- let src_files = [
- "COPYRIGHT",
- "LICENSE-APACHE",
- "LICENSE-MIT",
- "CONTRIBUTING.md",
- "README.md",
- "RELEASES.md",
- "configure",
- "x.py",
- ];
- let src_dirs = [
- "man",
- "src",
- ];
-
- let filter_fn = move |path: &Path| {
+fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) {
+ fn filter_fn(exclude_dirs: &[&str], dir: &str, path: &Path) -> bool {
let spath = match path.to_str() {
Some(path) => path,
None => return false,
}
}
+ let full_path = Path::new(dir).join(path);
+ if exclude_dirs.iter().any(|excl| full_path == Path::new(excl)) {
+ return false;
+ }
+
let excludes = [
"CVS", "RCS", "SCCS", ".git", ".gitignore", ".gitmodules",
".gitattributes", ".cvsignore", ".svn", ".arch-ids", "{arch}",
!path.iter()
.map(|s| s.to_str().unwrap())
.any(|s| excludes.contains(&s))
- };
-
- // Copy the directories using our filter
- for item in &src_dirs {
- let dst = &plain_dst_src.join(item);
- t!(fs::create_dir(dst));
- cp_filtered(&build.src.join(item), dst, &filter_fn);
- }
- // Copy the files normally
- for item in &src_files {
- copy(&build.src.join(item), &plain_dst_src.join(item));
- }
-
- // If we're building from git sources, we need to vendor a complete distribution.
- if build.src_is_git {
- // Get cargo-vendor installed, if it isn't already.
- let mut has_cargo_vendor = false;
- let mut cmd = Command::new(&build.cargo);
- for line in output(cmd.arg("install").arg("--list")).lines() {
- has_cargo_vendor |= line.starts_with("cargo-vendor ");
- }
- if !has_cargo_vendor {
- let mut cmd = Command::new(&build.cargo);
- cmd.arg("install")
- .arg("--force")
- .arg("--debug")
- .arg("--vers").arg(CARGO_VENDOR_VERSION)
- .arg("cargo-vendor")
- .env("RUSTC", &build.rustc);
- build.run(&mut cmd);
- }
-
- // Vendor all Cargo dependencies
- let mut cmd = Command::new(&build.cargo);
- cmd.arg("vendor")
- .current_dir(&plain_dst_src.join("src"));
- build.run(&mut cmd);
}
- // Create the version file
- write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
-
- // Create plain source tarball
- let mut tarball = rust_src_location(build);
- tarball.set_extension(""); // strip .gz
- tarball.set_extension(""); // strip .tar
- if let Some(dir) = tarball.parent() {
- t!(fs::create_dir_all(dir));
+ // Copy the directories using our filter
+ for item in src_dirs {
+ let dst = &dst_dir.join(item);
+ t!(fs::create_dir_all(dst));
+ cp_filtered(&build.src.join(item), dst, &|path| filter_fn(exclude_dirs, item, path));
}
- let mut cmd = rust_installer(build);
- cmd.arg("tarball")
- .arg("--input").arg(&plain_name)
- .arg("--output").arg(&tarball)
- .arg("--work-dir=.")
- .current_dir(tmpdir(build));
- build.run(&mut cmd);
+}
+/// Creates the `rust-src` installer component
+pub fn rust_src(build: &Build) {
+ println!("Dist src");
let name = pkgname(build, "rust-src");
let image = tmpdir(build).join(format!("{}-image", name));
"src/liballoc",
"src/liballoc_jemalloc",
"src/liballoc_system",
+ "src/libbacktrace",
"src/libcollections",
"src/libcompiler_builtins",
"src/libcore",
"src/libstd_unicode",
"src/libunwind",
"src/rustc/libc_shim",
+ "src/libtest",
+ "src/libterm",
+ "src/libgetopts",
+ "src/compiler-rt",
+ "src/jemalloc",
+ ];
+ let std_src_dirs_exclude = [
+ "src/compiler-rt/test",
+ "src/jemalloc/test/unit",
];
- for item in &std_src_dirs {
- let dst = &dst_src.join(item);
- t!(fs::create_dir_all(dst));
- cp_r(&plain_dst_src.join(item), dst);
- }
+ copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
// Create source tarball in rust-installer format
let mut cmd = rust_installer(build);
build.run(&mut cmd);
t!(fs::remove_dir_all(&image));
- t!(fs::remove_dir_all(&plain_dst_src));
+}
+
+const CARGO_VENDOR_VERSION: &'static str = "0.1.4";
+
+/// Creates the plain source tarball
+pub fn plain_source_tarball(build: &Build) {
+ println!("Create plain source tarball");
+
+ // Make sure that the root folder of tarball has the correct name
+ let plain_name = format!("{}-src", pkgname(build, "rustc"));
+ let plain_dst_src = tmpdir(build).join(&plain_name);
+ let _ = fs::remove_dir_all(&plain_dst_src);
+ t!(fs::create_dir_all(&plain_dst_src));
+
+ // This is the set of root paths which will become part of the source package
+ let src_files = [
+ "COPYRIGHT",
+ "LICENSE-APACHE",
+ "LICENSE-MIT",
+ "CONTRIBUTING.md",
+ "README.md",
+ "RELEASES.md",
+ "configure",
+ "x.py",
+ ];
+ let src_dirs = [
+ "man",
+ "src",
+ ];
+
+ copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src);
+
+ // Copy the files normally
+ for item in &src_files {
+ copy(&build.src.join(item), &plain_dst_src.join(item));
+ }
+
+ // Create the version file
+ write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
+
+ // If we're building from git sources, we need to vendor a complete distribution.
+ if build.src_is_git {
+ // Get cargo-vendor installed, if it isn't already.
+ let mut has_cargo_vendor = false;
+ let mut cmd = Command::new(&build.cargo);
+ for line in output(cmd.arg("install").arg("--list")).lines() {
+ has_cargo_vendor |= line.starts_with("cargo-vendor ");
+ }
+ if !has_cargo_vendor {
+ let mut cmd = Command::new(&build.cargo);
+ cmd.arg("install")
+ .arg("--force")
+ .arg("--debug")
+ .arg("--vers").arg(CARGO_VENDOR_VERSION)
+ .arg("cargo-vendor")
+ .env("RUSTC", &build.rustc);
+ build.run(&mut cmd);
+ }
+
+ // Vendor all Cargo dependencies
+ let mut cmd = Command::new(&build.cargo);
+ cmd.arg("vendor")
+ .current_dir(&plain_dst_src.join("src"));
+ build.run(&mut cmd);
+ }
+
+ // Create plain source tarball
+ let mut tarball = rust_src_location(build);
+ tarball.set_extension(""); // strip .gz
+ tarball.set_extension(""); // strip .tar
+ if let Some(dir) = tarball.parent() {
+ t!(fs::create_dir_all(dir));
+ }
+ let mut cmd = rust_installer(build);
+ cmd.arg("tarball")
+ .arg("--input").arg(&plain_name)
+ .arg("--output").arg(&tarball)
+ .arg("--work-dir=.")
+ .current_dir(tmpdir(build));
+ build.run(&mut cmd);
}
fn install(src: &Path, dstdir: &Path, perms: u32) {
Clean,
Dist {
paths: Vec<PathBuf>,
- install: bool,
+ },
+ Install {
+ paths: Vec<PathBuf>,
},
}
bench Build and run some benchmarks
doc Build documentation
clean Clean out build directories
- dist Build and/or install distribution artifacts
+ dist Build distribution artifacts
+ install Install distribution artifacts
To learn more about a subcommand, run `./x.py <subcommand> -h`");
|| (s == "bench")
|| (s == "doc")
|| (s == "clean")
- || (s == "dist"));
+ || (s == "dist")
+ || (s == "install"));
let subcommand = match possible_subcommands.first() {
Some(s) => s,
None => {
match subcommand.as_str() {
"test" => { opts.optmulti("", "test-args", "extra arguments", "ARGS"); },
"bench" => { opts.optmulti("", "test-args", "extra arguments", "ARGS"); },
- "dist" => { opts.optflag("", "install", "run installer as well"); },
_ => { },
};
"dist" => {
Subcommand::Dist {
paths: paths,
- install: matches.opt_present("install"),
+ }
+ }
+ "install" => {
+ Subcommand::Install {
+ paths: paths,
}
}
_ => {
use std::process::Command;
use Build;
-use dist::{sanitize_sh, tmpdir};
+use dist::{pkgname, sanitize_sh, tmpdir};
pub struct Installer<'a> {
build: &'a Build,
bindir: PathBuf,
libdir: PathBuf,
mandir: PathBuf,
+ empty_dir: PathBuf,
+}
+
+impl<'a> Drop for Installer<'a> {
+ fn drop(&mut self) {
+ t!(fs::remove_dir_all(&self.empty_dir));
+ }
}
impl<'a> Installer<'a> {
let libdir = add_destdir(&libdir, &destdir);
let mandir = add_destdir(&mandir, &destdir);
+ let empty_dir = build.out.join("tmp/empty_dir");
+
+ t!(fs::create_dir_all(&empty_dir));
+
Installer {
build,
prefix,
bindir,
libdir,
mandir,
+ empty_dir,
}
}
- /// Installs everything.
- pub fn install(&self, stage: u32, host: &str) {
- let empty_dir = self.build.out.join("tmp/empty_dir");
- t!(fs::create_dir_all(&empty_dir));
-
- if self.build.config.docs {
- self.install_sh("docs", "rust-docs", &self.build.rust_package_vers(),
- stage, Some(host), &empty_dir);
- }
+ pub fn install_docs(&self, stage: u32, host: &str) {
+ self.install_sh("docs", "rust-docs", stage, Some(host));
+ }
+ pub fn install_std(&self, stage: u32) {
for target in self.build.config.target.iter() {
- self.install_sh("std", "rust-std", &self.build.rust_package_vers(),
- stage, Some(target), &empty_dir);
+ self.install_sh("std", "rust-std", stage, Some(target));
}
+ }
- if self.build.config.extended {
- self.install_sh("cargo", "cargo", &self.build.cargo_package_vers(),
- stage, Some(host), &empty_dir);
- self.install_sh("rls", "rls", &self.build.rls_package_vers(),
- stage, Some(host), &empty_dir);
- self.install_sh("analysis", "rust-analysis", &self.build.rust_package_vers(),
- stage, Some(host), &empty_dir);
- self.install_sh("src", "rust-src", &self.build.rust_package_vers(),
- stage, None, &empty_dir);
- }
+ pub fn install_cargo(&self, stage: u32, host: &str) {
+ self.install_sh("cargo", "cargo", stage, Some(host));
+ }
- self.install_sh("rustc", "rustc", &self.build.rust_package_vers(),
- stage, Some(host), &empty_dir);
+ pub fn install_rls(&self, stage: u32, host: &str) {
+ self.install_sh("rls", "rls", stage, Some(host));
+ }
+
+ pub fn install_analysis(&self, stage: u32, host: &str) {
+ self.install_sh("analysis", "rust-analysis", stage, Some(host));
+ }
- t!(fs::remove_dir_all(&empty_dir));
+ pub fn install_src(&self, stage: u32) {
+ self.install_sh("src", "rust-src", stage, None);
+ }
+ pub fn install_rustc(&self, stage: u32, host: &str) {
+ self.install_sh("rustc", "rustc", stage, Some(host));
}
- fn install_sh(&self, package: &str, name: &str, version: &str,
- stage: u32, host: Option<&str>, empty_dir: &Path) {
+ fn install_sh(&self, package: &str, name: &str, stage: u32, host: Option<&str>) {
println!("Install {} stage{} ({:?})", package, stage, host);
let package_name = if let Some(host) = host {
- format!("{}-{}-{}", name, version, host)
+ format!("{}-{}", pkgname(self.build, name), host)
} else {
- format!("{}-{}", name, version)
+ pkgname(self.build, name)
};
let mut cmd = Command::new("sh");
- cmd.current_dir(empty_dir)
+ cmd.current_dir(&self.empty_dir)
.arg(sanitize_sh(&tmpdir(self.build).join(&package_name).join("install.sh")))
.arg(format!("--prefix={}", sanitize_sh(&self.prefix)))
.arg(format!("--sysconfdir={}", sanitize_sh(&self.sysconfdir)))
$(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)
$(Q)$(BOOTSTRAP) test distcheck $(BOOTSTRAP_ARGS)
install:
- $(Q)$(BOOTSTRAP) dist --install $(BOOTSTRAP_ARGS)
+ $(Q)$(BOOTSTRAP) install $(BOOTSTRAP_ARGS)
tidy:
$(Q)$(BOOTSTRAP) test src/tools/tidy $(BOOTSTRAP_ARGS)
prepare:
.host(true)
.run(move |s| check::docs(build, &s.compiler()));
rules.test("check-distcheck", "distcheck")
+ .dep(|s| s.name("dist-plain-source-tarball"))
.dep(|s| s.name("dist-src"))
.run(move |_| check::distcheck(build));
dist::mingw(build, s.target)
}
});
+ rules.dist("dist-plain-source-tarball", "src")
+ .default(build.config.rust_dist_src)
+ .host(true)
+ .only_build(true)
+ .only_host_build(true)
+ .dep(move |s| tool_rust_installer(build, s))
+ .run(move |_| dist::plain_source_tarball(build));
rules.dist("dist-src", "src")
.default(true)
.host(true)
.dep(|s| s.name("tool-rls"))
.dep(move |s| tool_rust_installer(build, s))
.run(move |s| dist::rls(build, s.stage, s.target));
- rules.dist("install", "path/to/nowhere")
- .dep(|s| s.name("default:dist"))
- .run(move |s| install::Installer::new(build).install(s.stage, s.target));
rules.dist("dist-cargo", "cargo")
.host(true)
.only_host_build(true)
.dep(move |s| s.name("tool-build-manifest").target(&build.config.build).stage(0))
.run(move |_| dist::hash_and_sign(build));
+ rules.install("install-docs", "src/doc")
+ .default(build.config.docs)
+ .only_host_build(true)
+ .dep(|s| s.name("dist-docs"))
+ .run(move |s| install::Installer::new(build).install_docs(s.stage, s.target));
+ rules.install("install-std", "src/libstd")
+ .default(true)
+ .only_host_build(true)
+ .dep(|s| s.name("dist-std"))
+ .run(move |s| install::Installer::new(build).install_std(s.stage));
+ rules.install("install-cargo", "cargo")
+ .default(build.config.extended)
+ .host(true)
+ .only_host_build(true)
+ .dep(|s| s.name("dist-cargo"))
+ .run(move |s| install::Installer::new(build).install_cargo(s.stage, s.target));
+ rules.install("install-rls", "rls")
+ .default(build.config.extended)
+ .host(true)
+ .only_host_build(true)
+ .dep(|s| s.name("dist-rls"))
+ .run(move |s| install::Installer::new(build).install_rls(s.stage, s.target));
+ rules.install("install-analysis", "analysis")
+ .default(build.config.extended)
+ .only_host_build(true)
+ .dep(|s| s.name("dist-analysis"))
+ .run(move |s| install::Installer::new(build).install_analysis(s.stage, s.target));
+ rules.install("install-src", "src")
+ .default(build.config.extended)
+ .host(true)
+ .only_build(true)
+ .only_host_build(true)
+ .dep(|s| s.name("dist-src"))
+ .run(move |s| install::Installer::new(build).install_src(s.stage));
+ rules.install("install-rustc", "src/librustc")
+ .default(true)
+ .host(true)
+ .only_host_build(true)
+ .dep(|s| s.name("dist-rustc"))
+ .run(move |s| install::Installer::new(build).install_rustc(s.stage, s.target));
+
rules.verify();
return rules;
Bench,
Dist,
Doc,
+ Install,
}
impl<'a> Rule<'a> {
self.rule(name, path, Kind::Dist)
}
+ /// Same as `build`, but for `Kind::Install`.
+ fn install<'b>(&'b mut self, name: &'a str, path: &'a str)
+ -> RuleBuilder<'a, 'b> {
+ self.rule(name, path, Kind::Install)
+ }
+
fn rule<'b>(&'b mut self,
name: &'a str,
path: &'a str,
"test" => Kind::Test,
"bench" => Kind::Bench,
"dist" => Kind::Dist,
+ "install" => Kind::Install,
_ => return None,
};
let rules = self.rules.values().filter(|r| r.kind == kind);
Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
Subcommand::Test { ref paths, test_args: _ } => (Kind::Test, &paths[..]),
Subcommand::Bench { ref paths, test_args: _ } => (Kind::Bench, &paths[..]),
- Subcommand::Dist { ref paths, install } => {
- if install {
- return vec![self.sbuild.name("install")]
- } else {
- (Kind::Dist, &paths[..])
- }
- }
+ Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]),
+ Subcommand::Install { ref paths } => (Kind::Install, &paths[..]),
Subcommand::Clean => panic!(),
};
use config::Config;
use flags::Flags;
- macro_rules! a {
- ($($a:expr),*) => (vec![$($a.to_string()),*])
- }
-
fn build(args: &[&str],
extra_host: &[&str],
extra_target: &[&str]) -> Build {
libssl-dev \
pkg-config
-RUN curl -o /usr/local/bin/sccache \
- https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
- chmod +x /usr/local/bin/sccache
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
- dpkg -i dumb-init_*.deb && \
- rm dumb-init_*.deb
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
WORKDIR /tmp
COPY cross/build-arm-musl.sh /tmp/
RUN ./build-arm-musl.sh
-# originally from
-# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2
-RUN mkdir /usr/local/mips-linux-musl
-RUN curl -L https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2 | \
- tar xjf - -C /usr/local/mips-linux-musl --strip-components=2
-RUN for file in /usr/local/mips-linux-musl/bin/mips-openwrt-linux-*; do \
- ln -s $file /usr/local/bin/`basename $file`; \
- done
+COPY cross/install-mips-musl.sh /tmp/
+RUN ./install-mips-musl.sh
-# Note that this originally came from:
-# https://downloads.openwrt.org/snapshots/trunk/malta/generic/OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2
-RUN mkdir /usr/local/mipsel-linux-musl
-RUN curl -L https://s3.amazonaws.com/rust-lang-ci/libc/OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 | \
- tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2
-RUN for file in /usr/local/mipsel-linux-musl/bin/mipsel-openwrt-linux-*; do \
- ln -s $file /usr/local/bin/`basename $file`; \
- done
+COPY cross/install-mipsel-musl.sh /tmp/
+RUN ./install-mipsel-musl.sh
ENV TARGETS=asmjs-unknown-emscripten
ENV TARGETS=$TARGETS,wasm32-unknown-emscripten
--musl-root-armhf=/usr/local/arm-linux-musleabihf \
--musl-root-armv7=/usr/local/armv7-linux-musleabihf
ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
--- /dev/null
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+mkdir /usr/local/mips-linux-musl
+
+# originally from
+# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/
+# OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2
+URL="https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror"
+FILE="OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2"
+curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mips-linux-musl --strip-components=2
+
+for file in /usr/local/mips-linux-musl/bin/mips-openwrt-linux-*; do
+ ln -s $file /usr/local/bin/`basename $file`
+done
--- /dev/null
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+mkdir /usr/local/mipsel-linux-musl
+
+# Note that this originally came from:
+# https://downloads.openwrt.org/snapshots/trunk/malta/generic/
+# OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2
+URL="https://s3.amazonaws.com/rust-lang-ci/libc"
+FILE="OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2"
+curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2
+
+for file in /usr/local/mipsel-linux-musl/bin/mipsel-openwrt-linux-*; do
+ ln -s $file /usr/local/bin/`basename $file`
+done
- [cfg_target_has_atomic](language-features/cfg-target-has-atomic.md)
- [cfg_target_thread_local](language-features/cfg-target-thread-local.md)
- [cfg_target_vendor](language-features/cfg-target-vendor.md)
- - [closure_to_fn_coercion](language-features/closure-to-fn-coercion.md)
- [compiler_builtins](language-features/compiler-builtins.md)
- [concat_idents](language-features/concat-idents.md)
- [conservative_impl_trait](language-features/conservative-impl-trait.md)
- [io](library-features/io.md)
- [ip](library-features/ip.md)
- [iter_rfind](library-features/iter-rfind.md)
+ - [iterator_step_by](library-features/iterator-step-by.md)
- [libstd_io_internals](library-features/libstd-io-internals.md)
- [libstd_sys_internals](library-features/libstd-sys-internals.md)
- [libstd_thread_internals](library-features/libstd-thread-internals.md)
+++ /dev/null
-# `closure_to_fn_coercion`
-
-The tracking issue for this feature is: [#39817]
-
-[#39817]: https://github.com/rust-lang/rust/issues/39817
-
-------------------------
/// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
/// pointers from children back to their parents.
///
+/// # Cloning references
+///
+/// Creating a new reference from an existing reference counted pointer is done using the
+/// `Clone` trait implemented for [`Arc<T>`][`arc`] and [`Weak<T>`][`weak`].
+///
+/// ```
+/// use std::sync::Arc;
+/// let foo = Arc::new(vec![1.0, 2.0, 3.0]);
+/// // The two syntaxes below are equivalent.
+/// let a = foo.clone();
+/// let b = Arc::clone(&foo);
+/// // a and b both point to the same memory location as foo.
+/// ```
+///
+/// The `Arc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
+/// the meaning of the code. In the example above, this syntax makes it easier to see that
+/// this code is creating a new reference rather than copying the whole content of `foo`.
+///
/// ## `Deref` behavior
///
/// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
/// let five = Arc::new(5);
///
/// for _ in 0..10 {
-/// let five = five.clone();
+/// let five = Arc::clone(&five);
///
/// thread::spawn(move || {
/// println!("{:?}", five);
/// let val = Arc::new(AtomicUsize::new(5));
///
/// for _ in 0..10 {
-/// let val = val.clone();
+/// let val = Arc::clone(&val);
///
/// thread::spawn(move || {
/// let v = val.fetch_add(1, Ordering::SeqCst);
/// assert_eq!(Arc::try_unwrap(x), Ok(3));
///
/// let x = Arc::new(4);
- /// let _y = x.clone();
+ /// let _y = Arc::clone(&x);
/// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4);
/// ```
#[inline]
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
- /// let _also_five = five.clone();
+ /// let _also_five = Arc::clone(&five);
///
/// // This assertion is deterministic because we haven't shared
/// // the `Arc` between threads.
/// use std::sync::Arc;
///
/// let five = Arc::new(5);
- /// let same_five = five.clone();
+ /// let same_five = Arc::clone(&five);
/// let other_five = Arc::new(5);
///
/// assert!(Arc::ptr_eq(&five, &same_five));
///
/// let five = Arc::new(5);
///
- /// five.clone();
+ /// Arc::clone(&five);
/// ```
#[inline]
fn clone(&self) -> Arc<T> {
/// let mut data = Arc::new(5);
///
/// *Arc::make_mut(&mut data) += 1; // Won't clone anything
- /// let mut other_data = data.clone(); // Won't clone inner data
+ /// let mut other_data = Arc::clone(&data); // Won't clone inner data
/// *Arc::make_mut(&mut data) += 1; // Clones inner data
/// *Arc::make_mut(&mut data) += 1; // Won't clone anything
/// *Arc::make_mut(&mut other_data) *= 2; // Won't clone anything
/// *Arc::get_mut(&mut x).unwrap() = 4;
/// assert_eq!(*x, 4);
///
- /// let _y = x.clone();
+ /// let _y = Arc::clone(&x);
/// assert!(Arc::get_mut(&mut x).is_none());
/// ```
#[inline]
/// }
///
/// let foo = Arc::new(Foo);
- /// let foo2 = foo.clone();
+ /// let foo2 = Arc::clone(&foo);
///
/// drop(foo); // Doesn't print anything
/// drop(foo2); // Prints "dropped!"
/// # Examples
///
/// ```
- /// use std::sync::Arc;
+ /// use std::sync::{Arc, Weak};
///
/// let weak_five = Arc::downgrade(&Arc::new(5));
///
- /// weak_five.clone();
+ /// Weak::clone(&weak_five);
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
/// # Examples
///
/// ```
- /// use std::sync::Arc;
+ /// use std::sync::{Arc, Weak};
///
/// struct Foo;
///
///
/// let foo = Arc::new(Foo);
/// let weak_foo = Arc::downgrade(&foo);
- /// let other_weak_foo = weak_foo.clone();
+ /// let other_weak_foo = Weak::clone(&weak_foo);
///
/// drop(weak_foo); // Doesn't print anything
/// drop(foo); // Prints "dropped!"
//! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the value may have
//! already been destroyed.
//!
+//! # Cloning references
+//!
+//! Creating a new reference from an existing reference counted pointer is done using the
+//! `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
+//!
+//! ```
+//! use std::rc::Rc;
+//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
+//! // The two syntaxes below are equivalent.
+//! let a = foo.clone();
+//! let b = Rc::clone(&foo);
+//! // a and b both point to the same memory location as foo.
+//! ```
+//!
+//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
+//! the meaning of the code. In the example above, this syntax makes it easier to see that
+//! this code is creating a new reference rather than copying the whole content of `foo`.
+//!
//! # Examples
//!
//! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
//! // the reference count in the process.
//! let gadget1 = Gadget {
//! id: 1,
-//! owner: gadget_owner.clone(),
+//! owner: Rc::clone(&gadget_owner),
//! };
//! let gadget2 = Gadget {
//! id: 2,
-//! owner: gadget_owner.clone(),
+//! owner: Rc::clone(&gadget_owner),
//! };
//!
//! // Dispose of our local variable `gadget_owner`.
//! let gadget1 = Rc::new(
//! Gadget {
//! id: 1,
-//! owner: gadget_owner.clone(),
+//! owner: Rc::clone(&gadget_owner),
//! }
//! );
//! let gadget2 = Rc::new(
//! Gadget {
//! id: 2,
-//! owner: gadget_owner.clone(),
+//! owner: Rc::clone(&gadget_owner),
//! }
//! );
//!
/// assert_eq!(Rc::try_unwrap(x), Ok(3));
///
/// let x = Rc::new(4);
- /// let _y = x.clone();
+ /// let _y = Rc::clone(&x);
/// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
/// ```
#[inline]
/// use std::rc::Rc;
///
/// let five = Rc::new(5);
- /// let _also_five = five.clone();
+ /// let _also_five = Rc::clone(&five);
///
/// assert_eq!(2, Rc::strong_count(&five));
/// ```
/// *Rc::get_mut(&mut x).unwrap() = 4;
/// assert_eq!(*x, 4);
///
- /// let _y = x.clone();
+ /// let _y = Rc::clone(&x);
/// assert!(Rc::get_mut(&mut x).is_none());
/// ```
#[inline]
/// use std::rc::Rc;
///
/// let five = Rc::new(5);
- /// let same_five = five.clone();
+ /// let same_five = Rc::clone(&five);
/// let other_five = Rc::new(5);
///
/// assert!(Rc::ptr_eq(&five, &same_five));
/// let mut data = Rc::new(5);
///
/// *Rc::make_mut(&mut data) += 1; // Won't clone anything
- /// let mut other_data = data.clone(); // Won't clone inner data
+ /// let mut other_data = Rc::clone(&data); // Won't clone inner data
/// *Rc::make_mut(&mut data) += 1; // Clones inner data
/// *Rc::make_mut(&mut data) += 1; // Won't clone anything
/// *Rc::make_mut(&mut other_data) *= 2; // Won't clone anything
/// }
///
/// let foo = Rc::new(Foo);
- /// let foo2 = foo.clone();
+ /// let foo2 = Rc::clone(&foo);
///
/// drop(foo); // Doesn't print anything
/// drop(foo2); // Prints "dropped!"
///
/// let five = Rc::new(5);
///
- /// five.clone();
+ /// Rc::clone(&five);
/// ```
#[inline]
fn clone(&self) -> Rc<T> {
/// # Examples
///
/// ```
- /// use std::rc::Rc;
+ /// use std::rc::{Rc, Weak};
///
/// struct Foo;
///
///
/// let foo = Rc::new(Foo);
/// let weak_foo = Rc::downgrade(&foo);
- /// let other_weak_foo = weak_foo.clone();
+ /// let other_weak_foo = Weak::clone(&weak_foo);
///
/// drop(weak_foo); // Doesn't print anything
/// drop(foo); // Prints "dropped!"
/// # Examples
///
/// ```
- /// use std::rc::Rc;
+ /// use std::rc::{Rc, Weak};
///
/// let weak_five = Rc::downgrade(&Rc::new(5));
///
- /// weak_five.clone();
+ /// Weak::clone(&weak_five);
/// ```
#[inline]
fn clone(&self) -> Weak<T> {
//! // instead of a max-heap.
//! impl Ord for State {
//! fn cmp(&self, other: &State) -> Ordering {
-//! // Notice that the we flip the ordering here
+//!             // Notice that we flip the ordering on costs.
+//! // In case of a tie we compare positions - this step is necessary
+//! // to make implementations of `PartialEq` and `Ord` consistent.
//! other.cost.cmp(&self.cost)
+//! .then_with(|| self.position.cmp(&other.position))
//! }
//! }
//!
/// the rule that `eq` is a strict inverse of `ne`; that is, `!(a == b)` if and
/// only if `a != b`.
///
+/// Implementations of `PartialEq`, `PartialOrd`, and `Ord` *must* agree with
+/// each other. It's easy to accidentally make them disagree by deriving some
+/// of the traits and manually implementing others.
+///
/// An example implementation for a domain in which two books are considered
/// the same book if their ISBN matches, even if the formats differ:
///
/// Then you must define an implementation for `cmp()`. You may find it useful to use
/// `cmp()` on your type's fields.
///
+/// Implementations of `PartialEq`, `PartialOrd`, and `Ord` *must* agree with each other. It's
+/// easy to accidentally make them disagree by deriving some of the traits and manually
+/// implementing others.
+///
/// Here's an example where you want to sort people by height only, disregarding `id`
/// and `name`:
///
///
/// ## How can I implement `PartialOrd`?
///
-/// PartialOrd only requires implementation of the `partial_cmp` method, with the others generated
-/// from default implementations.
+/// `PartialOrd` only requires implementation of the `partial_cmp` method, with the others
+/// generated from default implementations.
///
/// However it remains possible to implement the others separately for types which do not have a
/// total order. For example, for floating point numbers, `NaN < 0 == false` and `NaN >= 0 ==
///
/// `PartialOrd` requires your type to be `PartialEq`.
///
+/// Implementations of `PartialEq`, `PartialOrd`, and `Ord` *must* agree with each other. It's
+/// easy to accidentally make them disagree by deriving some of the traits and manually
+/// implementing others.
+///
/// If your type is `Ord`, you can implement `partial_cmp()` by using `cmp()`:
///
/// ```
#[unstable(feature = "fused", issue = "35602")]
impl<I> FusedIterator for Cycle<I> where I: Clone + Iterator {}
-/// An iterator that steps by n elements every iteration.
+/// An adapter for stepping iterators by a custom amount.
///
/// This `struct` is created by the [`step_by`] method on [`Iterator`]. See
/// its documentation for more.
self.iter.nth(self.step)
}
}
+
+ #[inline]
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ let inner_hint = self.iter.size_hint();
+
+ if self.first_take {
+ let f = |n| if n == 0 { 0 } else { 1 + (n-1)/(self.step+1) };
+ (f(inner_hint.0), inner_hint.1.map(f))
+ } else {
+ let f = |n| n / (self.step+1);
+ (f(inner_hint.0), inner_hint.1.map(f))
+ }
+ }
}
+// StepBy can only make the iterator shorter, so the len will still fit.
+#[unstable(feature = "iterator_step_by",
+ reason = "unstable replacement of Range::step_by",
+ issue = "27741")]
+impl<I> ExactSizeIterator for StepBy<I> where I: ExactSizeIterator {}
+
/// An iterator that strings two iterators together.
///
/// This `struct` is created by the [`chain`] method on [`Iterator`]. See its
it.next();
}
+#[test]
+fn test_iterator_step_by_size_hint() {
+ struct StubSizeHint(usize, Option<usize>);
+ impl Iterator for StubSizeHint {
+ type Item = ();
+ fn next(&mut self) -> Option<()> {
+ self.0 -= 1;
+ if let Some(ref mut upper) = self.1 {
+ *upper -= 1;
+ }
+ Some(())
+ }
+ fn size_hint(&self) -> (usize, Option<usize>) {
+ (self.0, self.1)
+ }
+ }
+
+ // The two checks in each case are needed because the logic
+ // is different before the first call to `next()`.
+
+ let mut it = StubSizeHint(10, Some(10)).step_by(1);
+ assert_eq!(it.size_hint(), (10, Some(10)));
+ it.next();
+ assert_eq!(it.size_hint(), (9, Some(9)));
+
+ // exact multiple
+ let mut it = StubSizeHint(10, Some(10)).step_by(3);
+ assert_eq!(it.size_hint(), (4, Some(4)));
+ it.next();
+ assert_eq!(it.size_hint(), (3, Some(3)));
+
+ // larger base range, but not enough to get another element
+ let mut it = StubSizeHint(12, Some(12)).step_by(3);
+ assert_eq!(it.size_hint(), (4, Some(4)));
+ it.next();
+ assert_eq!(it.size_hint(), (3, Some(3)));
+
+ // smaller base range, so fewer resulting elements
+ let mut it = StubSizeHint(9, Some(9)).step_by(3);
+ assert_eq!(it.size_hint(), (3, Some(3)));
+ it.next();
+ assert_eq!(it.size_hint(), (2, Some(2)));
+
+ // infinite upper bound
+ let mut it = StubSizeHint(usize::MAX, None).step_by(1);
+ assert_eq!(it.size_hint(), (usize::MAX, None));
+ it.next();
+ assert_eq!(it.size_hint(), (usize::MAX-1, None));
+
+ // still infinite with larger step
+ let mut it = StubSizeHint(7, None).step_by(3);
+ assert_eq!(it.size_hint(), (3, None));
+ it.next();
+ assert_eq!(it.size_hint(), (2, None));
+
+ // propagates ExactSizeIterator
+ let a = [1,2,3,4,5];
+ let it = a.iter().step_by(2);
+ assert_eq!(it.len(), 3);
+
+    // StepBy cannot implement TrustedLen: with a step greater than one, an
+    // iterator whose hint is (usize::MAX, None) would break the safety requirements
+ trait TrustedLenCheck { fn test(self) -> bool; }
+ impl<T:Iterator> TrustedLenCheck for T {
+ default fn test(self) -> bool { false }
+ }
+ impl<T:TrustedLen> TrustedLenCheck for T {
+ fn test(self) -> bool { true }
+ }
+ assert!(TrustedLenCheck::test(a.iter()));
+ assert!(!TrustedLenCheck::test(a.iter().step_by(1)));
+}
+
#[test]
fn test_filter_map() {
let it = (0..).step_by(1).take(10)
#![feature(slice_patterns)]
#![feature(sort_internals)]
#![feature(sort_unstable)]
+#![feature(specialization)]
#![feature(step_by)]
#![feature(step_trait)]
#![feature(test)]
+#![feature(trusted_len)]
#![feature(try_from)]
#![feature(unicode)]
#![feature(unique)]
IsMirAvailable(D),
ItemAttrs(D),
FnArgNames(D),
- FileMap(D, Arc<String>),
}
impl<D: Clone + Debug> DepNode<D> {
ConstIsRvaluePromotableToStatic(ref d) => op(d).map(ConstIsRvaluePromotableToStatic),
IsMirAvailable(ref d) => op(d).map(IsMirAvailable),
GlobalMetaData(ref d, kind) => op(d).map(|d| GlobalMetaData(d, kind)),
- FileMap(ref d, ref file_name) => op(d).map(|d| FileMap(d, file_name.clone())),
}
}
}
use hir::def_id::DefId;
use rustc_data_structures::fx::FxHashMap;
use std::cell::RefCell;
-use std::collections::hash_map::Entry;
use std::ops::Index;
use std::hash::Hash;
use std::marker::PhantomData;
self.graph.read(dep_node);
}
- /// Registers a (synthetic) write to the key `k`. Usually this is
- /// invoked automatically by `insert`.
- fn write(&self, k: &M::Key) {
- let dep_node = M::to_dep_node(k);
- self.graph.write(dep_node);
- }
-
pub fn get(&self, k: &M::Key) -> Option<&M::Value> {
self.read(k);
self.map.get(k)
}
- pub fn insert(&mut self, k: M::Key, v: M::Value) {
- self.write(&k);
- let old_value = self.map.insert(k, v);
- assert!(old_value.is_none());
- }
-
- pub fn entry(&mut self, k: M::Key) -> Entry<M::Key, M::Value> {
- self.write(&k);
- self.map.entry(k)
- }
-
pub fn contains_key(&self, k: &M::Key) -> bool {
self.read(k);
self.map.contains_key(k)
}
}
- pub fn write(&self, v: DepNode<DefId>) {
- if self.data.thread.is_enqueue_enabled() {
- self.data.thread.enqueue(DepMessage::Write(v));
- }
- }
-
/// Indicates that a previous work product exists for `v`. This is
/// invoked during initial start-up based on what nodes are clean
/// (and what files exist in the incr. directory).
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use dep_graph::{DepGraph, DepNode};
-use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
-use rustc_data_structures::bitvec::BitVector;
use std::rc::Rc;
-use std::sync::Arc;
use syntax::codemap::CodeMap;
use syntax_pos::{BytePos, FileMap};
use ty::TyCtxt;
codemap: &'tcx CodeMap,
line_cache: [CacheEntry; 3],
time_stamp: usize,
- dep_graph: DepGraph,
- dep_tracking_reads: BitVector,
}
impl<'tcx> CachingCodemapView<'tcx> {
pub fn new<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> CachingCodemapView<'tcx> {
let codemap = tcx.sess.codemap();
- let files = codemap.files_untracked();
+ let files = codemap.files();
let first_file = files[0].clone();
let entry = CacheEntry {
time_stamp: 0,
};
CachingCodemapView {
- dep_graph: tcx.dep_graph.clone(),
codemap: codemap,
line_cache: [entry.clone(), entry.clone(), entry.clone()],
time_stamp: 0,
- dep_tracking_reads: BitVector::new(files.len()),
}
}
for cache_entry in self.line_cache.iter_mut() {
if pos >= cache_entry.line_start && pos < cache_entry.line_end {
cache_entry.time_stamp = self.time_stamp;
- if self.dep_tracking_reads.insert(cache_entry.file_index) {
- self.dep_graph.read(dep_node(cache_entry));
- }
return Some((cache_entry.file.clone(),
cache_entry.line_number,
// If the entry doesn't point to the correct file, fix it up
if pos < cache_entry.file.start_pos || pos >= cache_entry.file.end_pos {
let file_valid;
- let files = self.codemap.files_untracked();
+ let files = self.codemap.files();
if files.len() > 0 {
let file_index = self.codemap.lookup_filemap_idx(pos);
cache_entry.line_end = line_bounds.1;
cache_entry.time_stamp = self.time_stamp;
- if self.dep_tracking_reads.insert(cache_entry.file_index) {
- self.dep_graph.read(dep_node(cache_entry));
- }
-
return Some((cache_entry.file.clone(),
cache_entry.line_number,
pos - cache_entry.line_start));
}
}
-
-fn dep_node(cache_entry: &CacheEntry) -> DepNode<DefId> {
- let def_id = DefId {
- krate: CrateNum::from_u32(cache_entry.file.crate_of_origin),
- index: CRATE_DEF_INDEX,
- };
- let name = Arc::new(cache_entry.file.name.clone());
- DepNode::FileMap(def_id, name)
-}
}
}
+ pub fn force_span_hashing(mut self) -> Self {
+ self.hash_spans = true;
+ self
+ }
+
#[inline]
pub fn while_hashing_hir_bodies<F: FnOnce(&mut Self)>(&mut self,
hash_bodies: bool,
impl_stable_hash_for!(enum mir::Mutability { Mut, Not });
impl_stable_hash_for!(enum mir::BorrowKind { Shared, Unique, Mut });
impl_stable_hash_for!(enum mir::LocalKind { Var, Temp, Arg, ReturnPointer });
-impl_stable_hash_for!(struct mir::LocalDecl<'tcx> { mutability, ty, name, source_info,
-is_user_variable});
+impl_stable_hash_for!(struct mir::LocalDecl<'tcx> {
+ mutability,
+ ty,
+ name,
+ source_info,
+ is_user_variable
+});
impl_stable_hash_for!(struct mir::UpvarDecl { debug_name, by_ref });
impl_stable_hash_for!(struct mir::BasicBlockData<'tcx> { statements, terminator, is_cleanup });
-impl_stable_hash_for!(struct mir::Terminator<'tcx> { source_info, kind });
+
+impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Terminator<'tcx> {
+ #[inline]
+ fn hash_stable<W: StableHasherResult>(&self,
+ hcx: &mut StableHashingContext<'a, 'tcx>,
+ hasher: &mut StableHasher<W>) {
+ let mir::Terminator {
+ ref kind,
+ ref source_info,
+ } = *self;
+
+ let hash_spans_unconditionally = match *kind {
+ mir::TerminatorKind::Assert { .. } => {
+ // Assert terminators generate a panic message that contains the
+ // source location, so we always have to feed its span into the
+ // ICH.
+ true
+ }
+ mir::TerminatorKind::Goto { .. } |
+ mir::TerminatorKind::SwitchInt { .. } |
+ mir::TerminatorKind::Resume |
+ mir::TerminatorKind::Return |
+ mir::TerminatorKind::Unreachable |
+ mir::TerminatorKind::Drop { .. } |
+ mir::TerminatorKind::DropAndReplace { .. } |
+ mir::TerminatorKind::Call { .. } => false,
+ };
+
+ if hash_spans_unconditionally {
+ hcx.while_hashing_spans(true, |hcx| {
+ source_info.hash_stable(hcx, hasher);
+ })
+ } else {
+ source_info.hash_stable(hcx, hasher);
+ }
+
+ kind.hash_stable(hcx, hasher);
+ }
+}
+
impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Local {
#[inline]
mir::Rvalue::Discriminant(ref lvalue) => {
lvalue.hash_stable(hcx, hasher);
}
- mir::Rvalue::Box(ty) => {
+ mir::Rvalue::NullaryOp(op, ty) => {
+ op.hash_stable(hcx, hasher);
ty.hash_stable(hcx, hasher);
}
mir::Rvalue::Aggregate(ref kind, ref operands) => {
Le,
Ne,
Ge,
- Gt
+ Gt,
+ Offset
});
impl_stable_hash_for!(enum mir::UnOp {
Neg
});
+impl_stable_hash_for!(enum mir::NullOp {
+ Box,
+ SizeOf
+});
impl_stable_hash_for!(struct mir::Constant<'tcx> { span, ty, literal });
#![cfg_attr(stage0, feature(staged_api))]
#![cfg_attr(stage0, feature(loop_break_value))]
-#![recursion_limit="128"]
+#![recursion_limit="192"]
extern crate arena;
extern crate core;
"detects missing fragment specifiers in unused `macro_rules!` patterns"
}
+declare_lint! {
+ pub PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
+ Warn,
+ "detects parenthesized generic parameters in type and module names"
+}
+
declare_lint! {
pub DEPRECATED,
Warn,
LEGACY_IMPORTS,
LEGACY_CONSTRUCTOR_VISIBILITY,
MISSING_FRAGMENT_SPECIFIER,
+ PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
DEPRECATED
)
}
// flags
fn is_const_fn(&self, did: DefId) -> bool;
- fn is_default_impl(&self, impl_did: DefId) -> bool;
fn is_dllimport_foreign_item(&self, def: DefId) -> bool;
fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool;
// flags
fn is_const_fn(&self, did: DefId) -> bool { bug!("is_const_fn") }
- fn is_default_impl(&self, impl_did: DefId) -> bool { bug!("is_default_impl") }
fn is_dllimport_foreign_item(&self, id: DefId) -> bool { false }
fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool { false }
BinaryOp(BinOp, Operand<'tcx>, Operand<'tcx>),
CheckedBinaryOp(BinOp, Operand<'tcx>, Operand<'tcx>),
+ NullaryOp(NullOp, Ty<'tcx>),
UnaryOp(UnOp, Operand<'tcx>),
/// Read the discriminant of an ADT.
/// be defined to return, say, a 0) if ADT is not an enum.
Discriminant(Lvalue<'tcx>),
- /// Creates an *uninitialized* Box
- Box(Ty<'tcx>),
-
/// Create an aggregate value, like a tuple or struct. This is
/// only needed because we want to distinguish `dest = Foo { x:
/// ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case
Ge,
/// The `>` operator (greater than)
Gt,
+ /// The `ptr.offset` operator
+ Offset,
}
impl BinOp {
}
}
+#[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
+pub enum NullOp {
+ /// Return the size of a value of that type
+ SizeOf,
+ /// Create a new uninitialized box for a value of that type
+ Box,
+}
+
#[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
pub enum UnOp {
/// The `!` operator for logical inversion
}
UnaryOp(ref op, ref a) => write!(fmt, "{:?}({:?})", op, a),
Discriminant(ref lval) => write!(fmt, "discriminant({:?})", lval),
- Box(ref t) => write!(fmt, "Box({:?})", t),
+ NullaryOp(ref op, ref t) => write!(fmt, "{:?}({:?})", op, t),
Ref(_, borrow_kind, ref lv) => {
let kind_str = match borrow_kind {
BorrowKind::Shared => "",
CheckedBinaryOp(op, rhs.fold_with(folder), lhs.fold_with(folder)),
UnaryOp(op, ref val) => UnaryOp(op, val.fold_with(folder)),
Discriminant(ref lval) => Discriminant(lval.fold_with(folder)),
- Box(ty) => Box(ty.fold_with(folder)),
+ NullaryOp(op, ty) => NullaryOp(op, ty.fold_with(folder)),
Aggregate(ref kind, ref fields) => {
let kind = box match **kind {
AggregateKind::Array(ty) => AggregateKind::Array(ty.fold_with(folder)),
rhs.visit_with(visitor) || lhs.visit_with(visitor),
UnaryOp(_, ref val) => val.visit_with(visitor),
Discriminant(ref lval) => lval.visit_with(visitor),
- Box(ty) => ty.visit_with(visitor),
+ NullaryOp(_, ty) => ty.visit_with(visitor),
Aggregate(ref kind, ref fields) => {
(match **kind {
AggregateKind::Array(ty) => ty.visit_with(visitor),
let ty = op.ty(tcx, lhs_ty, rhs_ty);
tcx.intern_tup(&[ty, tcx.types.bool], false)
}
- Rvalue::UnaryOp(_, ref operand) => {
+ Rvalue::UnaryOp(UnOp::Not, ref operand) |
+ Rvalue::UnaryOp(UnOp::Neg, ref operand) => {
operand.ty(mir, tcx)
}
Rvalue::Discriminant(ref lval) => {
bug!("Rvalue::Discriminant on Lvalue of type {:?}", ty);
}
}
- Rvalue::Box(t) => {
- tcx.mk_box(t)
- }
+ Rvalue::NullaryOp(NullOp::Box, t) => tcx.mk_box(t),
+ Rvalue::NullaryOp(NullOp::SizeOf, _) => tcx.types.usize,
Rvalue::Aggregate(ref ak, ref ops) => {
match **ak {
AggregateKind::Array(ty) => {
assert_eq!(lhs_ty, rhs_ty);
lhs_ty
}
- &BinOp::Shl | &BinOp::Shr => {
+ &BinOp::Shl | &BinOp::Shr | &BinOp::Offset => {
lhs_ty // lhs_ty can be != rhs_ty
}
&BinOp::Eq | &BinOp::Lt | &BinOp::Le |
BinOp::Lt => hir::BinOp_::BiLt,
BinOp::Gt => hir::BinOp_::BiGt,
BinOp::Le => hir::BinOp_::BiLe,
- BinOp::Ge => hir::BinOp_::BiGe
+ BinOp::Ge => hir::BinOp_::BiGe,
+ BinOp::Offset => unreachable!()
}
}
}
self.visit_lvalue(lvalue, LvalueContext::Inspect, location);
}
- Rvalue::Box(ref $($mutability)* ty) => {
+ Rvalue::NullaryOp(_op, ref $($mutability)* ty) => {
self.visit_ty(ty);
}
pub use self::code_stats::{CodeStats, DataTypeKind, FieldInfo};
pub use self::code_stats::{SizeKind, TypeSizeInfo, VariantInfo};
-use dep_graph::{DepGraph, DepNode};
-use hir::def_id::{DefId, CrateNum, DefIndex, CRATE_DEF_INDEX};
+use dep_graph::DepGraph;
+use hir::def_id::{CrateNum, DefIndex};
+
use lint;
use middle::cstore::CrateStore;
use middle::dependency_format;
use syntax::symbol::Symbol;
use syntax::{ast, codemap};
use syntax::feature_gate::AttributeType;
-use syntax_pos::{Span, MultiSpan, FileMap};
+use syntax_pos::{Span, MultiSpan};
use rustc_back::{LinkerFlavor, PanicStrategy};
use rustc_back::target::Target;
use std::rc::Rc;
use std::fmt;
use std::time::Duration;
-use std::sync::Arc;
mod code_stats;
pub mod config;
};
let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
- // Hook up the codemap with a callback that allows it to register FileMap
- // accesses with the dependency graph.
- let cm_depgraph = dep_graph.clone();
- let codemap_dep_tracking_callback = Box::new(move |filemap: &FileMap| {
- let def_id = DefId {
- krate: CrateNum::from_u32(filemap.crate_of_origin),
- index: CRATE_DEF_INDEX,
- };
- let name = Arc::new(filemap.name.clone());
- let dep_node = DepNode::FileMap(def_id, name);
-
- cm_depgraph.read(dep_node);
- });
- codemap.set_dep_tracking_callback(codemap_dep_tracking_callback);
-
let p_s = parse::ParseSess::with_span_handler(span_diagnostic, codemap);
let default_sysroot = match sopts.maybe_sysroot {
Some(_) => None,
use ty::inhabitedness::DefIdForest;
use ty::maps;
use ty::steal::Steal;
-use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet};
+use util::nodemap::{NodeMap, NodeSet, DefIdSet};
use util::nodemap::{FxHashMap, FxHashSet};
use rustc_data_structures::accumulate_vec::AccumulateVec;
/// Maps Expr NodeId's to `true` iff `&expr` can have 'static lifetime.
pub rvalue_promotable_to_static: RefCell<NodeMap<bool>>,
- /// Maps Fn items to a collection of fragment infos.
- ///
- /// The main goal is to identify data (each of which may be moved
- /// or assigned) whose subparts are not moved nor assigned
- /// (i.e. their state is *unfragmented*) and corresponding ast
- /// nodes where the path to that data is moved or assigned.
- ///
- /// In the long term, unfragmented values will have their
- /// destructor entirely driven by a single stack-local drop-flag,
- /// and their parents, the collections of the unfragmented values
- /// (or more simply, "fragmented values"), are mapped to the
- /// corresponding collections of stack-local drop-flags.
- ///
- /// (However, in the short term that is not the case; e.g. some
- /// unfragmented paths still need to be zeroed, namely when they
- /// reference parent data from an outer scope that was not
- /// entirely moved, and therefore that needs to be zeroed so that
- /// we do not get double-drop when we hit the end of the parent
- /// scope.)
- ///
- /// Also: currently the table solely holds keys for node-ids of
- /// unfragmented values (see `FragmentInfo` enum definition), but
- /// longer-term we will need to also store mappings from
- /// fragmented data to the set of unfragmented pieces that
- /// constitute it.
- pub fragment_infos: RefCell<DefIdMap<Vec<ty::FragmentInfo>>>,
-
/// The definite name of the current crate after taking into account
/// attributes, commandline parameters, etc.
pub crate_name: Symbol,
export_map: resolutions.export_map,
fulfilled_predicates: RefCell::new(fulfilled_predicates),
hir: hir,
- maps: maps::Maps::new(dep_graph, providers),
+ maps: maps::Maps::new(providers),
mir_passes,
freevars: RefCell::new(resolutions.freevars),
maybe_unused_trait_imports: resolutions.maybe_unused_trait_imports,
selection_cache: traits::SelectionCache::new(),
evaluation_cache: traits::EvaluationCache::new(),
rvalue_promotable_to_static: RefCell::new(NodeMap()),
- fragment_infos: RefCell::new(DefIdMap()),
crate_name: Symbol::intern(crate_name),
data_layout: data_layout,
layout_cache: RefCell::new(FxHashMap()),
// Always use types for non-local impls, where types are always
// available, and filename/line-number is mostly uninteresting.
- let use_types = !impl_def_id.is_local() || {
+ let use_types = !self.is_default_impl(impl_def_id) && (!impl_def_id.is_local() || {
// Otherwise, use filename/line-number if forced.
let force_no_types = FORCE_IMPL_FILENAME_LINE.with(|f| f.get());
!force_no_types && {
ty::queries::impl_trait_ref::try_get(self, DUMMY_SP, impl_def_id).is_ok() &&
ty::queries::type_of::try_get(self, DUMMY_SP, impl_def_id).is_ok()
}
- };
+ });
if !use_types {
return self.push_impl_path_fallback(buffer, impl_def_id);
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-use dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig};
+use dep_graph::{DepNode, DepTrackingMapConfig};
use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, LOCAL_CRATE};
use hir::def::Def;
use hir;
use util::nodemap::{DefIdSet, NodeSet};
use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::fx::FxHashMap;
use std::cell::{RefCell, RefMut};
use std::fmt::Debug;
use std::hash::Hash;
+use std::marker::PhantomData;
use std::mem;
use std::collections::BTreeMap;
use std::ops::Deref;
}
}
+struct QueryMap<D: QueryDescription> {
+ phantom: PhantomData<D>,
+ map: FxHashMap<D::Key, D::Value>,
+}
+
+impl<M: QueryDescription> QueryMap<M> {
+ fn new() -> QueryMap<M> {
+ QueryMap {
+ phantom: PhantomData,
+ map: FxHashMap(),
+ }
+ }
+}
+
pub struct CycleError<'a, 'tcx: 'a> {
span: Span,
cycle: RefMut<'a, [(Span, Query<'tcx>)]>,
}
impl<$tcx> Maps<$tcx> {
- pub fn new(dep_graph: DepGraph,
- providers: IndexVec<CrateNum, Providers<$tcx>>)
+ pub fn new(providers: IndexVec<CrateNum, Providers<$tcx>>)
-> Self {
Maps {
providers,
query_stack: RefCell::new(vec![]),
- $($name: RefCell::new(DepTrackingMap::new(dep_graph.clone()))),*
+ $($name: RefCell::new(QueryMap::new())),*
}
}
}
key,
span);
- if let Some(result) = tcx.maps.$name.borrow().get(&key) {
+ if let Some(result) = tcx.maps.$name.borrow().map.get(&key) {
return Ok(f(result));
}
provider(tcx.global_tcx(), key)
})?;
- Ok(f(tcx.maps.$name.borrow_mut().entry(key).or_insert(result)))
+ Ok(f(tcx.maps.$name.borrow_mut().map.entry(key).or_insert(result)))
}
pub fn try_get(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K)
-> Result<$V, CycleError<'a, $tcx>> {
+ // We register the `read` here, but not in `force`, since
+ // `force` does not give access to the value produced (and thus
+ // we actually don't read it).
+ tcx.dep_graph.read(Self::to_dep_node(&key));
Self::try_get_with(tcx, span, key, Clone::clone)
}
pub fn force(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K) {
- // FIXME(eddyb) Move away from using `DepTrackingMap`
- // so we don't have to explicitly ignore a false edge:
- // we can't observe a value dependency, only side-effects,
- // through `force`, and once everything has been updated,
- // perhaps only diagnostics, if those, will remain.
- let _ignore = tcx.dep_graph.in_ignore();
match Self::try_get_with(tcx, span, key, |_| ()) {
Ok(()) => {}
Err(e) => tcx.report_cycle(e)
tcx: $tcx,
input: $input,
output: ($($output)*
- $(#[$attr])* $($pub)* $name: RefCell<DepTrackingMap<queries::$name<$tcx>>>,)
+ $(#[$attr])* $($pub)* $name: RefCell<QueryMap<queries::$name<$tcx>>>,)
}
};
/// True if this is a foreign item (i.e., linked via `extern { ... }`).
[] is_foreign_item: IsForeignItem(DefId) -> bool,
+ /// True if this is a default impl (aka impl Foo for ..)
+ [] is_default_impl: ItemSignature(DefId) -> bool,
+
/// Get a map with the variance of every item; use `item_variance`
/// instead.
[] crate_variances: crate_variances(CrateNum) -> Rc<ty::CrateVariancesMap>,
pub pos: usize,
}
-/// Describes the fragment-state associated with a NodeId.
-///
-/// Currently only unfragmented paths have entries in the table,
-/// but longer-term this enum is expected to expand to also
-/// include data for fragmented paths.
-#[derive(Copy, Clone, Debug)]
-pub enum FragmentInfo {
- Moved { var: NodeId, move_expr: NodeId },
- Assigned { var: NodeId, assign_expr: NodeId, assignee_id: NodeId },
-}
-
// Flags that we track on types. These flags are propagated upwards
// through the type during type construction, so that we can quickly
// check whether the type has various kinds of types in it without
/// if not a structure at all. Corresponds to the only possible unsized
/// field, and its type can be used to determine unsizing strategy.
pub fn struct_tail(self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
- while let TyAdt(def, substs) = ty.sty {
- if !def.is_struct() {
- break;
- }
- match def.struct_variant().fields.last() {
- Some(f) => ty = f.ty(self, substs),
- None => break,
+ loop {
+ match ty.sty {
+ ty::TyAdt(def, substs) => {
+ if !def.is_struct() {
+ break;
+ }
+ match def.struct_variant().fields.last() {
+ Some(f) => ty = f.ty(self, substs),
+ None => break,
+ }
+ }
+
+ ty::TyTuple(tys, _) => {
+ if let Some((&last_ty, _)) = tys.split_last() {
+ ty = last_ty;
+ } else {
+ break;
+ }
+ }
+
+ _ => {
+ break;
+ }
}
}
ty
bug!("empty_substs_for_def_id: {:?} has type parameters", item_def_id)
})
}
+
+ pub fn const_usize(&self, val: u16) -> ConstInt {
+ match self.sess.target.uint_type {
+ ast::UintTy::U16 => ConstInt::Usize(ConstUsize::Us16(val as u16)),
+ ast::UintTy::U32 => ConstInt::Usize(ConstUsize::Us32(val as u32)),
+ ast::UintTy::U64 => ConstInt::Usize(ConstUsize::Us64(val as u64)),
+ _ => bug!(),
+ }
+ }
}
pub struct TypeIdHasher<'a, 'gcx: 'a+'tcx, 'tcx: 'a, W> {
+++ /dev/null
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Helper routines used for fragmenting structural paths due to moves for
-//! tracking drop obligations. Please see the extensive comments in the
-//! section "Structural fragments" in `README.md`.
-
-use self::Fragment::*;
-
-use borrowck::InteriorKind::{InteriorField, InteriorElement};
-use borrowck::{self, LoanPath};
-use borrowck::LoanPathKind::{LpVar, LpUpvar, LpDowncast, LpExtend};
-use borrowck::LoanPathElem::{LpDeref, LpInterior};
-use borrowck::move_data::InvalidMovePathIndex;
-use borrowck::move_data::{MoveData, MovePathIndex};
-use rustc::hir::def_id::{DefId};
-use rustc::ty::{self, AdtKind, TyCtxt};
-use rustc::middle::mem_categorization as mc;
-
-use std::mem;
-use std::rc::Rc;
-use syntax::ast;
-use syntax_pos::DUMMY_SP;
-
-#[derive(PartialEq, Eq, PartialOrd, Ord)]
-enum Fragment {
- // This represents the path described by the move path index
- Just(MovePathIndex),
-
- // This represents the collection of all but one of the elements
- // from an array at the path described by the move path index.
- // Note that attached MovePathIndex should have mem_categorization
- // of InteriorElement (i.e. array dereference `&foo[..]`).
- AllButOneFrom(MovePathIndex),
-}
-
-impl Fragment {
- fn loan_path_repr(&self, move_data: &MoveData) -> String {
- let lp = |mpi| move_data.path_loan_path(mpi);
- match *self {
- Just(mpi) => format!("{:?}", lp(mpi)),
- AllButOneFrom(mpi) => format!("$(allbutone {:?})", lp(mpi)),
- }
- }
-
- fn loan_path_user_string(&self, move_data: &MoveData) -> String {
- let lp = |mpi| move_data.path_loan_path(mpi);
- match *self {
- Just(mpi) => lp(mpi).to_string(),
- AllButOneFrom(mpi) => format!("$(allbutone {})", lp(mpi)),
- }
- }
-}
-
-pub fn build_unfragmented_map(this: &mut borrowck::BorrowckCtxt,
- move_data: &MoveData,
- id: ast::NodeId) {
- let fr = &move_data.fragments.borrow();
-
- // For now, don't care about other kinds of fragments; the precise
- // classfication of all paths for non-zeroing *drop* needs them,
- // but the loose approximation used by non-zeroing moves does not.
- let moved_leaf_paths = fr.moved_leaf_paths();
- let assigned_leaf_paths = fr.assigned_leaf_paths();
-
- let mut fragment_infos = Vec::with_capacity(moved_leaf_paths.len());
-
- let find_var_id = |move_path_index: MovePathIndex| -> Option<ast::NodeId> {
- let lp = move_data.path_loan_path(move_path_index);
- match lp.kind {
- LpVar(var_id) => Some(var_id),
- LpUpvar(ty::UpvarId { var_id, closure_expr_id }) => {
- // The `var_id` is unique *relative to* the current function.
- // (Check that we are indeed talking about the same function.)
- assert_eq!(id, closure_expr_id);
- Some(var_id)
- }
- LpDowncast(..) | LpExtend(..) => {
- // This simple implementation of non-zeroing move does
- // not attempt to deal with tracking substructure
- // accurately in the general case.
- None
- }
- }
- };
-
- let moves = move_data.moves.borrow();
- for &move_path_index in moved_leaf_paths {
- let var_id = match find_var_id(move_path_index) {
- None => continue,
- Some(var_id) => var_id,
- };
-
- move_data.each_applicable_move(move_path_index, |move_index| {
- let info = ty::FragmentInfo::Moved {
- var: var_id,
- move_expr: moves[move_index.get()].id,
- };
- debug!("fragment_infos push({:?} \
- due to move_path_index: {} move_index: {}",
- info, move_path_index.get(), move_index.get());
- fragment_infos.push(info);
- true
- });
- }
-
- for &move_path_index in assigned_leaf_paths {
- let var_id = match find_var_id(move_path_index) {
- None => continue,
- Some(var_id) => var_id,
- };
-
- let var_assigns = move_data.var_assignments.borrow();
- for var_assign in var_assigns.iter()
- .filter(|&assign| assign.path == move_path_index)
- {
- let info = ty::FragmentInfo::Assigned {
- var: var_id,
- assign_expr: var_assign.id,
- assignee_id: var_assign.assignee_id,
- };
- debug!("fragment_infos push({:?} due to var_assignment", info);
- fragment_infos.push(info);
- }
- }
-
- let mut fraginfo_map = this.tcx.fragment_infos.borrow_mut();
- let fn_did = this.tcx.hir.local_def_id(id);
- let prev = fraginfo_map.insert(fn_did, fragment_infos);
- assert!(prev.is_none());
-}
-
-pub struct FragmentSets {
- /// During move_data construction, `moved_leaf_paths` tracks paths
- /// that have been used directly by being moved out of. When
- /// move_data construction has been completed, `moved_leaf_paths`
- /// tracks such paths that are *leaf fragments* (e.g. `a.j` if we
- /// never move out any child like `a.j.x`); any parent paths
- /// (e.g. `a` for the `a.j` example) are moved over to
- /// `parents_of_fragments`.
- moved_leaf_paths: Vec<MovePathIndex>,
-
- /// `assigned_leaf_paths` tracks paths that have been used
- /// directly by being overwritten, but is otherwise much like
- /// `moved_leaf_paths`.
- assigned_leaf_paths: Vec<MovePathIndex>,
-
- /// `parents_of_fragments` tracks paths that are definitely
- /// parents of paths that have been moved.
- ///
- /// FIXME(pnkfelix) probably do not want/need
- /// `parents_of_fragments` at all, if we can avoid it.
- ///
- /// Update: I do not see a way to avoid it. Maybe just remove
- /// above fixme, or at least document why doing this may be hard.
- parents_of_fragments: Vec<MovePathIndex>,
-
- /// During move_data construction (specifically the
- /// fixup_fragment_sets call), `unmoved_fragments` tracks paths
- /// that have been "left behind" after a sibling has been moved or
- /// assigned. When move_data construction has been completed,
- /// `unmoved_fragments` tracks paths that were *only* results of
- /// being left-behind, and never directly moved themselves.
- unmoved_fragments: Vec<Fragment>,
-}
-
-impl FragmentSets {
- pub fn new() -> FragmentSets {
- FragmentSets {
- unmoved_fragments: Vec::new(),
- moved_leaf_paths: Vec::new(),
- assigned_leaf_paths: Vec::new(),
- parents_of_fragments: Vec::new(),
- }
- }
-
- pub fn moved_leaf_paths(&self) -> &[MovePathIndex] {
- &self.moved_leaf_paths
- }
-
- pub fn assigned_leaf_paths(&self) -> &[MovePathIndex] {
- &self.assigned_leaf_paths
- }
-
- pub fn add_move(&mut self, path_index: MovePathIndex) {
- self.moved_leaf_paths.push(path_index);
- }
-
- pub fn add_assignment(&mut self, path_index: MovePathIndex) {
- self.assigned_leaf_paths.push(path_index);
- }
-}
-
-pub fn instrument_move_fragments<'a, 'tcx>(this: &MoveData<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- id: ast::NodeId) {
- let span_err = tcx.hir.attrs(id).iter()
- .any(|a| a.check_name("rustc_move_fragments"));
- let print = tcx.sess.opts.debugging_opts.print_move_fragments;
-
- if !span_err && !print { return; }
-
- let sp = tcx.hir.span(id);
-
- let instrument_all_paths = |kind, vec_rc: &Vec<MovePathIndex>| {
- for (i, mpi) in vec_rc.iter().enumerate() {
- let lp = || this.path_loan_path(*mpi);
- if span_err {
- tcx.sess.span_err(sp, &format!("{}: `{}`", kind, lp()));
- }
- if print {
- println!("id:{} {}[{}] `{}`", id, kind, i, lp());
- }
- }
- };
-
- let instrument_all_fragments = |kind, vec_rc: &Vec<Fragment>| {
- for (i, f) in vec_rc.iter().enumerate() {
- let render = || f.loan_path_user_string(this);
- if span_err {
- tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render()));
- }
- if print {
- println!("id:{} {}[{}] `{}`", id, kind, i, render());
- }
- }
- };
-
- let fragments = this.fragments.borrow();
- instrument_all_paths("moved_leaf_path", &fragments.moved_leaf_paths);
- instrument_all_fragments("unmoved_fragment", &fragments.unmoved_fragments);
- instrument_all_paths("parent_of_fragments", &fragments.parents_of_fragments);
- instrument_all_paths("assigned_leaf_path", &fragments.assigned_leaf_paths);
-}
-
-/// Normalizes the fragment sets in `this`; i.e., removes duplicate entries, constructs the set of
-/// parents, and constructs the left-over fragments.
-///
-/// Note: "left-over fragments" means paths that were not directly referenced in moves nor
-/// assignments, but must nonetheless be tracked as potential drop obligations.
-pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-
- let mut fragments = this.fragments.borrow_mut();
-
- // Swap out contents of fragments so that we can modify the fields
- // without borrowing the common fragments.
- let mut unmoved = mem::replace(&mut fragments.unmoved_fragments, vec![]);
- let mut parents = mem::replace(&mut fragments.parents_of_fragments, vec![]);
- let mut moved = mem::replace(&mut fragments.moved_leaf_paths, vec![]);
- let mut assigned = mem::replace(&mut fragments.assigned_leaf_paths, vec![]);
-
- let path_lps = |mpis: &[MovePathIndex]| -> Vec<String> {
- mpis.iter().map(|mpi| format!("{:?}", this.path_loan_path(*mpi))).collect()
- };
-
- let frag_lps = |fs: &[Fragment]| -> Vec<String> {
- fs.iter().map(|f| f.loan_path_repr(this)).collect()
- };
-
- // First, filter out duplicates
- moved.sort();
- moved.dedup();
- debug!("fragments 1 moved: {:?}", path_lps(&moved));
-
- assigned.sort();
- assigned.dedup();
- debug!("fragments 1 assigned: {:?}", path_lps(&assigned));
-
- // Second, build parents from the moved and assigned.
- for m in &moved {
- let mut p = this.path_parent(*m);
- while p != InvalidMovePathIndex {
- parents.push(p);
- p = this.path_parent(p);
- }
- }
- for a in &assigned {
- let mut p = this.path_parent(*a);
- while p != InvalidMovePathIndex {
- parents.push(p);
- p = this.path_parent(p);
- }
- }
-
- parents.sort();
- parents.dedup();
- debug!("fragments 2 parents: {:?}", path_lps(&parents));
-
- // Third, filter the moved and assigned fragments down to just the non-parents
- moved.retain(|f| non_member(*f, &parents));
- debug!("fragments 3 moved: {:?}", path_lps(&moved));
-
- assigned.retain(|f| non_member(*f, &parents));
- debug!("fragments 3 assigned: {:?}", path_lps(&assigned));
-
- // Fourth, build the leftover from the moved, assigned, and parents.
- for m in &moved {
- let lp = this.path_loan_path(*m);
- add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
- }
- for a in &assigned {
- let lp = this.path_loan_path(*a);
- add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
- }
- for p in &parents {
- let lp = this.path_loan_path(*p);
- add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
- }
-
- unmoved.sort();
- unmoved.dedup();
- debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved));
-
- // Fifth, filter the leftover fragments down to its core.
- unmoved.retain(|f| match *f {
- AllButOneFrom(_) => true,
- Just(mpi) => non_member(mpi, &parents) &&
- non_member(mpi, &moved) &&
- non_member(mpi, &assigned)
- });
- debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved));
-
- // Swap contents back in.
- fragments.unmoved_fragments = unmoved;
- fragments.parents_of_fragments = parents;
- fragments.moved_leaf_paths = moved;
- fragments.assigned_leaf_paths = assigned;
-
- return;
-
- fn non_member(elem: MovePathIndex, set: &[MovePathIndex]) -> bool {
- match set.binary_search(&elem) {
- Ok(_) => false,
- Err(_) => true,
- }
- }
-}
-
-/// Adds all of the precisely-tracked siblings of `lp` as potential move paths of interest. For
-/// example, if `lp` represents `s.x.j`, then adds moves paths for `s.x.i` and `s.x.k`, the
-/// siblings of `s.x.j`.
-fn add_fragment_siblings<'a, 'tcx>(this: &MoveData<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- gathered_fragments: &mut Vec<Fragment>,
- lp: Rc<LoanPath<'tcx>>,
- origin_id: Option<ast::NodeId>) {
- match lp.kind {
- LpVar(_) | LpUpvar(..) => {} // Local variables have no siblings.
-
- // Consuming a downcast is like consuming the original value, so propage inward.
- LpDowncast(ref loan_parent, _) => {
- add_fragment_siblings(this, tcx, gathered_fragments, loan_parent.clone(), origin_id);
- }
-
- // *LV for Unique consumes the contents of the box (at
- // least when it is non-copy...), so propagate inward.
- LpExtend(ref loan_parent, _, LpDeref(mc::Unique)) => {
- add_fragment_siblings(this, tcx, gathered_fragments, loan_parent.clone(), origin_id);
- }
-
- // *LV for unsafe and borrowed pointers do not consume their loan path, so stop here.
- LpExtend(.., LpDeref(mc::UnsafePtr(..))) |
- LpExtend(.., LpDeref(mc::Implicit(..))) |
- LpExtend(.., LpDeref(mc::BorrowedPtr(..))) => {}
-
- // FIXME (pnkfelix): LV[j] should be tracked, at least in the
- // sense of we will track the remaining drop obligation of the
- // rest of the array.
- //
- // Well, either that or LV[j] should be made illegal.
- // But even then, we will need to deal with destructuring
- // bind.
- //
- // Anyway, for now: LV[j] is not tracked precisely
- LpExtend(.., LpInterior(_, InteriorElement(..))) => {
- let mp = this.move_path(tcx, lp.clone());
- gathered_fragments.push(AllButOneFrom(mp));
- }
-
- // field access LV.x and tuple access LV#k are the cases
- // we are interested in
- LpExtend(ref loan_parent, mc,
- LpInterior(_, InteriorField(ref field_name))) => {
- let enum_variant_info = match loan_parent.kind {
- LpDowncast(ref loan_parent_2, variant_def_id) =>
- Some((variant_def_id, loan_parent_2.clone())),
- LpExtend(..) | LpVar(..) | LpUpvar(..) =>
- None,
- };
- add_fragment_siblings_for_extension(
- this,
- tcx,
- gathered_fragments,
- loan_parent, mc, field_name, &lp, origin_id, enum_variant_info);
- }
- }
-}
-
-/// We have determined that `origin_lp` destructures to LpExtend(parent, original_field_name).
-/// Based on this, add move paths for all of the siblings of `origin_lp`.
-fn add_fragment_siblings_for_extension<'a, 'tcx>(this: &MoveData<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- gathered_fragments: &mut Vec<Fragment>,
- parent_lp: &Rc<LoanPath<'tcx>>,
- mc: mc::MutabilityCategory,
- origin_field_name: &mc::FieldName,
- origin_lp: &Rc<LoanPath<'tcx>>,
- origin_id: Option<ast::NodeId>,
- enum_variant_info: Option<(DefId,
- Rc<LoanPath<'tcx>>)>) {
- let parent_ty = parent_lp.to_type();
-
- let mut add_fragment_sibling_local = |field_name, variant_did| {
- add_fragment_sibling_core(
- this, tcx, gathered_fragments, parent_lp.clone(), mc, field_name, origin_lp,
- variant_did);
- };
-
- match parent_ty.sty {
- ty::TyTuple(ref v, _) => {
- let tuple_idx = match *origin_field_name {
- mc::PositionalField(tuple_idx) => tuple_idx,
- mc::NamedField(_) =>
- bug!("tuple type {:?} should not have named fields.",
- parent_ty),
- };
- let tuple_len = v.len();
- for i in 0..tuple_len {
- if i == tuple_idx { continue }
- let field_name = mc::PositionalField(i);
- add_fragment_sibling_local(field_name, None);
- }
- }
-
- ty::TyAdt(def, ..) => match def.adt_kind() {
- AdtKind::Struct => {
- match *origin_field_name {
- mc::NamedField(ast_name) => {
- for f in &def.struct_variant().fields {
- if f.name == ast_name {
- continue;
- }
- let field_name = mc::NamedField(f.name);
- add_fragment_sibling_local(field_name, None);
- }
- }
- mc::PositionalField(tuple_idx) => {
- for (i, _f) in def.struct_variant().fields.iter().enumerate() {
- if i == tuple_idx {
- continue
- }
- let field_name = mc::PositionalField(i);
- add_fragment_sibling_local(field_name, None);
- }
- }
- }
- }
- AdtKind::Union => {
- // Do nothing, all union fields are moved/assigned together.
- }
- AdtKind::Enum => {
- let variant = match enum_variant_info {
- Some((vid, ref _lp2)) => def.variant_with_id(vid),
- None => {
- assert!(def.is_univariant());
- &def.variants[0]
- }
- };
- match *origin_field_name {
- mc::NamedField(ast_name) => {
- for field in &variant.fields {
- if field.name == ast_name {
- continue;
- }
- let field_name = mc::NamedField(field.name);
- add_fragment_sibling_local(field_name, Some(variant.did));
- }
- }
- mc::PositionalField(tuple_idx) => {
- for (i, _f) in variant.fields.iter().enumerate() {
- if tuple_idx == i {
- continue;
- }
- let field_name = mc::PositionalField(i);
- add_fragment_sibling_local(field_name, None);
- }
- }
- }
- }
- },
-
- ref ty => {
- let span = origin_id.map_or(DUMMY_SP, |id| tcx.hir.span(id));
- span_bug!(span,
- "type {:?} ({:?}) is not fragmentable",
- parent_ty, ty);
- }
- }
-}
-
-/// Adds the single sibling `LpExtend(parent, new_field_name)` of `origin_lp` (the original
-/// loan-path).
-fn add_fragment_sibling_core<'a, 'tcx>(this: &MoveData<'tcx>,
- tcx: TyCtxt<'a, 'tcx, 'tcx>,
- gathered_fragments: &mut Vec<Fragment>,
- parent: Rc<LoanPath<'tcx>>,
- mc: mc::MutabilityCategory,
- new_field_name: mc::FieldName,
- origin_lp: &Rc<LoanPath<'tcx>>,
- enum_variant_did: Option<DefId>)
- -> MovePathIndex {
- let opt_variant_did = match parent.kind {
- LpDowncast(_, variant_did) => Some(variant_did),
- LpVar(..) | LpUpvar(..) | LpExtend(..) => enum_variant_did,
- };
-
- let loan_path_elem = LpInterior(opt_variant_did, InteriorField(new_field_name));
- let new_lp_type = match new_field_name {
- mc::NamedField(ast_name) =>
- tcx.named_element_ty(parent.to_type(), ast_name, opt_variant_did),
- mc::PositionalField(idx) =>
- tcx.positional_element_ty(parent.to_type(), idx, opt_variant_did),
- };
- let new_lp_variant = LpExtend(parent, mc, loan_path_elem);
- let new_lp = LoanPath::new(new_lp_variant, new_lp_type.unwrap());
- debug!("add_fragment_sibling_core(new_lp={:?}, origin_lp={:?})",
- new_lp, origin_lp);
- let mp = this.move_path(tcx, Rc::new(new_lp));
-
- // Do not worry about checking for duplicates here; we will sort
- // and dedup after all are added.
- gathered_fragments.push(Just(mp));
-
- mp
-}
use rustc_data_structures::indexed_set::IdxSetBuf;
use rustc_data_structures::indexed_vec::Idx;
use rustc_mir::util::patch::MirPatch;
-use rustc_mir::util::elaborate_drops::{DropFlagState, elaborate_drop};
+use rustc_mir::util::elaborate_drops::{DropFlagState, Unwind, elaborate_drop};
use rustc_mir::util::elaborate_drops::{DropElaborator, DropStyle, DropFlagMode};
use syntax::ast;
use syntax_pos::Span;
ctxt: self
},
terminator.source_info,
- data.is_cleanup,
location,
path,
target,
if data.is_cleanup {
- None
+ Unwind::InCleanup
} else {
- Some(Option::unwrap_or(unwind, resume_block))
+ Unwind::To(Option::unwrap_or(unwind, resume_block))
},
bb)
}
let bb = loc.block;
let data = &self.mir[bb];
let terminator = data.terminator();
+ assert!(!data.is_cleanup, "DropAndReplace in unwind path not supported");
let assign = Statement {
kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
kind: TerminatorKind::Goto { target: target },
..*terminator
}),
- is_cleanup: data.is_cleanup,
+ is_cleanup: false,
});
match self.move_data().rev_lookup.find(location) {
ctxt: self
},
terminator.source_info,
- data.is_cleanup,
location,
path,
target,
- Some(unwind),
+ Unwind::To(unwind),
bb);
on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
self.set_drop_flag(Location { block: target, statement_index: 0 },
Rvalue::Ref(..) |
Rvalue::Discriminant(..) |
Rvalue::Len(..) |
- Rvalue::Box(..) => {
+ Rvalue::NullaryOp(NullOp::SizeOf, _) |
+ Rvalue::NullaryOp(NullOp::Box, _) => {
// This returns an rvalue with uninitialized contents. We can't
// move out of it here because it is an rvalue - assignments always
// completely initialize their lvalue.
attributes: &[ast::Attribute]) {
let tcx = bcx.tcx;
let def_id = tcx.hir.local_def_id(id);
- debug!("borrowck_mir({}) UNIMPLEMENTED", tcx.item_path_str(def_id));
+ debug!("borrowck_mir({:?}) UNIMPLEMENTED", def_id);
// It is safe for us to borrow `mir_validated()`: `optimized_mir`
// steals it, but it forces the `borrowck` query.
move_data: flowed_moves } =
build_borrowck_dataflow_data(bccx, &cfg, body_id);
- move_data::fragments::instrument_move_fragments(&flowed_moves.move_data,
- bccx.tcx,
- owner_id);
- move_data::fragments::build_unfragmented_map(bccx,
- &flowed_moves.move_data,
- owner_id);
-
check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
}
use rustc::hir;
use rustc::hir::intravisit::IdRange;
-#[path="fragments.rs"]
-pub mod fragments;
-
pub struct MoveData<'tcx> {
/// Move paths. See section "Move paths" in `README.md`.
pub paths: RefCell<Vec<MovePath<'tcx>>>,
/// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
pub assignee_ids: RefCell<NodeSet>,
-
- /// Path-fragments from moves in to or out of parts of structured data.
- pub fragments: RefCell<fragments::FragmentSets>,
}
pub struct FlowedMoveData<'a, 'tcx: 'a> {
var_assignments: RefCell::new(Vec::new()),
variant_matches: RefCell::new(Vec::new()),
assignee_ids: RefCell::new(NodeSet()),
- fragments: RefCell::new(fragments::FragmentSets::new()),
}
}
let path_index = self.move_path(tcx, lp.clone());
let move_index = MoveIndex(self.moves.borrow().len());
- self.fragments.borrow_mut().add_move(path_index);
-
let next_move = self.path_first_move(path_index);
self.set_path_first_move(path_index, move_index);
let path_index = self.move_path(tcx, lp.clone());
- self.fragments.borrow_mut().add_assignment(path_index);
-
match mode {
MutateMode::Init | MutateMode::JustWrite => {
self.assignee_ids.borrow_mut().insert(assignee_id);
let path_index = self.move_path(tcx, lp.clone());
let base_path_index = self.move_path(tcx, base_lp.clone());
- self.fragments.borrow_mut().add_assignment(path_index);
-
let variant_match = VariantMatch {
path: path_index,
base_path: base_path_index,
self.variant_matches.borrow_mut().push(variant_match);
}
- fn fixup_fragment_sets(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) {
- fragments::fixup_fragment_sets(self, tcx)
- }
-
/// Adds the gen/kills for the various moves and
/// assignments into the provided data flow contexts.
/// Moves are generated by moves and killed by assignments and
id_range,
move_data.var_assignments.borrow().len());
- move_data.fixup_fragment_sets(tcx);
-
move_data.add_gen_kills(bccx,
&mut dfcx_moves,
&mut dfcx_assign);
}
}
+impl<I: Idx, T> Default for IndexVec<I, T> {
+ #[inline]
+ fn default() -> Self {
+ Self::new()
+ }
+}
+
impl<I: Idx, T> Extend<T> for IndexVec<I, T> {
#[inline]
fn extend<J: IntoIterator<Item = T>>(&mut self, iter: J) {
use std::cell::RefCell;
use std::hash::Hash;
-use std::sync::Arc;
use rustc::dep_graph::DepNode;
use rustc::hir;
-use rustc::hir::def_id::{LOCAL_CRATE, CRATE_DEF_INDEX, DefId};
+use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
use rustc::hir::itemlikevisit::ItemLikeVisitor;
use rustc::ich::{Fingerprint, StableHashingContext};
use rustc::ty::TyCtxt;
// We want to incoporate these into the
// SVH.
}
- DepNode::FileMap(..) => {
- // These don't make a semantic
- // difference, filter them out.
- return None
- }
DepNode::AllLocalTraitImpls => {
// These are already covered by hashing
// the HIR.
visitor.compute_and_store_ich_for_item_like(DepNode::HirBody(def_id), true, macro_def);
}
- for filemap in tcx.sess
- .codemap()
- .files_untracked()
- .iter()
- .filter(|fm| !fm.is_imported()) {
- assert_eq!(LOCAL_CRATE.as_u32(), filemap.crate_of_origin);
- let def_id = DefId {
- krate: LOCAL_CRATE,
- index: CRATE_DEF_INDEX,
- };
- let name = Arc::new(filemap.name.clone());
- let dep_node = DepNode::FileMap(def_id, name);
- let mut hasher = IchHasher::new();
- filemap.hash_stable(&mut visitor.hcx, &mut hasher);
- let fingerprint = hasher.finish();
- visitor.hashes.insert(dep_node, fingerprint);
- }
-
visitor.compute_and_store_ich_for_trait_impls(krate);
});
match *dep_node {
DepNode::Krate |
DepNode::Hir(_) |
- DepNode::HirBody(_) |
- DepNode::FileMap(..) =>
+ DepNode::HirBody(_) =>
true,
DepNode::MetaData(def_id) |
DepNode::GlobalMetaData(def_id, _) => !def_id.is_local(),
Some(self.incremental_hashes_map[dep_node])
}
- DepNode::FileMap(def_id, ref name) => {
- if def_id.is_local() {
- // We will have been able to retrace the DefId (which is
- // always the local CRATE_DEF_INDEX), but the file with the
- // given name might have been removed, so we use get() in
- // order to allow for that case.
- self.incremental_hashes_map.get(dep_node).map(|x| *x)
- } else {
- Some(self.metadata_hash(DepNode::FileMap(def_id, name.clone()),
- def_id.krate,
- |this| &mut this.global_metadata_hashes))
- }
- }
-
// MetaData from other crates is an *input* to us.
// MetaData nodes from *our* crates are an *output*; we
// don't hash them, but we do compute a hash for them and
let def_id = DefId { krate: cnum, index: CRATE_DEF_INDEX };
let dep_node = match dep_node {
DepNode::GlobalMetaData(_, kind) => DepNode::GlobalMetaData(def_id, kind),
- DepNode::FileMap(_, name) => DepNode::FileMap(def_id, name),
other => {
bug!("unexpected DepNode variant: {:?}", other)
}
}
pub struct MissingDoc {
- /// Stack of IDs of struct definitions.
- struct_def_stack: Vec<ast::NodeId>,
-
- /// True if inside variant definition
- in_variant: bool,
-
/// Stack of whether #[doc(hidden)] is set
/// at each level which has lint attributes.
doc_hidden_stack: Vec<bool>,
impl MissingDoc {
pub fn new() -> MissingDoc {
MissingDoc {
- struct_def_stack: vec![],
- in_variant: false,
doc_hidden_stack: vec![false],
private_traits: HashSet::new(),
}
self.doc_hidden_stack.pop().expect("empty doc_hidden_stack");
}
- fn check_struct_def(&mut self,
- _: &LateContext,
- _: &hir::VariantData,
- _: ast::Name,
- _: &hir::Generics,
- item_id: ast::NodeId) {
- self.struct_def_stack.push(item_id);
- }
-
- fn check_struct_def_post(&mut self,
- _: &LateContext,
- _: &hir::VariantData,
- _: ast::Name,
- _: &hir::Generics,
- item_id: ast::NodeId) {
- let popped = self.struct_def_stack.pop().expect("empty struct_def_stack");
- assert!(popped == item_id);
- }
-
fn check_crate(&mut self, cx: &LateContext, krate: &hir::Crate) {
self.check_missing_docs_attrs(cx, None, &krate.attrs, krate.span, "crate");
}
fn check_struct_field(&mut self, cx: &LateContext, sf: &hir::StructField) {
if !sf.is_positional() {
- if sf.vis == hir::Public || self.in_variant {
- let cur_struct_def = *self.struct_def_stack
- .last()
- .expect("empty struct_def_stack");
- self.check_missing_docs_attrs(cx,
- Some(cur_struct_def),
- &sf.attrs,
- sf.span,
- "a struct field")
- }
+ self.check_missing_docs_attrs(cx,
+ Some(sf.id),
+ &sf.attrs,
+ sf.span,
+ "a struct field")
}
}
&v.node.attrs,
v.span,
"a variant");
- assert!(!self.in_variant);
- self.in_variant = true;
- }
-
- fn check_variant_post(&mut self, _: &LateContext, _: &hir::Variant, _: &hir::Generics) {
- assert!(self.in_variant);
- self.in_variant = false;
}
}
id: LintId::of(MISSING_FRAGMENT_SPECIFIER),
reference: "issue #40107 <https://github.com/rust-lang/rust/issues/40107>",
},
+ FutureIncompatibleInfo {
+ id: LintId::of(PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES),
+ reference: "issue #42238 <https://github.com/rust-lang/rust/issues/42238>",
+ },
FutureIncompatibleInfo {
id: LintId::of(ANONYMOUS_PARAMETERS),
reference: "issue #41686 <https://github.com/rust-lang/rust/issues/41686>",
closure_type => { cdata.closure_ty(def_id.index, tcx) }
inherent_impls => { Rc::new(cdata.get_inherent_implementations_for_type(def_id.index)) }
is_foreign_item => { cdata.is_foreign_item(def_id.index) }
+ is_default_impl => { cdata.is_default_impl(def_id.index) }
describe_def => { cdata.get_def(def_id.index) }
def_span => { cdata.get_span(def_id.index, &tcx.sess) }
stability => { cdata.get_stability(def_id.index) }
self.get_crate_data(did.krate).is_const_fn(did.index)
}
- fn is_default_impl(&self, impl_did: DefId) -> bool {
- self.dep_graph.read(DepNode::MetaData(impl_did));
- self.get_crate_data(impl_did.krate).is_default_impl(impl_did.index)
- }
-
fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool
{
self.do_is_statically_included_foreign_item(def_id)
}
self.dep_graph.read(DepNode::MetaData(def_id));
- debug!("item_body({}): inlining item", tcx.item_path_str(def_id));
+ debug!("item_body({:?}): inlining item", def_id);
self.get_crate_data(def_id.krate).item_body(tcx, def_id.index)
}
drop(visible_parent_map);
self.visible_parent_map.borrow()
}
-}
\ No newline at end of file
+}
use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefIndex, DefId, LOCAL_CRATE};
use rustc::hir::map::definitions::DefPathTable;
use rustc::dep_graph::{DepNode, GlobalMetaDataKind};
-use rustc::ich::{StableHashingContext, Fingerprint};
+use rustc::ich::Fingerprint;
use rustc::middle::dependency_format::Linkage;
use rustc::middle::lang_items;
use rustc::mir;
use rustc::util::nodemap::{FxHashMap, NodeSet};
use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
-use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
use std::hash::Hash;
use std::intrinsics;
use std::io::Cursor;
use std::path::Path;
use std::rc::Rc;
-use std::sync::Arc;
use std::u32;
use syntax::ast::{self, CRATE_NODE_ID};
use syntax::codemap::Spanned;
let codemap = self.tcx.sess.codemap();
let all_filemaps = codemap.files();
- let hcx = &mut StableHashingContext::new(self.tcx);
let (working_dir, working_dir_was_remapped) = self.tcx.sess.working_dir.clone();
let adapted = all_filemaps.iter()
adapted.name = abs_path;
Rc::new(adapted)
}
- });
-
- let filemaps: Vec<_> = if self.compute_ich {
- adapted.inspect(|filemap| {
- let mut hasher = StableHasher::new();
- filemap.hash_stable(hcx, &mut hasher);
- let fingerprint = hasher.finish();
- let dep_node = DepNode::FileMap((), Arc::new(filemap.name.clone()));
- self.metadata_hashes.global_hashes.push((dep_node, fingerprint));
- }).collect()
- } else {
- adapted.collect()
- };
+ })
+ .collect::<Vec<_>>();
- self.lazy_seq_ref(filemaps.iter().map(|fm| &**fm))
+ self.lazy_seq_ref(adapted.iter().map(|rc| &**rc))
}
fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
tcx: tcx,
ecx: ecx,
hcx: if compute_ich {
- Some((StableHashingContext::new(tcx), StableHasher::new()))
+ // We are always hashing spans for things in metadata because
+            // we don't know if a downstream crate will use them or not.
+ // Except when -Zquery-dep-graph is specified because we don't
+ // want to mess up our tests.
+ let hcx = if tcx.sess.opts.debugging_opts.query_dep_graph {
+ StableHashingContext::new(tcx)
+ } else {
+ StableHashingContext::new(tcx).force_span_hashing()
+ };
+
+ Some((hcx, StableHasher::new()))
} else {
None
}
let value = this.hir.mirror(value);
let result = this.temp(expr.ty, expr_span);
// to start, malloc some memory of suitable type (thus far, uninitialized):
- this.cfg.push_assign(block, source_info, &result, Rvalue::Box(value.ty));
+ let box_ = Rvalue::NullaryOp(NullOp::Box, value.ty);
+ this.cfg.push_assign(block, source_info, &result, box_);
this.in_scope(value_extents, block, |this| {
// schedule a shallow free of that memory, lest we unwind:
this.schedule_box_free(expr_span, value_extents, &result, value.ty);
elaborate_drops::elaborate_drop(
&mut elaborator,
source_info,
- false,
&dropee,
(),
return_block,
- Some(resume_block),
+ elaborate_drops::Unwind::To(resume_block),
START_BLOCK
);
elaborator.patch
Rvalue::CheckedBinaryOp(..) |
Rvalue::UnaryOp(..) |
Rvalue::Discriminant(..) |
- Rvalue::Box(..) |
+ Rvalue::NullaryOp(..) |
Rvalue::Aggregate(..) => {
// These variants don't contain regions.
}
/// Qualify a whole const, static initializer or const fn.
fn qualify_const(&mut self) -> Qualif {
- debug!("qualifying {} {}", self.mode, self.tcx.item_path_str(self.def_id));
+ debug!("qualifying {} {:?}", self.mode, self.def_id);
let mir = self.mir;
match *rvalue {
Rvalue::Use(_) |
Rvalue::Repeat(..) |
- Rvalue::UnaryOp(..) |
+ Rvalue::UnaryOp(UnOp::Neg, _) |
+ Rvalue::UnaryOp(UnOp::Not, _) |
+ Rvalue::NullaryOp(NullOp::SizeOf, _) |
Rvalue::CheckedBinaryOp(..) |
Rvalue::Cast(CastKind::ReifyFnPointer, ..) |
Rvalue::Cast(CastKind::UnsafeFnPointer, ..) |
if let ty::TyRawPtr(_) = lhs.ty(self.mir, self.tcx).sty {
assert!(op == BinOp::Eq || op == BinOp::Ne ||
op == BinOp::Le || op == BinOp::Lt ||
- op == BinOp::Ge || op == BinOp::Gt);
+ op == BinOp::Ge || op == BinOp::Gt ||
+ op == BinOp::Offset);
self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
}
}
- Rvalue::Box(_) => {
+ Rvalue::NullaryOp(NullOp::Box, _) => {
self.add(Qualif::NOT_CONST);
if self.mode != Mode::Fn {
struct_span_err!(self.tcx.sess, self.span, E0010,
mir: &mut Mir<'tcx>) {
let item_id = src.item_id();
let def_id = tcx.hir.local_def_id(item_id);
- debug!("run_pass: {}", tcx.item_path_str(def_id));
+ debug!("run_pass: {:?}", def_id);
if tcx.sess.err_count() > 0 {
// compiling a broken program can obviously result in a
use std::fmt;
use rustc::hir;
use rustc::mir::*;
-use rustc::middle::const_val::ConstInt;
+use rustc::middle::const_val::{ConstInt, ConstVal};
use rustc::middle::lang_items;
use rustc::ty::{self, Ty};
use rustc::ty::subst::{Kind, Substs};
Deep
}
+#[derive(Copy, Clone, Debug)]
+pub enum Unwind {
+ To(BasicBlock),
+ InCleanup
+}
+
+impl Unwind {
+ fn is_cleanup(self) -> bool {
+ match self {
+ Unwind::To(..) => false,
+ Unwind::InCleanup => true
+ }
+ }
+
+ fn into_option(self) -> Option<BasicBlock> {
+ match self {
+ Unwind::To(bb) => Some(bb),
+ Unwind::InCleanup => None,
+ }
+ }
+
+ fn map<F>(self, f: F) -> Self where F: FnOnce(BasicBlock) -> BasicBlock {
+ match self {
+ Unwind::To(bb) => Unwind::To(f(bb)),
+ Unwind::InCleanup => Unwind::InCleanup
+ }
+ }
+}
+
pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
type Path : Copy + fmt::Debug;
elaborator: &'l mut D,
source_info: SourceInfo,
- is_cleanup: bool,
lvalue: &'l Lvalue<'tcx>,
path: D::Path,
succ: BasicBlock,
- unwind: Option<BasicBlock>,
+ unwind: Unwind,
}
pub fn elaborate_drop<'b, 'tcx, D>(
elaborator: &mut D,
source_info: SourceInfo,
- is_cleanup: bool,
lvalue: &Lvalue<'tcx>,
path: D::Path,
succ: BasicBlock,
- unwind: Option<BasicBlock>,
+ unwind: Unwind,
bb: BasicBlock)
where D: DropElaborator<'b, 'tcx>
{
- assert_eq!(unwind.is_none(), is_cleanup);
DropCtxt {
- elaborator, source_info, is_cleanup, lvalue, path, succ, unwind
+ elaborator, source_info, lvalue, path, succ, unwind
}.elaborate_drop(bb)
}
self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
location: self.lvalue.clone(),
target: self.succ,
- unwind: self.unwind
+ unwind: self.unwind.into_option(),
});
}
DropStyle::Conditional => {
- let is_cleanup = self.is_cleanup; // FIXME(#6393)
+ let unwind = self.unwind; // FIXME(#6393)
let succ = self.succ;
- let drop_bb = self.complete_drop(
- is_cleanup, Some(DropFlagMode::Deep), succ);
+ let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
target: drop_bb
});
}
fn drop_subpath(&mut self,
- is_cleanup: bool,
lvalue: &Lvalue<'tcx>,
path: Option<D::Path>,
succ: BasicBlock,
- unwind: Option<BasicBlock>)
+ unwind: Unwind)
-> BasicBlock
{
if let Some(path) = path {
DropCtxt {
elaborator: self.elaborator,
source_info: self.source_info,
- path, lvalue, succ, unwind, is_cleanup
+ path, lvalue, succ, unwind,
}.elaborated_drop_block()
} else {
debug!("drop_subpath: for rest field {:?}", lvalue);
DropCtxt {
elaborator: self.elaborator,
source_info: self.source_info,
- lvalue, succ, unwind, is_cleanup,
+ lvalue, succ, unwind,
// Using `self.path` here to condition the drop on
// our own drop flag.
path: self.path
- }.complete_drop(is_cleanup, None, succ)
+ }.complete_drop(None, succ, unwind)
}
}
/// Create one-half of the drop ladder for a list of fields, and return
- /// the list of steps in it in reverse order.
+ /// the list of steps in it in reverse order, with the first step
+ /// dropping 0 fields and so on.
///
/// `unwind_ladder` is such a list of steps in reverse order,
- /// which is called instead of the next step if the drop unwinds
- /// (the first field is never reached). If it is `None`, all
- /// unwind targets are left blank.
- fn drop_halfladder<'a>(&mut self,
- unwind_ladder: Option<&[BasicBlock]>,
- succ: BasicBlock,
- fields: &[(Lvalue<'tcx>, Option<D::Path>)],
- is_cleanup: bool)
- -> Vec<BasicBlock>
+ /// which is called if the matching step of the drop glue panics.
+ fn drop_halfladder(&mut self,
+ unwind_ladder: &[Unwind],
+ mut succ: BasicBlock,
+ fields: &[(Lvalue<'tcx>, Option<D::Path>)])
+ -> Vec<BasicBlock>
{
- let mut unwind_succ = if is_cleanup {
- None
- } else {
- self.unwind
- };
-
- let goto = TerminatorKind::Goto { target: succ };
- let mut succ = self.new_block(is_cleanup, goto);
-
- // Always clear the "master" drop flag at the bottom of the
- // ladder. This is needed because the "master" drop flag
- // protects the ADT's discriminant, which is invalidated
- // after the ADT is dropped.
- let succ_loc = Location { block: succ, statement_index: 0 };
- self.elaborator.clear_drop_flag(succ_loc, self.path, DropFlagMode::Shallow);
+ Some(succ).into_iter().chain(
+ fields.iter().rev().zip(unwind_ladder)
+ .map(|(&(ref lv, path), &unwind_succ)| {
+ succ = self.drop_subpath(lv, path, succ, unwind_succ);
+ succ
+ })
+ ).collect()
+ }
- fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
- succ = self.drop_subpath(is_cleanup, lv, path, succ, unwind_succ);
- unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
- succ
- }).collect()
+ fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
+ // Clear the "master" drop flag at the end. This is needed
+ // because the "master" drop protects the ADT's discriminant,
+ // which is invalidated after the ADT is dropped.
+ let (succ, unwind) = (self.succ, self.unwind); // FIXME(#6393)
+ (
+ self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
+ unwind.map(|unwind| {
+ self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
+ })
+ )
}
/// Create a full drop ladder, consisting of 2 connected half-drop-ladders
/// ELAB(drop location.1 [target=.c2])
/// .c2:
/// ELAB(drop location.2 [target=`self.unwind`])
+ ///
+ /// NOTE: this does not clear the master drop flag, so you need
+ /// to point succ/unwind on a `drop_ladder_bottom`.
fn drop_ladder<'a>(&mut self,
- fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>)
- -> (BasicBlock, Option<BasicBlock>)
+ fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>,
+ succ: BasicBlock,
+ unwind: Unwind)
+ -> (BasicBlock, Unwind)
{
debug!("drop_ladder({:?}, {:?})", self, fields);
debug!("drop_ladder - fields needing drop: {:?}", fields);
- let unwind_ladder = if self.is_cleanup {
- None
+ let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
+ let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
+ let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
+ halfladder.into_iter().map(Unwind::To).collect()
} else {
- let unwind = self.unwind.unwrap(); // FIXME(#6393)
- Some(self.drop_halfladder(None, unwind, &fields, true))
+ unwind_ladder
};
- let succ = self.succ; // FIXME(#6393)
- let is_cleanup = self.is_cleanup;
let normal_ladder =
- self.drop_halfladder(unwind_ladder.as_ref().map(|x| &**x),
- succ, &fields, is_cleanup);
+ self.drop_halfladder(&unwind_ladder, succ, &fields);
- (normal_ladder.last().cloned().unwrap_or(succ),
- unwind_ladder.and_then(|l| l.last().cloned()).or(self.unwind))
+ (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
}
fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
self.elaborator.field_subpath(self.path, Field::new(i)))
}).collect();
- self.drop_ladder(fields).0
+ let (succ, unwind) = self.drop_ladder_bottom();
+ self.drop_ladder(fields, succ, unwind).0
}
fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
let interior_path = self.elaborator.deref_subpath(self.path);
let succ = self.succ; // FIXME(#6393)
- let is_cleanup = self.is_cleanup;
- let succ = self.box_free_block(ty, succ, is_cleanup);
- let unwind_succ = self.unwind.map(|u| {
- self.box_free_block(ty, u, true)
+ let unwind = self.unwind;
+ let succ = self.box_free_block(ty, succ, unwind);
+ let unwind_succ = self.unwind.map(|unwind| {
+ self.box_free_block(ty, unwind, Unwind::InCleanup)
});
- self.drop_subpath(is_cleanup, &interior, interior_path, succ, unwind_succ)
+ self.drop_subpath(&interior, interior_path, succ, unwind_succ)
}
fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
source_info: self.source_info,
kind: TerminatorKind::Unreachable
}),
- is_cleanup: self.is_cleanup
+ is_cleanup: self.unwind.is_cleanup()
});
}
}
}
- fn open_drop_for_adt_contents<'a>(&mut self, adt: &'tcx ty::AdtDef,
- substs: &'tcx Substs<'tcx>)
- -> (BasicBlock, Option<BasicBlock>) {
- match adt.variants.len() {
- 1 => {
- let fields = self.move_paths_for_fields(
- self.lvalue,
- self.path,
- &adt.variants[0],
- substs
- );
- self.drop_ladder(fields)
- }
- _ => {
- let is_cleanup = self.is_cleanup;
- let succ = self.succ;
- let unwind = self.unwind; // FIXME(#6393)
+ fn open_drop_for_adt_contents(&mut self, adt: &'tcx ty::AdtDef,
+ substs: &'tcx Substs<'tcx>)
+ -> (BasicBlock, Unwind) {
+ let (succ, unwind) = self.drop_ladder_bottom();
+ if adt.variants.len() == 1 {
+ let fields = self.move_paths_for_fields(
+ self.lvalue,
+ self.path,
+ &adt.variants[0],
+ substs
+ );
+ self.drop_ladder(fields, succ, unwind)
+ } else {
+ self.open_drop_for_multivariant(adt, substs, succ, unwind)
+ }
+ }
+
+ fn open_drop_for_multivariant(&mut self, adt: &'tcx ty::AdtDef,
+ substs: &'tcx Substs<'tcx>,
+ succ: BasicBlock,
+ unwind: Unwind)
+ -> (BasicBlock, Unwind) {
+ let mut values = Vec::with_capacity(adt.variants.len());
+ let mut normal_blocks = Vec::with_capacity(adt.variants.len());
+ let mut unwind_blocks = if unwind.is_cleanup() {
+ None
+ } else {
+ Some(Vec::with_capacity(adt.variants.len()))
+ };
- let mut values = Vec::with_capacity(adt.variants.len());
- let mut normal_blocks = Vec::with_capacity(adt.variants.len());
- let mut unwind_blocks = if is_cleanup {
- None
- } else {
- Some(Vec::with_capacity(adt.variants.len()))
- };
- let mut otherwise = None;
- let mut unwind_otherwise = None;
- for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
- let subpath = self.elaborator.downcast_subpath(
- self.path, variant_index);
- if let Some(variant_path) = subpath {
- let base_lv = self.lvalue.clone().elem(
- ProjectionElem::Downcast(adt, variant_index)
+ let mut have_otherwise = false;
+
+ for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
+ let subpath = self.elaborator.downcast_subpath(
+ self.path, variant_index);
+ if let Some(variant_path) = subpath {
+ let base_lv = self.lvalue.clone().elem(
+ ProjectionElem::Downcast(adt, variant_index)
);
- let fields = self.move_paths_for_fields(
- &base_lv,
- variant_path,
- &adt.variants[variant_index],
- substs);
- values.push(discr);
- if let Some(ref mut unwind_blocks) = unwind_blocks {
- // We can't use the half-ladder from the original
- // drop ladder, because this breaks the
- // "funclet can't have 2 successor funclets"
- // requirement from MSVC:
- //
- // switch unwind-switch
- // / \ / \
- // v1.0 v2.0 v2.0-unwind v1.0-unwind
- // | | / |
- // v1.1-unwind v2.1-unwind |
- // ^ |
- // \-------------------------------/
- //
- // Create a duplicate half-ladder to avoid that. We
- // could technically only do this on MSVC, but I
- // I want to minimize the divergence between MSVC
- // and non-MSVC.
-
- let unwind = unwind.unwrap();
- let halfladder = self.drop_halfladder(
- None, unwind, &fields, true);
- unwind_blocks.push(
- halfladder.last().cloned().unwrap_or(unwind)
- );
- }
- let (normal, _) = self.drop_ladder(fields);
- normal_blocks.push(normal);
- } else {
- // variant not found - drop the entire enum
- if let None = otherwise {
- otherwise = Some(self.complete_drop(
- is_cleanup,
- Some(DropFlagMode::Shallow),
- succ));
- unwind_otherwise = unwind.map(|unwind| self.complete_drop(
- true,
- Some(DropFlagMode::Shallow),
- unwind
- ));
- }
- }
- }
- if let Some(block) = otherwise {
- normal_blocks.push(block);
- if let Some(ref mut unwind_blocks) = unwind_blocks {
- unwind_blocks.push(unwind_otherwise.unwrap());
- }
- } else {
- values.pop();
+ let fields = self.move_paths_for_fields(
+ &base_lv,
+ variant_path,
+ &adt.variants[variant_index],
+ substs);
+ values.push(discr);
+ if let Unwind::To(unwind) = unwind {
+ // We can't use the half-ladder from the original
+ // drop ladder, because this breaks the
+ // "funclet can't have 2 successor funclets"
+ // requirement from MSVC:
+ //
+ // switch unwind-switch
+ // / \ / \
+ // v1.0 v2.0 v2.0-unwind v1.0-unwind
+ // | | / |
+ // v1.1-unwind v2.1-unwind |
+ // ^ |
+ // \-------------------------------/
+ //
+ // Create a duplicate half-ladder to avoid that. We
+ // could technically only do this on MSVC, but I
+ // want to minimize the divergence between MSVC
+ // and non-MSVC.
+
+ let unwind_blocks = unwind_blocks.as_mut().unwrap();
+ let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
+ let halfladder =
+ self.drop_halfladder(&unwind_ladder, unwind, &fields);
+ unwind_blocks.push(halfladder.last().cloned().unwrap());
}
+ let (normal, _) = self.drop_ladder(fields, succ, unwind);
+ normal_blocks.push(normal);
+ } else {
+ have_otherwise = true;
+ }
+ }
- (self.adt_switch_block(is_cleanup, adt, normal_blocks, &values, succ),
- unwind_blocks.map(|unwind_blocks| {
- self.adt_switch_block(
- is_cleanup, adt, unwind_blocks, &values, unwind.unwrap()
- )
- }))
+ if have_otherwise {
+ normal_blocks.push(self.drop_block(succ, unwind));
+ if let Unwind::To(unwind) = unwind {
+ unwind_blocks.as_mut().unwrap().push(
+ self.drop_block(unwind, Unwind::InCleanup)
+ );
}
+ } else {
+ values.pop();
}
+
+ (self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
+ unwind.map(|unwind| {
+ self.adt_switch_block(
+ adt, unwind_blocks.unwrap(), &values, unwind, Unwind::InCleanup
+ )
+ }))
}
fn adt_switch_block(&mut self,
- is_cleanup: bool,
adt: &'tcx ty::AdtDef,
blocks: Vec<BasicBlock>,
values: &[ConstInt],
- succ: BasicBlock)
+ succ: BasicBlock,
+ unwind: Unwind)
-> BasicBlock {
// If there are multiple variants, then if something
// is present within the enum the discriminant, tracked
let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
let discr = Lvalue::Local(self.new_temp(discr_ty));
let discr_rv = Rvalue::Discriminant(self.lvalue.clone());
- let switch_block = self.elaborator.patch().new_block(BasicBlockData {
- statements: vec![
- Statement {
- source_info: self.source_info,
- kind: StatementKind::Assign(discr.clone(), discr_rv),
- }
- ],
+ let switch_block = BasicBlockData {
+ statements: vec![self.assign(&discr, discr_rv)],
terminator: Some(Terminator {
source_info: self.source_info,
kind: TerminatorKind::SwitchInt {
targets: blocks,
}
}),
- is_cleanup: is_cleanup,
- });
- self.drop_flag_test_block(is_cleanup, switch_block, succ)
+ is_cleanup: unwind.is_cleanup(),
+ };
+ let switch_block = self.elaborator.patch().new_block(switch_block);
+ self.drop_flag_test_block(switch_block, succ, unwind)
}
- fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Option<BasicBlock>))
+ fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Unwind))
-> BasicBlock
{
debug!("destructor_call_block({:?}, {:?})", self, succ);
let ref_lvalue = self.new_temp(ref_ty);
let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
- self.elaborator.patch().new_block(BasicBlockData {
- statements: vec![Statement {
- source_info: self.source_info,
- kind: StatementKind::Assign(
- Lvalue::Local(ref_lvalue),
- Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone())
- )
- }],
+ let result = BasicBlockData {
+ statements: vec![self.assign(
+ &Lvalue::Local(ref_lvalue),
+ Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone())
+ )],
terminator: Some(Terminator {
kind: TerminatorKind::Call {
func: Operand::function_handle(tcx, drop_fn.def_id, substs,
self.source_info.span),
args: vec![Operand::Consume(Lvalue::Local(ref_lvalue))],
destination: Some((unit_temp, succ)),
- cleanup: unwind,
+ cleanup: unwind.into_option(),
},
source_info: self.source_info
}),
- is_cleanup: self.is_cleanup,
- })
+ is_cleanup: unwind.is_cleanup(),
+ };
+ self.elaborator.patch().new_block(result)
+ }
+
+ /// create a loop that drops an array:
+ ///
+
+ ///
+ /// loop-block:
+ /// can_go = cur == length_or_end
+ /// if can_go then succ else drop-block
+ /// drop-block:
+ /// if ptr_based {
+ /// ptr = cur
+ /// cur = cur.offset(1)
+ /// } else {
+ /// ptr = &mut LV[cur]
+ /// cur = cur + 1
+ /// }
+ /// drop(ptr)
+ fn drop_loop(&mut self,
+ succ: BasicBlock,
+ cur: &Lvalue<'tcx>,
+ length_or_end: &Lvalue<'tcx>,
+ ety: Ty<'tcx>,
+ unwind: Unwind,
+ ptr_based: bool)
+ -> BasicBlock
+ {
+ let use_ = |lv: &Lvalue<'tcx>| Operand::Consume(lv.clone());
+ let tcx = self.tcx();
+
+ let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
+ ty: ety,
+ mutbl: hir::Mutability::MutMutable
+ });
+ let ptr = &Lvalue::Local(self.new_temp(ref_ty));
+ let can_go = &Lvalue::Local(self.new_temp(tcx.types.bool));
+
+ let one = self.constant_usize(1);
+ let (ptr_next, cur_next) = if ptr_based {
+ (Rvalue::Use(use_(cur)),
+ Rvalue::BinaryOp(BinOp::Offset, use_(cur), one))
+ } else {
+ (Rvalue::Ref(
+ tcx.types.re_erased,
+ BorrowKind::Mut,
+ self.lvalue.clone().index(use_(cur))),
+ Rvalue::BinaryOp(BinOp::Add, use_(cur), one))
+ };
+
+ let drop_block = BasicBlockData {
+ statements: vec![
+ self.assign(ptr, ptr_next),
+ self.assign(cur, cur_next)
+ ],
+ is_cleanup: unwind.is_cleanup(),
+ terminator: Some(Terminator {
+ source_info: self.source_info,
+ // this gets overwritten by drop elaboration.
+ kind: TerminatorKind::Unreachable,
+ })
+ };
+ let drop_block = self.elaborator.patch().new_block(drop_block);
+
+ let loop_block = BasicBlockData {
+ statements: vec![
+ self.assign(can_go, Rvalue::BinaryOp(BinOp::Eq,
+ use_(cur),
+ use_(length_or_end)))
+ ],
+ is_cleanup: unwind.is_cleanup(),
+ terminator: Some(Terminator {
+ source_info: self.source_info,
+ kind: TerminatorKind::if_(tcx, use_(can_go), succ, drop_block)
+ })
+ };
+ let loop_block = self.elaborator.patch().new_block(loop_block);
+
+ self.elaborator.patch().patch_terminator(drop_block, TerminatorKind::Drop {
+ location: ptr.clone().deref(),
+ target: loop_block,
+ unwind: unwind.into_option()
+ });
+
+ loop_block
+ }
+
+ fn open_drop_for_array(&mut self, ety: Ty<'tcx>) -> BasicBlock {
+ debug!("open_drop_for_array({:?})", ety);
+
+ // if size_of::<ety>() == 0 {
+ // index_based_loop
+ // } else {
+ // ptr_based_loop
+ // }
+
+ let tcx = self.tcx();
+
+ let use_ = |lv: &Lvalue<'tcx>| Operand::Consume(lv.clone());
+ let size = &Lvalue::Local(self.new_temp(tcx.types.usize));
+ let size_is_zero = &Lvalue::Local(self.new_temp(tcx.types.bool));
+ let base_block = BasicBlockData {
+ statements: vec![
+ self.assign(size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
+ self.assign(size_is_zero, Rvalue::BinaryOp(BinOp::Eq,
+ use_(size),
+ self.constant_usize(0)))
+ ],
+ is_cleanup: self.unwind.is_cleanup(),
+ terminator: Some(Terminator {
+ source_info: self.source_info,
+ kind: TerminatorKind::if_(
+ tcx,
+ use_(size_is_zero),
+ self.drop_loop_pair(ety, false),
+ self.drop_loop_pair(ety, true)
+ )
+ })
+ };
+ self.elaborator.patch().new_block(base_block)
+ }
+
+ // create a pair of drop-loops of `lvalue`, which drops its contents
+ // even in the case of 1 panic. If `ptr_based`, create a pointer loop,
+ // otherwise create an index loop.
+ fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
+ debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
+ let tcx = self.tcx();
+ let iter_ty = if ptr_based {
+ tcx.mk_ptr(ty::TypeAndMut { ty: ety, mutbl: hir::Mutability::MutMutable })
+ } else {
+ tcx.types.usize
+ };
+
+ let cur = Lvalue::Local(self.new_temp(iter_ty));
+ let length = Lvalue::Local(self.new_temp(tcx.types.usize));
+ let length_or_end = if ptr_based {
+ Lvalue::Local(self.new_temp(iter_ty))
+ } else {
+ length.clone()
+ };
+
+ let unwind = self.unwind.map(|unwind| {
+ self.drop_loop(unwind,
+ &cur,
+ &length_or_end,
+ ety,
+ Unwind::InCleanup,
+ ptr_based)
+ });
+
+ let succ = self.succ; // FIXME(#6393)
+ let loop_block = self.drop_loop(
+ succ,
+ &cur,
+ &length_or_end,
+ ety,
+ unwind,
+ ptr_based);
+
+ let zero = self.constant_usize(0);
+ let mut drop_block_stmts = vec![];
+ drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.lvalue.clone())));
+ if ptr_based {
+ // cur = &LV[0];
+ // end = &LV[len];
+ drop_block_stmts.push(self.assign(&cur, Rvalue::Ref(
+ tcx.types.re_erased, BorrowKind::Mut,
+ self.lvalue.clone().index(zero.clone())
+ )));
+ drop_block_stmts.push(self.assign(&length_or_end, Rvalue::Ref(
+ tcx.types.re_erased, BorrowKind::Mut,
+ self.lvalue.clone().index(Operand::Consume(length.clone()))
+ )));
+ } else {
+ // index = 0 (length already pushed)
+ drop_block_stmts.push(self.assign(&cur, Rvalue::Use(zero)));
+ }
+ let drop_block = self.elaborator.patch().new_block(BasicBlockData {
+ statements: drop_block_stmts,
+ is_cleanup: unwind.is_cleanup(),
+ terminator: Some(Terminator {
+ source_info: self.source_info,
+ kind: TerminatorKind::Goto { target: loop_block }
+ })
+ });
+
+ // FIXME(#34708): handle partially-dropped array/slice elements.
+ let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
+ self.drop_flag_test_block(reset_block, succ, unwind)
}
/// The slow-path - create an "open", elaborated drop for a type
/// ADT, both in the success case or if one of the destructors fail.
fn open_drop<'a>(&mut self) -> BasicBlock {
let ty = self.lvalue_ty(self.lvalue);
- let is_cleanup = self.is_cleanup; // FIXME(#6393)
- let succ = self.succ;
match ty.sty {
ty::TyClosure(def_id, substs) => {
let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
self.open_drop_for_adt(def, substs)
}
ty::TyDynamic(..) => {
- self.complete_drop(is_cleanup, Some(DropFlagMode::Deep), succ)
+ let unwind = self.unwind; // FIXME(#6393)
+ let succ = self.succ;
+ self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
}
- ty::TyArray(..) | ty::TySlice(..) => {
- // FIXME(#34708): handle partially-dropped
- // array/slice elements.
- self.complete_drop(is_cleanup, Some(DropFlagMode::Deep), succ)
+ ty::TyArray(ety, _) | ty::TySlice(ety) => {
+ self.open_drop_for_array(ety)
}
_ => bug!("open drop from non-ADT `{:?}`", ty)
}
/// if let Some(mode) = mode: FLAG(self.path)[mode] = false
/// drop(self.lv)
fn complete_drop<'a>(&mut self,
- is_cleanup: bool,
drop_mode: Option<DropFlagMode>,
- succ: BasicBlock) -> BasicBlock
+ succ: BasicBlock,
+ unwind: Unwind) -> BasicBlock
{
debug!("complete_drop({:?},{:?})", self, drop_mode);
- let drop_block = self.drop_block(is_cleanup, succ);
- if let Some(mode) = drop_mode {
- let block_start = Location { block: drop_block, statement_index: 0 };
- self.elaborator.clear_drop_flag(block_start, self.path, mode);
- }
+ let drop_block = self.drop_block(succ, unwind);
+ let drop_block = if let Some(mode) = drop_mode {
+ self.drop_flag_reset_block(mode, drop_block, unwind)
+ } else {
+ drop_block
+ };
+
+ self.drop_flag_test_block(drop_block, succ, unwind)
+ }
- self.drop_flag_test_block(is_cleanup, drop_block, succ)
+ fn drop_flag_reset_block(&mut self,
+ mode: DropFlagMode,
+ succ: BasicBlock,
+ unwind: Unwind) -> BasicBlock
+ {
+ debug!("drop_flag_reset_block({:?},{:?})", self, mode);
+
+ let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
+ let block_start = Location { block: block, statement_index: 0 };
+ self.elaborator.clear_drop_flag(block_start, self.path, mode);
+ block
}
fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
debug!("elaborated_drop_block({:?})", self);
- let is_cleanup = self.is_cleanup; // FIXME(#6393)
+ let unwind = self.unwind; // FIXME(#6393)
let succ = self.succ;
- let blk = self.drop_block(is_cleanup, succ);
+ let blk = self.drop_block(succ, unwind);
self.elaborate_drop(blk);
blk
}
&mut self,
ty: Ty<'tcx>,
target: BasicBlock,
- is_cleanup: bool
+ unwind: Unwind,
) -> BasicBlock {
- let block = self.unelaborated_free_block(ty, target, is_cleanup);
- self.drop_flag_test_block(is_cleanup, block, target)
+ let block = self.unelaborated_free_block(ty, target, unwind);
+ self.drop_flag_test_block(block, target, unwind)
}
fn unelaborated_free_block<'a>(
&mut self,
ty: Ty<'tcx>,
target: BasicBlock,
- is_cleanup: bool
+ unwind: Unwind
) -> BasicBlock {
let tcx = self.tcx();
let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
destination: Some((unit_temp, target)),
cleanup: None
}; // FIXME(#6393)
- let free_block = self.new_block(is_cleanup, call);
+ let free_block = self.new_block(unwind, call);
let block_start = Location { block: free_block, statement_index: 0 };
self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
free_block
}
- fn drop_block<'a>(&mut self, is_cleanup: bool, succ: BasicBlock) -> BasicBlock {
+ fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
let block = TerminatorKind::Drop {
location: self.lvalue.clone(),
- target: succ,
- unwind: if is_cleanup { None } else { self.unwind }
+ target: target,
+ unwind: unwind.into_option()
};
- self.new_block(is_cleanup, block)
+ self.new_block(unwind, block)
}
fn drop_flag_test_block(&mut self,
- is_cleanup: bool,
on_set: BasicBlock,
- on_unset: BasicBlock)
+ on_unset: BasicBlock,
+ unwind: Unwind)
-> BasicBlock
{
let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
- debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
- self, is_cleanup, on_set, style);
+ debug!("drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
+ self, on_set, on_unset, unwind, style);
match style {
DropStyle::Dead => on_unset,
DropStyle::Conditional | DropStyle::Open => {
let flag = self.elaborator.get_drop_flag(self.path).unwrap();
let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
- self.new_block(is_cleanup, term)
+ self.new_block(unwind, term)
}
}
}
fn new_block<'a>(&mut self,
- is_cleanup: bool,
+ unwind: Unwind,
k: TerminatorKind<'tcx>)
-> BasicBlock
{
terminator: Some(Terminator {
source_info: self.source_info, kind: k
}),
- is_cleanup: is_cleanup
+ is_cleanup: unwind.is_cleanup()
})
}
let mir = self.elaborator.mir();
self.elaborator.patch().terminator_loc(mir, bb)
}
+
+ fn constant_usize(&self, val: u16) -> Operand<'tcx> {
+ Operand::Constant(box Constant {
+ span: self.source_info.span,
+ ty: self.tcx().types.usize,
+ literal: Literal::Value { value: ConstVal::Integral(self.tcx().const_usize(val)) }
+ })
+ }
+
+ fn assign(&self, lhs: &Lvalue<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
+ Statement {
+ source_info: self.source_info,
+ kind: StatementKind::Assign(lhs.clone(), rhs)
+ }
+ }
}
Rvalue::CheckedBinaryOp(..) => "Rvalue::CheckedBinaryOp",
Rvalue::UnaryOp(..) => "Rvalue::UnaryOp",
Rvalue::Discriminant(..) => "Rvalue::Discriminant",
- Rvalue::Box(..) => "Rvalue::Box",
+ Rvalue::NullaryOp(..) => "Rvalue::NullaryOp",
Rvalue::Aggregate(ref kind, ref _operands) => {
// AggregateKind is not distinguished by visit API, so
// record it. (`super_rvalue` handles `_operands`.)
use llvm::{self, ValueRef, AttributePlace};
use base;
use builder::Builder;
-use common::{type_is_fat_ptr, C_uint};
+use common::{instance_ty, ty_fn_sig, type_is_fat_ptr, C_uint};
use context::CrateContext;
use cabi_x86;
use cabi_x86_64;
}
impl<'a, 'tcx> FnType<'tcx> {
+ pub fn of_instance(ccx: &CrateContext<'a, 'tcx>, instance: &ty::Instance<'tcx>)
+ -> Self {
+ let fn_ty = instance_ty(ccx.shared(), &instance);
+ let sig = ty_fn_sig(ccx, fn_ty);
+ let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&sig);
+ Self::new(ccx, sig, &[])
+ }
+
pub fn new(ccx: &CrateContext<'a, 'tcx>,
sig: ty::FnSig<'tcx>,
extra_args: &[Ty<'tcx>]) -> FnType<'tcx> {
pub fn unadjusted(ccx: &CrateContext<'a, 'tcx>,
sig: ty::FnSig<'tcx>,
extra_args: &[Ty<'tcx>]) -> FnType<'tcx> {
+ debug!("FnType::unadjusted({:?}, {:?})", sig, extra_args);
+
use self::Abi::*;
let cconv = match ccx.sess().target.target.adjust_abi(sig.abi) {
RustIntrinsic | PlatformIntrinsic |
#[derive(Debug, PartialEq)]
enum Disposition { Reused, Translated }
-pub fn assert_module_sources<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- modules: &[ModuleTranslation]) {
+pub(crate) fn assert_module_sources<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ modules: &[ModuleTranslation]) {
let _ignore = tcx.dep_graph.in_ignore();
if tcx.sess.opts.incremental.is_none() {
use super::rpath;
use super::msvc;
use metadata::METADATA_FILENAME;
-use session::config;
-use session::config::NoDebugInfo;
-use session::config::{OutputFilenames, Input, OutputType};
-use session::filesearch;
-use session::search_paths::PathKind;
-use session::Session;
-use middle::cstore::{self, LinkMeta, NativeLibrary, LibSource};
-use middle::cstore::{LinkagePreference, NativeLibraryKind};
-use middle::dependency_format::Linkage;
+use rustc::session::config::{self, NoDebugInfo, OutputFilenames, Input, OutputType};
+use rustc::session::filesearch;
+use rustc::session::search_paths::PathKind;
+use rustc::session::Session;
+use rustc::middle::cstore::{self, LinkMeta, NativeLibrary, LibSource, LinkagePreference,
+ NativeLibraryKind};
+use rustc::middle::dependency_format::Linkage;
use CrateTranslation;
-use util::common::time;
-use util::fs::fix_windows_verbatim_for_gcc;
+use rustc::util::common::time;
+use rustc::util::fs::fix_windows_verbatim_for_gcc;
use rustc::dep_graph::DepNode;
use rustc::hir::def_id::CrateNum;
use rustc::hir::svh::Svh;
use back::archive;
use back::symbol_export::{self, ExportedSymbols};
-use middle::dependency_format::Linkage;
+use rustc::middle::dependency_format::Linkage;
use rustc::hir::def_id::{LOCAL_CRATE, CrateNum};
use rustc_back::LinkerFlavor;
-use session::Session;
-use session::config::{self, CrateType, OptLevel, DebugInfoLevel};
+use rustc::session::Session;
+use rustc::session::config::{self, CrateType, OptLevel, DebugInfoLevel};
use serialize::{json, Encoder};
/// For all the linkers we support, and information they might
use std::fs;
use std::path::{Path, PathBuf};
use std::process::Command;
- use session::Session;
+ use rustc::session::Session;
use super::arch::{host_arch, Arch};
use super::registry::LOCAL_MACHINE;
mod platform {
use std::path::PathBuf;
use std::process::Command;
- use session::Session;
+ use rustc::session::Session;
pub fn link_exe_cmd(_sess: &Session) -> (Command, Option<PathBuf>) {
(Command::new("link.exe"), None)
}
use context::SharedCrateContext;
use monomorphize::Instance;
-use util::nodemap::FxHashMap;
+use rustc::util::nodemap::FxHashMap;
use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE};
use rustc::session::config;
use rustc::ty::TyCtxt;
}
}
-pub fn exported_name_from_type_and_prefix<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
- t: Ty<'tcx>,
- prefix: &str)
- -> String {
- let hash = get_symbol_hash(tcx, None, t, None);
- let mut buffer = SymbolPathBuffer::new();
- buffer.push(prefix);
- buffer.finish(hash)
-}
-
// Name sanitation. LLVM will happily accept identifiers with weird names, but
// gas doesn't!
// gas accepts the following characters in symbols: a-z, A-Z, 0-9, ., _, $
use back::link::{get_linker, remove};
use back::symbol_export::ExportedSymbols;
use rustc_incremental::{save_trans_partition, in_incr_comp_dir};
-use session::config::{OutputFilenames, OutputTypes, Passes, SomePasses, AllPasses, Sanitizer};
-use session::Session;
-use session::config::{self, OutputType};
+use rustc::session::config::{self, OutputFilenames, OutputType, OutputTypes, Passes, SomePasses,
+ AllPasses, Sanitizer};
+use rustc::session::Session;
use llvm;
use llvm::{ModuleRef, TargetMachineRef, PassManagerRef, DiagnosticInfoRef, ContextRef};
use llvm::SMDiagnosticRef;
use {CrateTranslation, ModuleLlvm, ModuleSource, ModuleTranslation};
-use util::common::{time, time_depth, set_time_depth};
-use util::common::path2cstr;
-use util::fs::link_or_copy;
+use rustc::util::common::{time, time_depth, set_time_depth, path2cstr};
+use rustc::util::fs::link_or_copy;
use errors::{self, Handler, Level, DiagnosticBuilder};
use errors::emitter::Emitter;
use syntax_pos::MultiSpan;
use llvm;
use metadata;
use rustc::hir::def_id::LOCAL_CRATE;
-use middle::lang_items::StartFnLangItem;
-use middle::cstore::EncodedMetadata;
+use rustc::middle::lang_items::StartFnLangItem;
+use rustc::middle::cstore::EncodedMetadata;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::dep_graph::AssertDepGraphSafe;
use rustc::middle::cstore::LinkMeta;
use rustc::hir::map as hir_map;
use rustc::util::common::time;
-use session::config::{self, NoDebugInfo};
+use rustc::session::config::{self, NoDebugInfo};
+use rustc::session::{self, DataTypeKind, Session};
use rustc_incremental::IncrementalHashesMap;
-use session::{self, DataTypeKind, Session};
use abi;
use mir::lvalue::LvalueRef;
use attributes;
use type_::Type;
use type_of;
use value::Value;
-use util::nodemap::{NodeSet, FxHashMap, FxHashSet};
+use rustc::util::nodemap::{NodeSet, FxHashMap, FxHashSet};
use libc::c_uint;
use std::ffi::{CStr, CString};
use context::SharedCrateContext;
use common::{def_ty, instance_ty};
use monomorphize::{self, Instance};
-use util::nodemap::{FxHashSet, FxHashMap, DefIdMap};
+use rustc::util::nodemap::{FxHashSet, FxHashMap, DefIdMap};
use trans_item::{TransItem, DefPathBasedNames, InstantiationMode};
_ => bug!(),
}
}
- mir::Rvalue::Box(..) => {
+ mir::Rvalue::NullaryOp(mir::NullOp::Box, _) => {
let tcx = self.scx.tcx();
let exchange_malloc_fn_def_id = tcx
.lang_items
output.push(create_fn_trans_item(instance));
}
}
- ty::InstanceDef::DropGlue(_, Some(ty)) => {
- match ty.sty {
- ty::TyArray(ety, _) |
- ty::TySlice(ety)
- if is_direct_call =>
- {
- // drop of arrays/slices is translated in-line.
- visit_drop_use(scx, ety, false, output);
- }
- _ => {}
- };
+ ty::InstanceDef::DropGlue(_, Some(_)) => {
output.push(create_fn_trans_item(instance));
}
ty::InstanceDef::ClosureOnceShim { .. } |
use llvm::{True, False, Bool, OperandBundleDef};
use rustc::hir::def_id::DefId;
use rustc::hir::map::DefPathData;
-use middle::lang_items::LangItem;
+use rustc::middle::lang_items::LangItem;
use base;
use builder::Builder;
use consts;
}
}
-impl Clone for Funclet {
- fn clone(&self) -> Funclet {
- Funclet {
- cleanuppad: self.cleanuppad,
- operand: OperandBundleDef::new("funclet", &[self.cleanuppad]),
- }
- }
-}
-
pub fn val_ty(v: ValueRef) -> Type {
unsafe {
Type::from_ref(llvm::LLVMTypeOf(v))
use rustc::ty::subst::Substs;
use rustc::ty::{self, Ty, TyCtxt};
use rustc::ty::layout::{LayoutTyper, TyLayout};
-use session::config::NoDebugInfo;
-use session::Session;
-use session::config;
-use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
+use rustc::session::config::{self, NoDebugInfo};
+use rustc::session::Session;
+use rustc::util::nodemap::{NodeSet, DefIdMap, FxHashMap};
use std::ffi::{CStr, CString};
use std::cell::{Cell, RefCell};
use builder::Builder;
use declare;
use type_::Type;
-use session::config::NoDebugInfo;
+use rustc::session::config::NoDebugInfo;
use std::ptr;
use syntax::attr;
use super::type_names::compute_debuginfo_type_name;
use super::{CrateDebugContext};
use context::SharedCrateContext;
-use session::Session;
use llvm::{self, ValueRef};
use llvm::debuginfo::{DIType, DIFile, DIScope, DIDescriptor,
use type_::Type;
use rustc::ty::{self, AdtKind, Ty};
use rustc::ty::layout::{self, LayoutTyper};
-use session::config;
-use util::nodemap::FxHashMap;
+use rustc::session::{Session, config};
+use rustc::util::nodemap::FxHashMap;
use libc::{c_uint, c_longlong};
use std::ffi::CString;
use monomorphize::Instance;
use rustc::ty::{self, Ty};
use rustc::mir;
-use session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
-use util::nodemap::{DefIdMap, FxHashMap, FxHashSet};
+use rustc::session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
+use rustc::util::nodemap::{DefIdMap, FxHashMap, FxHashSet};
use libc::c_uint;
use std::cell::{Cell, RefCell};
let align = C_uint(bcx.ccx, align);
return (size, align);
}
+ assert!(!info.is_null());
match t.sty {
ty::TyAdt(def, substs) => {
let ccx = bcx.ccx;
extern crate rustc_errors as errors;
extern crate serialize;
-pub use rustc::session;
-pub use rustc::middle;
-pub use rustc::lint;
-pub use rustc::util;
-
pub use base::trans_crate;
pub use back::symbol_names::provide;
pub use llvm_util::{init, target_features, print_version, print_passes, print, enable_llvm_debug};
pub mod back {
- pub use rustc::hir::svh;
-
- pub mod archive;
- pub mod linker;
+ mod archive;
+ pub(crate) mod linker;
pub mod link;
- pub mod lto;
- pub mod symbol_export;
- pub mod symbol_names;
+ mod lto;
+ pub(crate) mod symbol_export;
+ pub(crate) mod symbol_names;
pub mod write;
- pub mod msvc;
- pub mod rpath;
+ mod msvc;
+ mod rpath;
}
-pub mod diagnostics;
+mod diagnostics;
mod abi;
mod adt;
pub crate_name: Symbol,
pub modules: Vec<ModuleTranslation>,
pub metadata_module: ModuleTranslation,
- pub link: middle::cstore::LinkMeta,
- pub metadata: middle::cstore::EncodedMetadata,
+ pub link: rustc::middle::cstore::LinkMeta,
+ pub metadata: rustc::middle::cstore::EncodedMetadata,
pub exported_symbols: back::symbol_export::ExportedSymbols,
pub no_builtins: bool,
pub windows_subsystem: Option<String>,
Internal { funclet: mir::BasicBlock }
}
+impl CleanupKind {
+ pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
+ match self {
+ CleanupKind::NotCleanup => None,
+ CleanupKind::Funclet => Some(for_bb),
+ CleanupKind::Internal { funclet } => Some(funclet),
+ }
+ }
+}
+
pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
mir: &mir::Mir<'tcx>) {
result[succ] = CleanupKind::Internal { funclet: funclet };
}
CleanupKind::Funclet => {
- set_successor(funclet, succ);
+ if funclet != succ {
+ set_successor(funclet, succ);
+ }
}
CleanupKind::Internal { funclet: succ_funclet } => {
if funclet != succ_funclet {
use base::{self, Lifetime};
use callee;
use builder::Builder;
-use common::{self, Funclet};
-use common::{C_bool, C_str_slice, C_struct, C_u32, C_uint, C_undef};
+use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_undef};
use consts;
use machine::llalign_of_min;
use meth;
use monomorphize;
use type_of;
-use tvec;
use type_::Type;
-use rustc_data_structures::indexed_vec::IndexVec;
use syntax::symbol::Symbol;
use std::cmp;
use super::{MirContext, LocalRef};
-use super::analyze::CleanupKind;
use super::constant::Const;
use super::lvalue::{Alignment, LvalueRef};
use super::operand::OperandRef;
use super::operand::OperandValue::{Pair, Ref, Immediate};
impl<'a, 'tcx> MirContext<'a, 'tcx> {
- pub fn trans_block(&mut self, bb: mir::BasicBlock,
- funclets: &IndexVec<mir::BasicBlock, Option<Funclet>>) {
+ pub fn trans_block(&mut self, bb: mir::BasicBlock) {
let mut bcx = self.get_builder(bb);
let data = &self.mir[bb];
debug!("trans_block({:?}={:?})", bb, data);
- let funclet = match self.cleanup_kinds[bb] {
- CleanupKind::Internal { funclet } => funclets[funclet].as_ref(),
- _ => funclets[bb].as_ref(),
- };
+ for statement in &data.statements {
+ bcx = self.trans_statement(bcx, statement);
+ }
+
+ self.trans_terminator(bcx, bb, data.terminator());
+ }
+
+ fn trans_terminator(&mut self,
+ mut bcx: Builder<'a, 'tcx>,
+ bb: mir::BasicBlock,
+ terminator: &mir::Terminator<'tcx>)
+ {
+ debug!("trans_terminator: {:?}", terminator);
// Create the cleanup bundle, if needed.
+ let tcx = bcx.tcx();
+ let span = terminator.source_info.span;
+ let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
+ let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());
+
let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
let cleanup_bundle = funclet.map(|l| l.bundle());
- let funclet_br = |this: &Self, bcx: Builder, bb: mir::BasicBlock| {
- let lltarget = this.blocks[bb];
- if let Some(cp) = cleanup_pad {
- match this.cleanup_kinds[bb] {
- CleanupKind::Funclet => {
- // micro-optimization: generate a `ret` rather than a jump
- // to a return block
- bcx.cleanup_ret(cp, Some(lltarget));
- }
- CleanupKind::Internal { .. } => bcx.br(lltarget),
- CleanupKind::NotCleanup => bug!("jump from cleanup bb to bb {:?}", bb)
+ let lltarget = |this: &mut Self, target: mir::BasicBlock| {
+ let lltarget = this.blocks[target];
+ let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
+ match (funclet_bb, target_funclet) {
+ (None, None) => (lltarget, false),
+ (Some(f), Some(t_f))
+ if f == t_f || !base::wants_msvc_seh(tcx.sess)
+ => (lltarget, false),
+ (None, Some(_)) => {
+ // jump *into* cleanup - need a landing pad if GNU
+ (this.landing_pad_to(target), false)
+ }
+ (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
+ (Some(_), Some(_)) => {
+ (this.landing_pad_to(target), true)
}
- } else {
- bcx.br(lltarget);
}
};
let llblock = |this: &mut Self, target: mir::BasicBlock| {
- let lltarget = this.blocks[target];
+ let (lltarget, is_cleanupret) = lltarget(this, target);
+ if is_cleanupret {
+ // MSVC cross-funclet jump - need a trampoline
+
+ debug!("llblock: creating cleanup trampoline for {:?}", target);
+ let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
+ let trampoline = this.new_block(name);
+ trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
+ trampoline.llbb()
+ } else {
+ lltarget
+ }
+ };
- if let Some(cp) = cleanup_pad {
- match this.cleanup_kinds[target] {
- CleanupKind::Funclet => {
- // MSVC cross-funclet jump - need a trampoline
+ let funclet_br = |this: &mut Self, bcx: Builder, target: mir::BasicBlock| {
+ let (lltarget, is_cleanupret) = lltarget(this, target);
+ if is_cleanupret {
+ // micro-optimization: generate a `ret` rather than a jump
+ // to a trampoline.
+ bcx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
+ } else {
+ bcx.br(lltarget);
+ }
+ };
- debug!("llblock: creating cleanup trampoline for {:?}", target);
- let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
- let trampoline = this.new_block(name);
- trampoline.cleanup_ret(cp, Some(lltarget));
- trampoline.llbb()
- }
- CleanupKind::Internal { .. } => lltarget,
- CleanupKind::NotCleanup =>
- bug!("jump from cleanup bb {:?} to bb {:?}", bb, target)
+ let do_call = |
+ this: &mut Self,
+ bcx: Builder<'a, 'tcx>,
+ fn_ty: FnType<'tcx>,
+ fn_ptr: ValueRef,
+ llargs: &[ValueRef],
+ destination: Option<(ReturnDest, ty::Ty<'tcx>, mir::BasicBlock)>,
+ cleanup: Option<mir::BasicBlock>
+ | {
+ if let Some(cleanup) = cleanup {
+ let ret_bcx = if let Some((_, _, target)) = destination {
+ this.blocks[target]
+ } else {
+ this.unreachable_block()
+ };
+ let invokeret = bcx.invoke(fn_ptr,
+ &llargs,
+ ret_bcx,
+ llblock(this, cleanup),
+ cleanup_bundle);
+ fn_ty.apply_attrs_callsite(invokeret);
+
+ if let Some((ret_dest, ret_ty, target)) = destination {
+ let ret_bcx = this.get_builder(target);
+ this.set_debug_loc(&ret_bcx, terminator.source_info);
+ let op = OperandRef {
+ val: Immediate(invokeret),
+ ty: ret_ty,
+ };
+ this.store_return(&ret_bcx, ret_dest, &fn_ty.ret, op);
}
} else {
- if let (CleanupKind::NotCleanup, CleanupKind::Funclet) =
- (this.cleanup_kinds[bb], this.cleanup_kinds[target])
- {
- // jump *into* cleanup - need a landing pad if GNU
- this.landing_pad_to(target)
+ let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
+ fn_ty.apply_attrs_callsite(llret);
+
+ if let Some((ret_dest, ret_ty, target)) = destination {
+ let op = OperandRef {
+ val: Immediate(llret),
+ ty: ret_ty,
+ };
+ this.store_return(&bcx, ret_dest, &fn_ty.ret, op);
+ funclet_br(this, bcx, target);
} else {
- lltarget
+ bcx.unreachable();
}
}
};
- for statement in &data.statements {
- bcx = self.trans_statement(bcx, statement);
- }
-
- let terminator = data.terminator();
- debug!("trans_block: terminator: {:?}", terminator);
-
- let span = terminator.source_info.span;
self.set_debug_loc(&bcx, terminator.source_info);
match terminator.kind {
mir::TerminatorKind::Resume => {
}
let lvalue = self.trans_lvalue(&bcx, location);
+ let fn_ty = FnType::of_instance(bcx.ccx, &drop_fn);
let (drop_fn, need_extra) = match ty.sty {
ty::TyDynamic(..) => (meth::DESTRUCTOR.get_fn(&bcx, lvalue.llextra),
false),
- ty::TyArray(ety, _) | ty::TySlice(ety) => {
- // FIXME: handle panics
- let drop_fn = monomorphize::resolve_drop_in_place(
- bcx.ccx.shared(), ety);
- let drop_fn = callee::get_fn(bcx.ccx, drop_fn);
- let bcx = tvec::slice_for_each(
- &bcx,
- lvalue.project_index(&bcx, C_uint(bcx.ccx, 0u64)),
- ety,
- lvalue.len(bcx.ccx),
- |bcx, llval, loop_bb| {
- self.set_debug_loc(&bcx, terminator.source_info);
- if let Some(unwind) = unwind {
- bcx.invoke(
- drop_fn,
- &[llval],
- loop_bb,
- llblock(self, unwind),
- cleanup_bundle
- );
- } else {
- bcx.call(drop_fn, &[llval], cleanup_bundle);
- bcx.br(loop_bb);
- }
- });
- funclet_br(self, bcx, target);
- return
- }
_ => (callee::get_fn(bcx.ccx, drop_fn), lvalue.has_extra())
};
let args = &[lvalue.llval, lvalue.llextra][..1 + need_extra as usize];
- if let Some(unwind) = unwind {
- bcx.invoke(
- drop_fn,
- args,
- self.blocks[target],
- llblock(self, unwind),
- cleanup_bundle
- );
- } else {
- bcx.call(drop_fn, args, cleanup_bundle);
- funclet_br(self, bcx, target);
- }
+ do_call(self, bcx, fn_ty, drop_fn, args,
+ Some((ReturnDest::Nothing, tcx.mk_nil(), target)),
+ unwind);
}
mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
// Obtain the panic entry point.
let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
let instance = ty::Instance::mono(bcx.tcx(), def_id);
+ let fn_ty = FnType::of_instance(bcx.ccx, &instance);
let llfn = callee::get_fn(bcx.ccx, instance);
// Translate the actual panic invoke/call.
- if let Some(unwind) = cleanup {
- bcx.invoke(llfn,
- &args,
- self.unreachable_block(),
- llblock(self, unwind),
- cleanup_bundle);
- } else {
- bcx.call(llfn, &args, cleanup_bundle);
- bcx.unreachable();
- }
+ do_call(self, bcx, fn_ty, llfn, &args, None, cleanup);
}
mir::TerminatorKind::DropAndReplace { .. } => {
- bug!("undesugared DropAndReplace in trans: {:?}", data);
+ bug!("undesugared DropAndReplace in trans: {:?}", terminator);
}
- mir::TerminatorKind::Call { ref func, ref args, ref destination, ref cleanup } => {
+ mir::TerminatorKind::Call { ref func, ref args, ref destination, cleanup } => {
// Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
let callee = self.trans_operand(&bcx, func);
_ => span_bug!(span, "no llfn for call"),
};
- // Many different ways to call a function handled here
- if let &Some(cleanup) = cleanup {
- let ret_bcx = if let Some((_, target)) = *destination {
- self.blocks[target]
- } else {
- self.unreachable_block()
- };
- let invokeret = bcx.invoke(fn_ptr,
- &llargs,
- ret_bcx,
- llblock(self, cleanup),
- cleanup_bundle);
- fn_ty.apply_attrs_callsite(invokeret);
-
- if let Some((_, target)) = *destination {
- let ret_bcx = self.get_builder(target);
- self.set_debug_loc(&ret_bcx, terminator.source_info);
- let op = OperandRef {
- val: Immediate(invokeret),
- ty: sig.output(),
- };
- self.store_return(&ret_bcx, ret_dest, &fn_ty.ret, op);
- }
- } else {
- let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
- fn_ty.apply_attrs_callsite(llret);
- if let Some((_, target)) = *destination {
- let op = OperandRef {
- val: Immediate(llret),
- ty: sig.output(),
- };
- self.store_return(&bcx, ret_dest, &fn_ty.ret, op);
- funclet_br(self, bcx, target);
- } else {
- bcx.unreachable();
- }
- }
+ do_call(self, bcx, fn_ty, fn_ptr, &llargs,
+ destination.as_ref().map(|&(_, target)| (ret_dest, sig.output(), target)),
+ cleanup);
}
}
}
fn landing_pad_uncached(&mut self, target_bb: BasicBlockRef) -> BasicBlockRef {
if base::wants_msvc_seh(self.ccx.sess()) {
- return target_bb;
+ span_bug!(self.mir.span, "landing pad was not inserted?")
}
let bcx = self.new_block("cleanup");
Const::new(llval, operand.ty)
}
+ mir::Rvalue::NullaryOp(mir::NullOp::SizeOf, ty) => {
+ assert!(self.ccx.shared().type_is_sized(ty));
+ let llval = C_uint(self.ccx, self.ccx.size_of(ty));
+ Const::new(llval, tcx.types.usize)
+ }
+
_ => span_bug!(span, "{:?} in constant", rvalue)
};
llvm::LLVMConstICmp(cmp, lhs, rhs)
}
}
+ mir::BinOp::Offset => unreachable!("BinOp::Offset in const-eval!")
}
}
}
use rustc::mir::tcx::LvalueTy;
use rustc::ty::subst::Substs;
use rustc::infer::TransNormalize;
-use session::config::FullDebugInfo;
+use rustc::session::config::FullDebugInfo;
use base;
use builder::Builder;
use common::{self, CrateContext, Funclet};
/// The funclet status of each basic block
cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,
+ /// When targeting MSVC, this stores the cleanup info for each funclet
+ /// BB. This is initialized as we compute the funclets' head block in RPO.
+ funclets: &'a IndexVec<mir::BasicBlock, Option<Funclet>>,
+
/// This stores the landing-pad block for a given BB, computed lazily on GNU
/// and eagerly on MSVC.
landing_pads: IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,
debuginfo::create_function_debug_context(ccx, instance, sig, llfn, mir);
let bcx = Builder::new_block(ccx, llfn, "start");
- let cleanup_kinds = analyze::cleanup_kinds(&mir);
+ if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
+ bcx.set_personality_fn(ccx.eh_personality());
+ }
+ let cleanup_kinds = analyze::cleanup_kinds(&mir);
// Allocate a `Block` for every basic block, except
// the start block, if nothing loops back to it.
let reentrant_start_block = !mir.predecessors_for(mir::START_BLOCK).is_empty();
// Compute debuginfo scopes from MIR scopes.
let scopes = debuginfo::create_mir_scopes(ccx, mir, &debug_context);
+ let (landing_pads, funclets) = create_funclets(&bcx, &cleanup_kinds, &block_bcxs);
let mut mircx = MirContext {
mir: mir,
blocks: block_bcxs,
unreachable_block: None,
cleanup_kinds: cleanup_kinds,
- landing_pads: IndexVec::from_elem(None, mir.basic_blocks()),
+ landing_pads: landing_pads,
+ funclets: &funclets,
scopes: scopes,
locals: IndexVec::new(),
debug_context: debug_context,
// emitting should be enabled.
debuginfo::start_emitting_source_locations(&mircx.debug_context);
- let funclets: IndexVec<mir::BasicBlock, Option<Funclet>> =
- mircx.cleanup_kinds.iter_enumerated().map(|(bb, cleanup_kind)| {
- if let CleanupKind::Funclet = *cleanup_kind {
- let bcx = mircx.get_builder(bb);
- unsafe {
- llvm::LLVMSetPersonalityFn(mircx.llfn, mircx.ccx.eh_personality());
- }
- if base::wants_msvc_seh(ccx.sess()) {
- return Some(Funclet::new(bcx.cleanup_pad(None, &[])));
- }
- }
-
- None
- }).collect();
-
let rpo = traversal::reverse_postorder(&mir);
let mut visited = BitVector::new(mir.basic_blocks().len());
// Translate the body of each block using reverse postorder
for (bb, _) in rpo {
visited.insert(bb.index());
- mircx.trans_block(bb, &funclets);
+ mircx.trans_block(bb);
}
// Remove blocks that haven't been visited, or have no
}
}
+fn create_funclets<'a, 'tcx>(
+ bcx: &Builder<'a, 'tcx>,
+ cleanup_kinds: &IndexVec<mir::BasicBlock, CleanupKind>,
+ block_bcxs: &IndexVec<mir::BasicBlock, BasicBlockRef>)
+ -> (IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,
+ IndexVec<mir::BasicBlock, Option<Funclet>>)
+{
+ block_bcxs.iter_enumerated().zip(cleanup_kinds).map(|((bb, &llbb), cleanup_kind)| {
+ match *cleanup_kind {
+ CleanupKind::Funclet if base::wants_msvc_seh(bcx.sess()) => {
+ let cleanup_bcx = bcx.build_sibling_block(&format!("funclet_{:?}", bb));
+ let cleanup = cleanup_bcx.cleanup_pad(None, &[]);
+ cleanup_bcx.br(llbb);
+ (Some(cleanup_bcx.llbb()), Some(Funclet::new(cleanup)))
+ }
+ _ => (None, None)
+ }
+ }).unzip()
+}
+
/// Produce, for each argument, a `ValueRef` pointing at the
/// argument's value. As arguments are lvalues, these are always
/// indirect.
pub fn deref(self) -> LvalueRef<'tcx> {
let projected_ty = self.ty.builtin_deref(true, ty::NoPreference)
- .unwrap().ty;
+ .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self)).ty;
let (llptr, llextra) = match self.val {
OperandValue::Immediate(llptr) => (llptr, ptr::null_mut()),
OperandValue::Pair(llptr, llextra) => (llptr, llextra),
use rustc::ty::layout::{Layout, LayoutTyper};
use rustc::mir::tcx::LvalueTy;
use rustc::mir;
-use middle::lang_items::ExchangeMallocFnLangItem;
+use rustc::middle::lang_items::ExchangeMallocFnLangItem;
use base;
use builder::Builder;
})
}
- mir::Rvalue::Box(content_ty) => {
+ mir::Rvalue::NullaryOp(mir::NullOp::SizeOf, ty) => {
+ assert!(bcx.ccx.shared().type_is_sized(ty));
+ let val = C_uint(bcx.ccx, bcx.ccx.size_of(ty));
+ let tcx = bcx.tcx();
+ (bcx, OperandRef {
+ val: OperandValue::Immediate(val),
+ ty: tcx.types.usize,
+ })
+ }
+
+ mir::Rvalue::NullaryOp(mir::NullOp::Box, content_ty) => {
let content_ty: Ty<'tcx> = self.monomorphize(&content_ty);
let llty = type_of::type_of(bcx.ccx, content_ty);
let llsize = machine::llsize_of(bcx.ccx, llty);
mir::BinOp::BitOr => bcx.or(lhs, rhs),
mir::BinOp::BitAnd => bcx.and(lhs, rhs),
mir::BinOp::BitXor => bcx.xor(lhs, rhs),
+ mir::BinOp::Offset => bcx.inbounds_gep(lhs, &[rhs]),
mir::BinOp::Shl => common::build_unchecked_lshift(bcx, lhs, rhs),
mir::BinOp::Shr => common::build_unchecked_rshift(bcx, input_ty, lhs, rhs),
mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt |
mir::Rvalue::CheckedBinaryOp(..) |
mir::Rvalue::UnaryOp(..) |
mir::Rvalue::Discriminant(..) |
- mir::Rvalue::Box(..) |
+ mir::Rvalue::NullaryOp(..) |
mir::Rvalue::Use(..) => // (*)
true,
mir::Rvalue::Repeat(..) |
use syntax::ast::NodeId;
use syntax::symbol::{Symbol, InternedString};
use trans_item::{TransItem, InstantiationMode};
-use util::nodemap::{FxHashMap, FxHashSet};
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
pub enum PartitioningStrategy {
/// Generate one codegen unit per source-level module.
use rustc::traits;
use rustc::ty::{self, Ty, TyCtxt, ToPredicate, TypeFoldable};
use rustc::ty::wf::object_region_bounds;
+use rustc::lint::builtin::PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES;
use rustc_back::slice;
use require_c_abi_if_variadic;
use util::common::{ErrorReported, FN_OUTPUT_NAME};
match item_segment.parameters {
hir::AngleBracketedParameters(_) => {}
hir::ParenthesizedParameters(..) => {
- struct_span_err!(tcx.sess, span, E0214,
- "parenthesized parameters may only be used with a trait")
- .span_label(span, "only traits may use parentheses")
- .emit();
+ self.prohibit_parenthesized_params(item_segment, true);
return Substs::for_item(tcx, def_id, |_, _| {
tcx.types.re_static
self_ty: Ty<'tcx>)
-> ty::TraitRef<'tcx>
{
+ self.prohibit_type_params(trait_ref.path.segments.split_last().unwrap().1);
+
let trait_def_id = self.trait_def_id(trait_ref);
self.ast_path_to_mono_trait_ref(trait_ref.path.span,
trait_def_id,
debug!("ast_path_to_poly_trait_ref({:?}, def_id={:?})", trait_ref, trait_def_id);
+ self.prohibit_type_params(trait_ref.path.segments.split_last().unwrap().1);
+
let (substs, assoc_bindings) =
self.create_substs_for_ast_trait_ref(trait_ref.path.span,
trait_def_id,
dummy_self,
&mut projection_bounds);
+ for trait_bound in trait_bounds[1..].iter() {
+ // Sanity check for non-principal trait bounds
+ self.instantiate_poly_trait_ref(trait_bound,
+ dummy_self,
+ &mut vec![]);
+ }
+
let (auto_traits, trait_bounds) = split_auto_traits(tcx, &trait_bounds[1..]);
if !trait_bounds.is_empty() {
pub fn prohibit_type_params(&self, segments: &[hir::PathSegment]) {
for segment in segments {
+ if let hir::ParenthesizedParameters(_) = segment.parameters {
+ self.prohibit_parenthesized_params(segment, false);
+ break;
+ }
for typ in segment.parameters.types() {
struct_span_err!(self.tcx().sess, typ.span, E0109,
"type parameters are not allowed on this type")
}
}
+ pub fn prohibit_parenthesized_params(&self, segment: &hir::PathSegment, emit_error: bool) {
+ if let hir::ParenthesizedParameters(ref data) = segment.parameters {
+ if emit_error {
+ struct_span_err!(self.tcx().sess, data.span, E0214,
+ "parenthesized parameters may only be used with a trait")
+ .span_label(data.span, "only traits may use parentheses")
+ .emit();
+ } else {
+ let msg = "parenthesized parameters may only be used with a trait".to_string();
+ self.tcx().sess.add_lint(PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
+ ast::CRATE_NODE_ID, data.span, msg);
+ }
+ }
+ }
+
pub fn prohibit_projection(&self, span: Span) {
let mut err = struct_span_err!(self.tcx().sess, span, E0229,
"associated type bindings are not allowed here");
use rustc::ty::subst::Subst;
use errors::DiagnosticBuilder;
use syntax::abi;
-use syntax::feature_gate;
use syntax::ptr::P;
use syntax_pos;
let node_id_a = self.tcx.hir.as_local_node_id(def_id_a).unwrap();
match b.sty {
ty::TyFnPtr(_) if self.tcx.with_freevars(node_id_a, |v| v.is_empty()) => {
- if !self.tcx.sess.features.borrow().closure_to_fn_coercion {
- feature_gate::emit_feature_err(&self.tcx.sess.parse_sess,
- "closure_to_fn_coercion",
- self.cause.span,
- feature_gate::GateIssue::Language,
- feature_gate::CLOSURE_TO_FN_COERCION);
- return self.unify_and(a, b, identity());
- }
// We coerce the closure, which has fn type
// `extern "rust-call" fn((arg0,arg1,...)) -> _`
// to
(&data.lifetimes[..], &data.types[..], data.infer_types, &data.bindings[..])
}
Some(&hir::ParenthesizedParameters(_)) => {
- span_bug!(span, "parenthesized parameters cannot appear in ExprPath");
+ AstConv::prohibit_parenthesized_params(self, &segment.as_ref().unwrap().0,
+ false);
+ (&[][..], &[][..], true, &[][..])
}
None => (&[][..], &[][..], true, &[][..])
}
impl_trait_ref,
impl_polarity,
is_foreign_item,
+ is_default_impl,
..*providers
};
}
_ => bug!("is_foreign_item applied to non-local def-id {:?}", def_id)
}
}
+
+fn is_default_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+ def_id: DefId)
+ -> bool {
+ match tcx.hir.get_if_local(def_id) {
+ Some(hir_map::NodeItem(&hir::Item { node: hir::ItemDefaultImpl(..), .. }))
+ => true,
+ Some(_) => false,
+ _ => bug!("is_default_impl applied to non-local def-id {:?}", def_id)
+ }
+}
// E0217, // ambiguous associated type, defined in multiple supertraits
// E0218, // no associated type defined
// E0219, // associated type defined in higher-ranked supertrait
-// E0222, // Error code E0045 (variadic function must have C calling
+// E0222, // Error code E0045 (variadic function must have C or cdecl calling
// convention) duplicate
E0224, // at least one non-builtin trait is required for an object type
E0227, // ambiguous lifetime bound, explicit lifetime bound required
decl: &hir::FnDecl,
abi: Abi,
span: Span) {
- if decl.variadic && abi != Abi::C {
+ if decl.variadic && !(abi == Abi::C || abi == Abi::Cdecl) {
let mut err = struct_span_err!(tcx.sess, span, E0045,
- "variadic function must have C calling convention");
- err.span_label(span, "variadics require C calling conventions")
- .emit();
+ "variadic function must have C or cdecl calling convention");
+ err.span_label(span, "variadics require C or cdecl calling convention").emit();
}
}
}
// If this is a defaulted impl, then bail out early here
- if tcx.sess.cstore.is_default_impl(did) {
+ if tcx.is_default_impl(did) {
return ret.push(clean::Item {
inner: clean::DefaultImplItem(clean::DefaultImpl {
// FIXME: this should be decoded
//
pub struct CodeMap {
- // The `files` field should not be visible outside of libsyntax so that we
- // can do proper dependency tracking.
pub(super) files: RefCell<Vec<Rc<FileMap>>>,
file_loader: Box<FileLoader>,
// This is used to apply the file path remapping as specified via
// -Zremap-path-prefix to all FileMaps allocated within this CodeMap.
path_mapping: FilePathMapping,
- // The CodeMap will invoke this callback whenever a specific FileMap is
- // accessed. The callback starts out as a no-op but when the dependency
- // graph becomes available later during the compilation process, it is
- // be replaced with something that notifies the dep-tracking system.
- dep_tracking_callback: RefCell<Box<Fn(&FileMap)>>,
}
impl CodeMap {
files: RefCell::new(Vec::new()),
file_loader: Box::new(RealFileLoader),
path_mapping: path_mapping,
- dep_tracking_callback: RefCell::new(Box::new(|_| {})),
}
}
files: RefCell::new(Vec::new()),
file_loader: file_loader,
path_mapping: path_mapping,
- dep_tracking_callback: RefCell::new(Box::new(|_| {})),
}
}
&self.path_mapping
}
- pub fn set_dep_tracking_callback(&self, cb: Box<Fn(&FileMap)>) {
- *self.dep_tracking_callback.borrow_mut() = cb;
- }
-
pub fn file_exists(&self, path: &Path) -> bool {
self.file_loader.file_exists(path)
}
}
pub fn files(&self) -> Ref<Vec<Rc<FileMap>>> {
- let files = self.files.borrow();
- for file in files.iter() {
- (self.dep_tracking_callback.borrow())(file);
- }
- files
- }
-
- /// Only use this if you do your own dependency tracking!
- pub fn files_untracked(&self) -> Ref<Vec<Rc<FileMap>>> {
self.files.borrow()
}
let files = self.files.borrow();
let f = (*files)[idx].clone();
- (self.dep_tracking_callback.borrow())(&f);
-
match f.lookup_line(pos) {
Some(line) => Ok(FileMapAndLine { fm: f, line: line }),
None => Err(f)
pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
for fm in self.files.borrow().iter() {
if filename == fm.name {
- (self.dep_tracking_callback.borrow())(fm);
return Some(fm.clone());
}
}
pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
let idx = self.lookup_filemap_idx(bpos);
let fm = (*self.files.borrow())[idx].clone();
- (self.dep_tracking_callback.borrow())(&fm);
let offset = bpos - fm.start_pos;
FileMapAndBytePos {fm: fm, pos: offset}
}
let files = self.files.borrow();
let map = &(*files)[idx];
- (self.dep_tracking_callback.borrow())(map);
-
// The number of extra bytes due to multibyte chars in the FileMap
let mut total_extra_bytes = 0;
}
}
+fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) {
+ let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
+ let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
+ values.push(message);
+}
+
/// Given `lhses` and `rhses`, this is the new macro we create
fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
sp: Span,
rhses: &[quoted::TokenTree])
-> Box<MacResult+'cx> {
if cx.trace_macros() {
- let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
- let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
- values.push(format!("expands to `{}! {{ {} }}`", name, arg));
+ trace_macros_note(cx, sp, format!("expanding `{}! {{ {} }}`", name, arg));
}
// Which arm's failure should we report? (the one furthest along)
};
// rhs has holes ( `$id` and `$(...)` that need filled)
let tts = transcribe(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
+
+ if cx.trace_macros() {
+ trace_macros_note(cx, sp, format!("to `{}`", tts));
+ }
+
let directory = Directory {
path: cx.current_expansion.module.directory.clone(),
ownership: cx.current_expansion.directory_ownership,
// `extern "msp430-interrupt" fn()`
(active, abi_msp430_interrupt, "1.16.0", Some(38487)),
- // Coerces non capturing closures to function pointers
- (active, closure_to_fn_coercion, "1.17.0", Some(39817)),
-
// Used to identify crates that contain sanitizer runtimes
// rustc internal
(active, sanitizer_runtime, "1.17.0", None),
(accepted, loop_break_value, "1.19.0", Some(37339)),
// Permits numeric fields in struct expressions and patterns.
(accepted, relaxed_adts, "1.19.0", Some(35626)),
+ // Coerces non capturing closures to function pointers
+ (accepted, closure_to_fn_coercion, "1.19.0", Some(39817)),
);
// If you change this, please modify src/doc/unstable-book as well. You must
"rustc_attrs",
"internal rustc attributes will never be stable",
cfg_fn!(rustc_attrs))),
- ("rustc_move_fragments", Normal, Gated(Stability::Unstable,
- "rustc_attrs",
- "the `#[rustc_move_fragments]` attribute \
- is just used for rustc unit tests \
- and will never be stable",
- cfg_fn!(rustc_attrs))),
("rustc_mir", Whitelisted, Gated(Stability::Unstable,
"rustc_attrs",
"the `#[rustc_mir]` attribute \
pub const EXPLAIN_PLACEMENT_IN: &'static str =
"placement-in expression syntax is experimental and subject to change.";
-pub const CLOSURE_TO_FN_COERCION: &'static str =
- "non-capturing closure to fn coercion is experimental";
-
struct PostExpansionVisitor<'a> {
context: &'a Context<'a>,
}
extern "C" void LLVMRustWriteValueToString(LLVMValueRef V,
RustStringRef Str) {
RawRustStringOstream OS(Str);
- OS << "(";
- unwrap<llvm::Value>(V)->getType()->print(OS);
- OS << ":";
- unwrap<llvm::Value>(V)->print(OS);
- OS << ")";
+ if (!V) {
+ OS << "(null)";
+ } else {
+ OS << "(";
+ unwrap<llvm::Value>(V)->getType()->print(OS);
+ OS << ":";
+ unwrap<llvm::Value>(V)->print(OS);
+ OS << ")";
+ }
}
extern "C" bool LLVMRustLinkInExternalBitcode(LLVMModuleRef DstRef, char *BC,
// except according to those terms.
extern "Rust" { fn foo(x: u8, ...); } //~ ERROR E0045
- //~| NOTE variadics require C calling conventions
+ //~| NOTE variadics require C or cdecl calling convention
fn main() {
}
+++ /dev/null
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-stage0: new feature, remove this when SNAP
-// revisions: a b
-
-#[cfg(a)]
-mod a {
- const FOO: fn(u8) -> u8 = |v: u8| { v };
- //[a]~^ ERROR non-capturing closure to fn coercion is experimental
- //[a]~^^ ERROR mismatched types
-
- const BAR: [fn(&mut u32); 1] = [
- |v: &mut u32| *v += 1,
- //[a]~^ ERROR non-capturing closure to fn coercion is experimental
- //[a]~^^ ERROR mismatched types
- ];
-}
-
-#[cfg(b)]
-mod b {
- fn func_specific() -> (fn() -> u32) {
- || return 42
- //[b]~^ ERROR non-capturing closure to fn coercion is experimental
- //[b]~^^ ERROR mismatched types
- }
- fn foo() {
- // Items
- assert_eq!(func_specific()(), 42);
- let foo: fn(u8) -> u8 = |v: u8| { v };
- //[b]~^ ERROR non-capturing closure to fn coercion is experimental
- //[b]~^^ ERROR mismatched types
- }
-
-}
-
-
-
#[rustc_variance] //~ ERROR the `#[rustc_variance]` attribute is just used for rustc unit tests and will never be stable
#[rustc_error] //~ ERROR the `#[rustc_error]` attribute is just used for rustc unit tests and will never be stable
-#[rustc_move_fragments] //~ ERROR the `#[rustc_move_fragments]` attribute is just used for rustc unit tests and will never be stable
#[rustc_foo]
//~^ ERROR unless otherwise specified, attributes with the prefix `rustc_` are reserved for internal compiler diagnostics
//~| ERROR E0191
//~| NOTE missing associated type `Output` value
Sub;
- //~^ ERROR E0225
+ //~^ ERROR E0393
+ //~| NOTE missing reference to `RHS`
+ //~| NOTE because of the default `Self` reference, type parameters must be specified on object types
+ //~| ERROR E0225
//~| NOTE non-Send/Sync additional trait
fn main() { }
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(parenthesized_params_in_types_and_modules)]
+//~^ NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+#![allow(dead_code, unused_variables)]
+#![feature(conservative_impl_trait)]
+
+fn main() {
+ { fn f<X: ::std::marker()::Send>() {} }
+ //~^ ERROR parenthesized parameters may only be used with a trait
+ //~| WARN previously accepted
+ //~| NOTE issue #42238
+
+ { fn f() -> impl ::std::marker()::Send { } }
+ //~^ ERROR parenthesized parameters may only be used with a trait
+ //~| WARN previously accepted
+ //~| NOTE issue #42238
+}
+
+#[derive(Clone)]
+struct X;
+
+impl ::std::marker()::Copy for X {}
+//~^ ERROR parenthesized parameters may only be used with a trait
+//~| WARN previously accepted
+//~| NOTE issue #42238
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(parenthesized_params_in_types_and_modules)]
+//~^ NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+#![allow(dead_code, unused_variables)]
+
+fn main() {
+ let x: usize() = 1;
+ //~^ ERROR parenthesized parameters may only be used with a trait
+ //~| WARN previously accepted
+ //~| NOTE issue #42238
+
+ let b: ::std::boxed()::Box<_> = Box::new(1);
+ //~^ ERROR parenthesized parameters may only be used with a trait
+ //~| WARN previously accepted
+ //~| NOTE issue #42238
+
+ macro_rules! pathexpr {
+ ($p:path) => { $p }
+ }
+
+ let p = pathexpr!(::std::str()::from_utf8)(b"foo").unwrap();
+ //~^ ERROR parenthesized parameters may only be used with a trait
+ //~| WARN previously accepted
+ //~| NOTE issue #42238
+
+ let p = pathexpr!(::std::str::from_utf8())(b"foo").unwrap();
+ //~^ ERROR parenthesized parameters may only be used with a trait
+ //~| WARN previously accepted
+ //~| NOTE issue #42238
+
+ let o : Box<::std::marker()::Send> = Box::new(1);
+ //~^ ERROR parenthesized parameters may only be used with a trait
+ //~| WARN previously accepted
+ //~| NOTE issue #42238
+
+ let o : Box<Send + ::std::marker()::Sync> = Box::new(1);
+ //~^ ERROR parenthesized parameters may only be used with a trait
+ //~| WARN previously accepted
+ //~| NOTE issue #42238
+}
+
+fn foo<X:Default>() {
+ let d : X() = Default::default();
+ //~^ ERROR parenthesized parameters may only be used with a trait
+ //~| WARN previously accepted
+ //~| NOTE issue #42238
+}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+const LENGTH: f64 = 2;
+
+struct Thing {
+ f: [[f64; 2]; LENGTH],
+ //~^ ERROR mismatched types
+ //~| expected usize, found f64
+}
+
+fn main() {
+ let _t = Thing { f: [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]] };
+}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-#![feature(closure_to_fn_coercion)]
-
fn main() {
let bar: fn(&mut u32) = |_| {};
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_attrs)]
+#![allow(warnings)]
+
+enum E {
+ A = {
+ enum F { B }
+ 0
+ }
+}
+
+#[rustc_error]
+fn main() {}
+//~^ ERROR compilation successful
+
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// These are all fairly trivial cases: unused variables or direct
-// drops of substructure.
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-#[rustc_move_fragments]
-pub fn test_noop() {
-}
-
-#[rustc_move_fragments]
-pub fn test_take(_x: D) {
- //~^ ERROR assigned_leaf_path: `$(local _x)`
-}
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_take_struct(_p: Pair<D, D>) {
- //~^ ERROR assigned_leaf_path: `$(local _p)`
-}
-
-#[rustc_move_fragments]
-pub fn test_drop_struct_part(p: Pair<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR moved_leaf_path: `$(local p).x`
- //~| ERROR unmoved_fragment: `$(local p).y`
- drop(p.x);
-}
-
-#[rustc_move_fragments]
-pub fn test_drop_tuple_part(p: (D, D)) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR moved_leaf_path: `$(local p).#0`
- //~| ERROR unmoved_fragment: `$(local p).#1`
- drop(p.0);
-}
-
-pub fn main() { }
+++ /dev/null
-// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// These are checking that enums are tracked; note that their output
-// paths include "downcasts" of the path to a particular enum.
-
-#![feature(rustc_attrs)]
-
-use self::Lonely::{Zero, One, Two};
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub enum Lonely<X,Y> { Zero, One(X), Two(X, Y) }
-
-#[rustc_move_fragments]
-pub fn test_match_partial(p: Lonely<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR assigned_leaf_path: `($(local p) as Lonely::Zero)`
- match p {
- Zero => {}
- _ => {}
- }
-}
-
-#[rustc_move_fragments]
-pub fn test_match_full(p: Lonely<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR assigned_leaf_path: `($(local p) as Lonely::Zero)`
- //~| ERROR assigned_leaf_path: `($(local p) as Lonely::One)`
- //~| ERROR assigned_leaf_path: `($(local p) as Lonely::Two)`
- match p {
- Zero => {}
- One(..) => {}
- Two(..) => {}
- }
-}
-
-#[rustc_move_fragments]
-pub fn test_match_bind_one(p: Lonely<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR assigned_leaf_path: `($(local p) as Lonely::Zero)`
- //~| ERROR parent_of_fragments: `($(local p) as Lonely::One)`
- //~| ERROR moved_leaf_path: `($(local p) as Lonely::One).#0`
- //~| ERROR assigned_leaf_path: `($(local p) as Lonely::Two)`
- //~| ERROR assigned_leaf_path: `$(local data)`
- match p {
- Zero => {}
- One(data) => {}
- Two(..) => {}
- }
-}
-
-#[rustc_move_fragments]
-pub fn test_match_bind_many(p: Lonely<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR assigned_leaf_path: `($(local p) as Lonely::Zero)`
- //~| ERROR parent_of_fragments: `($(local p) as Lonely::One)`
- //~| ERROR moved_leaf_path: `($(local p) as Lonely::One).#0`
- //~| ERROR assigned_leaf_path: `$(local data)`
- //~| ERROR parent_of_fragments: `($(local p) as Lonely::Two)`
- //~| ERROR moved_leaf_path: `($(local p) as Lonely::Two).#0`
- //~| ERROR moved_leaf_path: `($(local p) as Lonely::Two).#1`
- //~| ERROR assigned_leaf_path: `$(local left)`
- //~| ERROR assigned_leaf_path: `$(local right)`
- match p {
- Zero => {}
- One(data) => {}
- Two(left, right) => {}
- }
-}
-
-pub fn main() { }
+++ /dev/null
-// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// This checks the handling of `_` within variants, especially when mixed
-// with bindings.
-
-#![feature(rustc_attrs)]
-
-use self::Lonely::{Zero, One, Two};
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub enum Lonely<X,Y> { Zero, One(X), Two(X, Y) }
-
-#[rustc_move_fragments]
-pub fn test_match_bind_and_underscore(p: Lonely<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR assigned_leaf_path: `($(local p) as Lonely::Zero)`
- //~| ERROR assigned_leaf_path: `($(local p) as Lonely::One)`
- //~| ERROR parent_of_fragments: `($(local p) as Lonely::Two)`
- //~| ERROR moved_leaf_path: `($(local p) as Lonely::Two).#0`
- //~| ERROR unmoved_fragment: `($(local p) as Lonely::Two).#1`
- //~| ERROR assigned_leaf_path: `$(local left)`
-
- match p {
- Zero => {}
-
- One(_) => {} // <-- does not fragment `($(local p) as One)` ...
-
- Two(left, _) => {} // <-- ... *does* fragment `($(local p) as Two)`.
- }
-}
-
-pub fn main() { }
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// This checks that a move of deep structure is properly tracked. (An
-// early draft of the code did not properly traverse up through all of
-// the parents of the leaf fragment.)
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_move_substructure(pppp: Pair<Pair<Pair<Pair<D,D>, D>, D>, D>) {
- //~^ ERROR parent_of_fragments: `$(local pppp)`
- //~| ERROR parent_of_fragments: `$(local pppp).x`
- //~| ERROR parent_of_fragments: `$(local pppp).x.x`
- //~| ERROR unmoved_fragment: `$(local pppp).x.x.x`
- //~| ERROR moved_leaf_path: `$(local pppp).x.x.y`
- //~| ERROR unmoved_fragment: `$(local pppp).x.y`
- //~| ERROR unmoved_fragment: `$(local pppp).y`
- drop(pppp.x.x.y);
-}
-
-pub fn main() { }
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// This is the first test that checks moving into local variables.
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_move_field_to_local(p: Pair<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR moved_leaf_path: `$(local p).x`
- //~| ERROR unmoved_fragment: `$(local p).y`
- //~| ERROR assigned_leaf_path: `$(local _x)`
- let _x = p.x;
-}
-
-#[rustc_move_fragments]
-pub fn test_move_field_to_local_to_local(p: Pair<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR moved_leaf_path: `$(local p).x`
- //~| ERROR unmoved_fragment: `$(local p).y`
- //~| ERROR assigned_leaf_path: `$(local _x)`
- //~| ERROR moved_leaf_path: `$(local _x)`
- //~| ERROR assigned_leaf_path: `$(local _y)`
- let _x = p.x;
- let _y = _x;
-}
-
-// In the following fn's `test_move_field_to_local_delayed` and
-// `test_uninitialized_local` , the instrumentation reports that `_x`
-// is moved. This is unlike `test_move_field_to_local`, where `_x` is
-// just reported as an assigned_leaf_path. Presumably because this is
-// how we represent that it did not have an initializing expression at
-// the binding site.
-
-#[rustc_move_fragments]
-pub fn test_uninitialized_local(_p: Pair<D, D>) {
- //~^ ERROR assigned_leaf_path: `$(local _p)`
- //~| ERROR moved_leaf_path: `$(local _x)`
- let _x: D;
-}
-
-#[rustc_move_fragments]
-pub fn test_move_field_to_local_delayed(p: Pair<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR moved_leaf_path: `$(local p).x`
- //~| ERROR unmoved_fragment: `$(local p).y`
- //~| ERROR assigned_leaf_path: `$(local _x)`
- //~| ERROR moved_leaf_path: `$(local _x)`
- let _x;
- _x = p.x;
-}
-
-#[rustc_move_fragments]
-pub fn test_move_field_mut_to_local(mut p: Pair<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local mut p)`
- //~| ERROR moved_leaf_path: `$(local mut p).x`
- //~| ERROR unmoved_fragment: `$(local mut p).y`
- //~| ERROR assigned_leaf_path: `$(local _x)`
- let _x = p.x;
-}
-
-#[rustc_move_fragments]
-pub fn test_move_field_to_local_to_local_mut(p: Pair<D, D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR moved_leaf_path: `$(local p).x`
- //~| ERROR unmoved_fragment: `$(local p).y`
- //~| ERROR assigned_leaf_path: `$(local mut _x)`
- //~| ERROR moved_leaf_path: `$(local mut _x)`
- //~| ERROR assigned_leaf_path: `$(local _y)`
- let mut _x = p.x;
- let _y = _x;
-}
-
-pub fn main() {}
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// Test that moving into a field (i.e. overwriting it) fragments the
-// receiver.
-
-#![feature(rustc_attrs)]
-
-use std::mem::drop;
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_overwrite_uninit_field<Z>(z: Z) {
- //~^ ERROR parent_of_fragments: `$(local mut p)`
- //~| ERROR assigned_leaf_path: `$(local z)`
- //~| ERROR moved_leaf_path: `$(local z)`
- //~| ERROR assigned_leaf_path: `$(local mut p).x`
- //~| ERROR unmoved_fragment: `$(local mut p).y`
-
- let mut p: Pair<Z,Z>;
- p.x = z;
-}
-
-#[rustc_move_fragments]
-pub fn test_overwrite_moved_field<Z>(mut p: Pair<Z,Z>, z: Z) {
- //~^ ERROR parent_of_fragments: `$(local mut p)`
- //~| ERROR assigned_leaf_path: `$(local z)`
- //~| ERROR moved_leaf_path: `$(local z)`
- //~| ERROR assigned_leaf_path: `$(local mut p).y`
- //~| ERROR unmoved_fragment: `$(local mut p).x`
-
- drop(p);
- p.y = z;
-}
-
-#[rustc_move_fragments]
-pub fn test_overwrite_same_field<Z>(mut p: Pair<Z,Z>) {
- //~^ ERROR parent_of_fragments: `$(local mut p)`
- //~| ERROR moved_leaf_path: `$(local mut p).x`
- //~| ERROR assigned_leaf_path: `$(local mut p).x`
- //~| ERROR unmoved_fragment: `$(local mut p).y`
-
- p.x = p.x;
-}
-
-pub fn main() { }
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// Test that moving a Box<T> fragments its containing structure, for
-// both moving out of the structure (i.e. reading `*p.x`) and writing
-// into the container (i.e. writing `*p.x`).
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_deref_box_field(p: Pair<Box<D>, Box<D>>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR parent_of_fragments: `$(local p).x`
- //~| ERROR moved_leaf_path: `$(local p).x.*`
- //~| ERROR unmoved_fragment: `$(local p).y`
- //~| ERROR assigned_leaf_path: `$(local i)`
- let i : D = *p.x;
-}
-
-#[rustc_move_fragments]
-pub fn test_overwrite_deref_box_field(mut p: Pair<Box<D>, Box<D>>) {
- //~^ ERROR parent_of_fragments: `$(local mut p)`
- //~| ERROR parent_of_fragments: `$(local mut p).x`
- //~| ERROR assigned_leaf_path: `$(local mut p).x.*`
- //~| ERROR unmoved_fragment: `$(local mut p).y`
- *p.x = D { d: 3 };
-}
-
-pub fn main() { }
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// Test that assigning into a `&T` within structured container does
-// *not* fragment its containing structure.
-//
-// Compare against the `Box<T>` handling in move-fragments-7.rs. Note
-// also that in this case we cannot do a move out of `&T`, so we only
-// test writing `*p.x` here.
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_overwrite_deref_ampersand_field<'a>(p: Pair<&'a mut D, &'a D>) {
- //~^ ERROR parent_of_fragments: `$(local p)`
- //~| ERROR parent_of_fragments: `$(local p).x`
- //~| ERROR assigned_leaf_path: `$(local p).x.*`
- //~| ERROR unmoved_fragment: `$(local p).y`
- *p.x = D { d: 3 };
-}
-
-pub fn main() { }
+++ /dev/null
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test moving array structures, e.g. `[T; 3]` as well as moving
-// elements in and out of such arrays.
-//
-// Note also that the `test_move_array_then_overwrite` tests represent
-// cases that we probably should make illegal.
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-#[rustc_move_fragments]
-pub fn test_move_array_via_return(a: [D; 3]) -> [D; 3] {
- //~^ ERROR assigned_leaf_path: `$(local a)`
- //~| ERROR moved_leaf_path: `$(local a)`
- return a;
-}
-
-#[rustc_move_fragments]
-pub fn test_move_array_into_recv(a: [D; 3], recv: &mut [D; 3]) {
- //~^ ERROR parent_of_fragments: `$(local recv)`
- //~| ERROR assigned_leaf_path: `$(local a)`
- //~| ERROR moved_leaf_path: `$(local a)`
- //~| ERROR assigned_leaf_path: `$(local recv).*`
- *recv = a;
-}
-
-#[rustc_move_fragments]
-pub fn test_overwrite_array_elem(mut a: [D; 3], i: usize, d: D) {
- //~^ ERROR parent_of_fragments: `$(local mut a)`
- //~| ERROR assigned_leaf_path: `$(local i)`
- //~| ERROR assigned_leaf_path: `$(local d)`
- //~| ERROR moved_leaf_path: `$(local d)`
- //~| ERROR assigned_leaf_path: `$(local mut a).[]`
- //~| ERROR unmoved_fragment: `$(allbutone $(local mut a).[])`
- a[i] = d;
-}
-
-pub fn main() { }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-fn baz(f: extern "cdecl" fn(usize, ...)) {
- //~^ ERROR: variadic function must have C calling convention
+// ignore-arm stdcall isn't supported
+
+fn baz(f: extern "stdcall" fn(usize, ...)) {
+ //~^ ERROR: variadic function must have C or cdecl calling convention
f(22, 44);
}
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-extern "cdecl" {
- fn printf(_: *const u8, ...); //~ ERROR: variadic function must have C calling convention
+// ignore-arm stdcall isn't supported
+
+extern "stdcall" {
+ fn printf(_: *const u8, ...); //~ ERROR: variadic function must have C or cdecl calling
}
extern {
// except according to those terms.
// no-prefer-dynamic
+// compile-flags: -Z query-dep-graph
#![crate_type="rlib"]
// option. This file may not be copied, modified, or distributed
// except according to those terms.
+// compile-flags: -Z query-dep-graph
+
#![crate_type="rlib"]
#[cfg(rpass1)]
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// check that we clear the "ADT master drop flag" even when there are
+// no fields to be dropped.
+
+fn main() {
+ let e;
+ if cond() {
+ e = E::F(K);
+ if let E::F(_k) = e {
+ // older versions of rustc used to not clear the
+ // drop flag for `e` in this path.
+ }
+ }
+}
+
+fn cond() -> bool { false }
+
+struct K;
+
+enum E {
+ F(K),
+ G(Box<E>)
+}
+
+// END RUST SOURCE
+// fn main() -> () {
+// let mut _0: ();
+// scope 1 {
+// let _1: E; // `e`
+// scope 2 {
+// let _6: K;
+// }
+// }
+// let mut _2: bool;
+// let mut _3: ();
+// let mut _4: E;
+// let mut _5: K;
+// let mut _7: isize;
+// let mut _8: bool; // drop flag for `e`
+// let mut _9: bool;
+// let mut _10: bool;
+// let mut _11: isize;
+// let mut _12: isize;
+//
+// bb0: {
+// _8 = const false;
+// _10 = const false;
+// _9 = const false;
+// StorageLive(_1);
+// StorageLive(_2);
+// _2 = const cond() -> [return: bb3, unwind: bb2];
+// }
+//
+// bb1: {
+// resume;
+// }
+//
+// bb2: {
+// goto -> bb1;
+// }
+//
+// bb3: {
+// switchInt(_2) -> [0u8: bb5, otherwise: bb4];
+// }
+//
+// bb4: {
+// StorageLive(_4);
+// StorageLive(_5);
+// _5 = K::{{constructor}};
+// _4 = E::F(_5,);
+// StorageDead(_5);
+// goto -> bb15;
+// }
+//
+// bb5: {
+// _0 = ();
+// goto -> bb12;
+// }
+//
+// bb6: {
+// goto -> bb2;
+// }
+//
+// bb7: {
+// goto -> bb8;
+// }
+//
+// bb8: {
+// StorageDead(_4);
+// _7 = discriminant(_1);
+// switchInt(_7) -> [0isize: bb10, otherwise: bb9];
+// }
+//
+// bb9: {
+// _0 = ();
+// goto -> bb11;
+// }
+//
+// bb10: {
+// StorageLive(_6);
+// _10 = const false;
+// _6 = ((_1 as F).0: K);
+// _0 = ();
+// goto -> bb11;
+// }
+//
+// bb11: {
+// StorageDead(_6);
+// goto -> bb12;
+// }
+//
+// bb12: {
+// StorageDead(_2);
+// goto -> bb22;
+// }
+//
+// bb13: {
+// StorageDead(_1);
+// return;
+// }
+//
+// bb14: {
+// _8 = const true;
+// _9 = const true;
+// _10 = const true;
+// _1 = _4;
+// goto -> bb6;
+// }
+//
+// bb15: {
+// _8 = const true;
+// _9 = const true;
+// _10 = const true;
+// _1 = _4;
+// goto -> bb7;
+// }
+//
+// bb16: {
+// _8 = const false; // clear the drop flag - must always be reached
+// goto -> bb13;
+// }
+//
+// bb17: {
+// _8 = const false;
+// goto -> bb1;
+// }
+//
+// bb18: {
+// goto -> bb17;
+// }
+//
+// bb19: {
+// drop(_1) -> [return: bb16, unwind: bb17];
+// }
+//
+// bb20: {
+// drop(_1) -> bb17;
+// }
+//
+// bb21: {
+// _11 = discriminant(_1);
+// switchInt(_11) -> [0isize: bb16, otherwise: bb19];
+// }
+//
+// bb22: {
+// switchInt(_8) -> [0u8: bb16, otherwise: bb21];
+// }
+//
+// bb23: {
+// _12 = discriminant(_1);
+// switchInt(_12) -> [0isize: bb18, otherwise: bb20];
+// }
+//
+// bb24: {
+// switchInt(_8) -> [0u8: bb17, otherwise: bb23];
+// }
+// }
// option. This file may not be copied, modified, or distributed
// except according to those terms.
-// ignore-stage0: new feature, remove this when SNAP
-
-#![feature(closure_to_fn_coercion)]
-
const FOO: fn(u8) -> u8 = |v: u8| { v };
const BAR: [fn(&mut u32); 5] = [
// except according to those terms.
// Ensure that we deduce expected argument types when a `fn()` type is expected (#41755)
-#![feature(closure_to_fn_coercion)]
fn foo(f: fn(Vec<u32>) -> usize) { }
fn main() {
};
}
+struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>);
+fn struct_dynamic_drop(a: &Allocator, c0: bool, c1: bool, c: bool) {
+ for i in 0..2 {
+ let x;
+ let y;
+ if (c0 && i == 0) || (c1 && i == 1) {
+ x = (a.alloc(), a.alloc(), a.alloc());
+ y = TwoPtrs(a.alloc(), a.alloc());
+ if c {
+ drop(x.1);
+ drop(y.0);
+ }
+ }
+ }
+}
+
fn assignment2(a: &Allocator, c0: bool, c1: bool) {
let mut _v = a.alloc();
let mut _w = a.alloc();
}
}
+fn array_simple(a: &Allocator) {
+ let _x = [a.alloc(), a.alloc(), a.alloc(), a.alloc()];
+}
+
+fn vec_simple(a: &Allocator) {
+ let _x = vec![a.alloc(), a.alloc(), a.alloc(), a.alloc()];
+}
+
fn run_test<F>(mut f: F)
where F: FnMut(&Allocator)
{
run_test(|a| assignment1(a, false));
run_test(|a| assignment1(a, true));
+ run_test(|a| array_simple(a));
+ run_test(|a| vec_simple(a));
+
+ run_test(|a| struct_dynamic_drop(a, false, false, false));
+ run_test(|a| struct_dynamic_drop(a, false, false, true));
+ run_test(|a| struct_dynamic_drop(a, false, true, false));
+ run_test(|a| struct_dynamic_drop(a, false, true, true));
+ run_test(|a| struct_dynamic_drop(a, true, false, false));
+ run_test(|a| struct_dynamic_drop(a, true, false, true));
+ run_test(|a| struct_dynamic_drop(a, true, true, false));
+ run_test(|a| struct_dynamic_drop(a, true, true, true));
+
run_test_nopanic(|a| union1(a));
}
--- /dev/null
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() { let _ = g(Some(E::F(K))); }
+
+type R = Result<(), ()>;
+struct K;
+
+enum E {
+ F(K), // must not be built-in type
+ #[allow(dead_code)]
+ G(Box<E>, Box<E>),
+}
+
+fn translate(x: R) -> R { x }
+
+fn g(mut status: Option<E>) -> R {
+ loop {
+ match status {
+ Some(infix_or_postfix) => match infix_or_postfix {
+ E::F(_op) => { // <- must be captured by value
+ match Ok(()) {
+ Err(err) => return Err(err),
+ Ok(_) => {},
+ };
+ }
+ _ => (),
+ },
+ _ => match translate(Err(())) {
+ Err(err) => return Err(err),
+ Ok(_) => {},
+ }
+ }
+ status = None;
+ }
+}
--- /dev/null
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #42210.
+
+// compile-flags: -g
+
+trait Foo {
+ fn foo() { }
+}
+
+struct Bar;
+
+trait Baz {
+}
+
+impl Foo for (Bar, Baz) { }
+
+
+fn main() {
+ <(Bar, Baz) as Foo>::foo()
+}
14 | println!("Hello, World!");
| ^^^^^^^^^^^^^^^^^^^^^^^^^^
|
- = note: expands to `println! { "Hello, World!" }`
- = note: expands to `print! { concat ! ( "Hello, World!" , "/n" ) }`
+ = note: expanding `println! { "Hello, World!" }`
+ = note: to `print ! ( concat ! ( "Hello, World!" , "/n" ) )`
+ = note: expanding `print! { concat ! ( "Hello, World!" , "/n" ) }`
+ = note: to `$crate :: io :: _print ( format_args ! ( concat ! ( "Hello, World!" , "/n" ) )
+ )`
-Subproject commit 9fcdbb44fec18d10c086b6aba8143bc06a199761
+Subproject commit 82733b01471a2c62bb1cec966d888c52ff118914