Auto merge of #42264 - GuillaumeGomez:new-error-codes, r=Susurrus
author    bors <bors@rust-lang.org>
Mon, 29 May 2017 21:55:57 +0000 (21:55 +0000)
committer bors <bors@rust-lang.org>
Mon, 29 May 2017 21:55:57 +0000 (21:55 +0000)
New error codes

Part of #42229.

124 files changed:
README.md
src/Cargo.lock
src/bootstrap/config.toml.example
src/bootstrap/dist.rs
src/bootstrap/flags.rs
src/bootstrap/install.rs
src/bootstrap/mk/Makefile.in
src/bootstrap/step.rs
src/ci/docker/cross/Dockerfile
src/ci/docker/cross/install-mips-musl.sh [new file with mode: 0755]
src/ci/docker/cross/install-mipsel-musl.sh [new file with mode: 0755]
src/doc/unstable-book/src/SUMMARY.md
src/doc/unstable-book/src/language-features/closure-to-fn-coercion.md [deleted file]
src/liballoc/arc.rs
src/liballoc/rc.rs
src/libcollections/binary_heap.rs
src/libcore/cmp.rs
src/libcore/iter/mod.rs
src/libcore/tests/iter.rs
src/libcore/tests/lib.rs
src/librustc/dep_graph/dep_node.rs
src/librustc/dep_graph/dep_tracking_map.rs
src/librustc/dep_graph/graph.rs
src/librustc/ich/caching_codemap_view.rs
src/librustc/ich/hcx.rs
src/librustc/ich/impls_mir.rs
src/librustc/lib.rs
src/librustc/lint/builtin.rs
src/librustc/middle/cstore.rs
src/librustc/mir/mod.rs
src/librustc/mir/tcx.rs
src/librustc/mir/visit.rs
src/librustc/session/mod.rs
src/librustc/ty/context.rs
src/librustc/ty/item_path.rs
src/librustc/ty/maps.rs
src/librustc/ty/mod.rs
src/librustc/ty/util.rs
src/librustc_borrowck/borrowck/fragments.rs [deleted file]
src/librustc_borrowck/borrowck/mir/elaborate_drops.rs
src/librustc_borrowck/borrowck/mir/gather_moves.rs
src/librustc_borrowck/borrowck/mir/mod.rs
src/librustc_borrowck/borrowck/mod.rs
src/librustc_borrowck/borrowck/move_data.rs
src/librustc_data_structures/indexed_vec.rs
src/librustc_incremental/calculate_svh/mod.rs
src/librustc_incremental/persist/hash.rs
src/librustc_lint/builtin.rs
src/librustc_lint/lib.rs
src/librustc_metadata/cstore_impl.rs
src/librustc_metadata/encoder.rs
src/librustc_metadata/isolated_encoder.rs
src/librustc_mir/build/expr/as_rvalue.rs
src/librustc_mir/shim.rs
src/librustc_mir/transform/erase_regions.rs
src/librustc_mir/transform/qualify_consts.rs
src/librustc_mir/transform/type_check.rs
src/librustc_mir/util/elaborate_drops.rs
src/librustc_passes/mir_stats.rs
src/librustc_trans/abi.rs
src/librustc_trans/assert_module_sources.rs
src/librustc_trans/back/link.rs
src/librustc_trans/back/linker.rs
src/librustc_trans/back/msvc/mod.rs
src/librustc_trans/back/symbol_export.rs
src/librustc_trans/back/symbol_names.rs
src/librustc_trans/back/write.rs
src/librustc_trans/base.rs
src/librustc_trans/collector.rs
src/librustc_trans/common.rs
src/librustc_trans/context.rs
src/librustc_trans/debuginfo/gdb.rs
src/librustc_trans/debuginfo/metadata.rs
src/librustc_trans/debuginfo/mod.rs
src/librustc_trans/glue.rs
src/librustc_trans/lib.rs
src/librustc_trans/mir/analyze.rs
src/librustc_trans/mir/block.rs
src/librustc_trans/mir/constant.rs
src/librustc_trans/mir/mod.rs
src/librustc_trans/mir/operand.rs
src/librustc_trans/mir/rvalue.rs
src/librustc_trans/partitioning.rs
src/librustc_typeck/astconv.rs
src/librustc_typeck/check/coercion.rs
src/librustc_typeck/check/mod.rs
src/librustc_typeck/collect.rs
src/librustc_typeck/diagnostics.rs
src/librustc_typeck/lib.rs
src/librustdoc/clean/inline.rs
src/libsyntax/codemap.rs
src/libsyntax/ext/tt/macro_rules.rs
src/libsyntax/feature_gate.rs
src/rustllvm/RustWrapper.cpp
src/test/compile-fail/E0045.rs
src/test/compile-fail/feature-gate-closure_to_fn_coercion.rs [deleted file]
src/test/compile-fail/feature-gate-rustc-attrs.rs
src/test/compile-fail/issue-22560.rs
src/test/compile-fail/issue-32995-2.rs [new file with mode: 0644]
src/test/compile-fail/issue-32995.rs [new file with mode: 0644]
src/test/compile-fail/issue-39974.rs [new file with mode: 0644]
src/test/compile-fail/issue-40000.rs
src/test/compile-fail/issue-40350.rs [new file with mode: 0644]
src/test/compile-fail/move-fragments-1.rs [deleted file]
src/test/compile-fail/move-fragments-2.rs [deleted file]
src/test/compile-fail/move-fragments-3.rs [deleted file]
src/test/compile-fail/move-fragments-4.rs [deleted file]
src/test/compile-fail/move-fragments-5.rs [deleted file]
src/test/compile-fail/move-fragments-6.rs [deleted file]
src/test/compile-fail/move-fragments-7.rs [deleted file]
src/test/compile-fail/move-fragments-8.rs [deleted file]
src/test/compile-fail/move-fragments-9.rs [deleted file]
src/test/compile-fail/variadic-ffi-2.rs
src/test/compile-fail/variadic-ffi.rs
src/test/incremental/rlib_cross_crate/auxiliary/a.rs
src/test/incremental/type_alias_cross_crate/auxiliary/a.rs
src/test/mir-opt/issue-41888.rs [new file with mode: 0644]
src/test/run-pass/closure-to-fn-coercion.rs
src/test/run-pass/closure_to_fn_coercion-expected-types.rs
src/test/run-pass/dynamic-drop.rs
src/test/run-pass/issue-41888.rs [new file with mode: 0644]
src/test/run-pass/issue-42210.rs [new file with mode: 0644]
src/test/ui/macros/trace-macro.stderr
src/tools/cargo

index f387b4be6008f4b4722e9264d73921179fa8a91f..dbb5bf9ce38d63204d32cba2561c59350c9efe88 100644 (file)
--- a/README.md
+++ b/README.md
@@ -35,7 +35,7 @@ Read ["Installing Rust"] from [The Book].
 3. Build and install:
 
     ```sh
-    $ ./x.py build && sudo ./x.py dist --install
+    $ ./x.py build && sudo ./x.py install
     ```
 
     > ***Note:*** Install locations can be adjusted by copying the config file
@@ -43,7 +43,7 @@ Read ["Installing Rust"] from [The Book].
     > adjusting the `prefix` option under `[install]`. Various other options are
     > also supported, and are documented in the config file.
 
-    When complete, `sudo ./x.py dist --install` will place several programs into
+    When complete, `sudo ./x.py install` will place several programs into
     `/usr/local/bin`: `rustc`, the Rust compiler, and `rustdoc`, the
     API-documentation tool. This install does not include [Cargo],
     Rust's package manager, which you may also want to build.
@@ -96,7 +96,7 @@ build.
 4. Navigate to Rust's source code (or clone it), then build it:
 
    ```sh
-   $ ./x.py build && ./x.py dist --install
+   $ ./x.py build && ./x.py install
    ```
 
 #### MSVC
index e23bdbd9fd87f0788bc930f5cab9fc85389eb1d2..d55dd919bdf28cf9446672dc69b69bd9205990ef 100644 (file)
--- a/src/Cargo.lock
+++ b/src/Cargo.lock
@@ -165,14 +165,14 @@ dependencies = [
  "hamcrest 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "kernel32-sys 0.2.2 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)",
  "miow 0.2.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "num_cpus 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "openssl 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "psapi-sys 0.1.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)",
- "semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "semver 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_derive 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
  "serde_ignored 0.0.3 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -472,7 +472,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "bitflags 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
  "libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)",
- "libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)",
+ "libgit2-sys 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)",
  "openssl-probe 0.1.1 (registry+https://github.com/rust-lang/crates.io-index)",
  "openssl-sys 0.9.12 (registry+https://github.com/rust-lang/crates.io-index)",
  "url 1.4.0 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -600,7 +600,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 
 [[package]]
 name = "libgit2-sys"
-version = "0.6.10"
+version = "0.6.11"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "cmake 0.1.23 (registry+https://github.com/rust-lang/crates.io-index)",
@@ -1481,10 +1481,11 @@ dependencies = [
 
 [[package]]
 name = "semver"
-version = "0.6.0"
+version = "0.7.0"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 dependencies = [
  "semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)",
+ "serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)",
 ]
 
 [[package]]
@@ -2056,7 +2057,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum languageserver-types 0.10.0 (registry+https://github.com/rust-lang/crates.io-index)" = "97c2985bfcbbcb0189cfa25e1c10c1ac7111df2b6214b652c690127aefdf4e5b"
 "checksum lazy_static 0.2.8 (registry+https://github.com/rust-lang/crates.io-index)" = "3b37545ab726dd833ec6420aaba8231c5b320814b9029ad585555d2a03e94fbf"
 "checksum libc 0.2.22 (registry+https://github.com/rust-lang/crates.io-index)" = "babb8281da88cba992fa1f4ddec7d63ed96280a1a53ec9b919fd37b53d71e502"
-"checksum libgit2-sys 0.6.10 (registry+https://github.com/rust-lang/crates.io-index)" = "dd89dd7196d5fa35b659c3eaf3c1b14b9bd961bfd1a07dfca49adeb8a6aa3763"
+"checksum libgit2-sys 0.6.11 (registry+https://github.com/rust-lang/crates.io-index)" = "d9dc31ee90fb179b706d35fb672e91d0b74e950d7fb4ea7eae3c0f5ecbf2d3d3"
 "checksum libssh2-sys 0.2.6 (registry+https://github.com/rust-lang/crates.io-index)" = "0db4ec23611747ef772db1c4d650f8bd762f07b461727ec998f953c614024b75"
 "checksum libz-sys 1.0.13 (registry+https://github.com/rust-lang/crates.io-index)" = "e5ee912a45d686d393d5ac87fac15ba0ba18daae14e8e7543c63ebf7fb7e970c"
 "checksum log 0.3.7 (registry+https://github.com/rust-lang/crates.io-index)" = "5141eca02775a762cc6cd564d8d2c50f67c0ea3a372cbf1c51592b3e029e10ad"
@@ -2104,7 +2105,7 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
 "checksum rustc-serialize 0.3.24 (registry+https://github.com/rust-lang/crates.io-index)" = "dcf128d1287d2ea9d80910b5f1120d0b8eede3fbf1abe91c40d39ea7d51e6fda"
 "checksum rustfmt 0.8.4 (git+https://github.com/rust-lang-nursery/rustfmt)" = "<none>"
 "checksum same-file 0.1.3 (registry+https://github.com/rust-lang/crates.io-index)" = "d931a44fdaa43b8637009e7632a02adc4f2b2e0733c08caa4cf00e8da4a117a7"
-"checksum semver 0.6.0 (registry+https://github.com/rust-lang/crates.io-index)" = "7a3186ec9e65071a2095434b1f5bb24838d4e8e130f584c790f6033c79943537"
+"checksum semver 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "3fdd61b85a0fa777f7fb7c454b9189b2941b110d1385ce84d7f76efdf1606a85"
 "checksum semver-parser 0.7.0 (registry+https://github.com/rust-lang/crates.io-index)" = "388a1df253eca08550bef6c72392cfe7c30914bf41df5269b68cbd6ff8f570a3"
 "checksum serde 0.9.15 (registry+https://github.com/rust-lang/crates.io-index)" = "34b623917345a631dc9608d5194cc206b3fe6c3554cd1c75b937e55e285254af"
 "checksum serde 1.0.6 (registry+https://github.com/rust-lang/crates.io-index)" = "38a3db3a5757f68069aba764b793823ea9fb9717c42c016f8903f8add50f508a"
index 95cca96f7fcc027f1143a2f0a79393f6a651f7de..0eb6c4c82c4dd7df5dffe4400d4255622cc8b574 100644 (file)
--- a/src/bootstrap/config.toml.example
+++ b/src/bootstrap/config.toml.example
 # Note that this address should not contain a trailing slash as file names will
 # be appended to it.
 #upload-addr = "https://example.com/folder"
+
+# Whether to build a plain source tarball to upload
+# We disable that on Windows not to override the one already uploaded on S3
+# as the one built on Windows will contain backslashes in paths causing problems
+# on linux
+#src-tarball = true
index 14ddcc7cb323250963c4ff8daca123511ed762a7..a2be2cad8fbadb636d91d64d7b753bfc2323ec1f 100644 (file)
--- a/src/bootstrap/dist.rs
+++ b/src/bootstrap/dist.rs
 use channel;
 use util::{cp_r, libdir, is_dylib, cp_filtered, copy, exe};
 
-fn pkgname(build: &Build, component: &str) -> String {
+pub fn pkgname(build: &Build, component: &str) -> String {
     if component == "cargo" {
         format!("{}-{}", component, build.cargo_package_vers())
     } else if component == "rls" {
-        format!("{}-{}", component, build.package_vers(&build.release_num("rls")))
+        format!("{}-{}", component, build.rls_package_vers())
     } else {
         assert!(component.starts_with("rust"));
         format!("{}-{}", component, build.rust_package_vers())
@@ -489,39 +489,8 @@ pub fn analysis(build: &Build, compiler: &Compiler, target: &str) {
     t!(fs::remove_dir_all(&image));
 }
 
-const CARGO_VENDOR_VERSION: &'static str = "0.1.4";
-
-/// Creates the `rust-src` installer component and the plain source tarball
-pub fn rust_src(build: &Build) {
-    if !build.config.rust_dist_src {
-        return
-    }
-
-    println!("Dist src");
-
-    // Make sure that the root folder of tarball has the correct name
-    let plain_name = format!("rustc-{}-src", build.rust_package_vers());
-    let plain_dst_src = tmpdir(build).join(&plain_name);
-    let _ = fs::remove_dir_all(&plain_dst_src);
-    t!(fs::create_dir_all(&plain_dst_src));
-
-    // This is the set of root paths which will become part of the source package
-    let src_files = [
-        "COPYRIGHT",
-        "LICENSE-APACHE",
-        "LICENSE-MIT",
-        "CONTRIBUTING.md",
-        "README.md",
-        "RELEASES.md",
-        "configure",
-        "x.py",
-    ];
-    let src_dirs = [
-        "man",
-        "src",
-    ];
-
-    let filter_fn = move |path: &Path| {
+fn copy_src_dirs(build: &Build, src_dirs: &[&str], exclude_dirs: &[&str], dst_dir: &Path) {
+    fn filter_fn(exclude_dirs: &[&str], dir: &str, path: &Path) -> bool {
         let spath = match path.to_str() {
             Some(path) => path,
             None => return false,
@@ -537,6 +506,11 @@ pub fn rust_src(build: &Build) {
             }
         }
 
+        let full_path = Path::new(dir).join(path);
+        if exclude_dirs.iter().any(|excl| full_path == Path::new(excl)) {
+            return false;
+        }
+
         let excludes = [
             "CVS", "RCS", "SCCS", ".git", ".gitignore", ".gitmodules",
             ".gitattributes", ".cvsignore", ".svn", ".arch-ids", "{arch}",
@@ -546,63 +520,19 @@ pub fn rust_src(build: &Build) {
         !path.iter()
              .map(|s| s.to_str().unwrap())
              .any(|s| excludes.contains(&s))
-    };
-
-    // Copy the directories using our filter
-    for item in &src_dirs {
-        let dst = &plain_dst_src.join(item);
-        t!(fs::create_dir(dst));
-        cp_filtered(&build.src.join(item), dst, &filter_fn);
-    }
-    // Copy the files normally
-    for item in &src_files {
-        copy(&build.src.join(item), &plain_dst_src.join(item));
-    }
-
-    // If we're building from git sources, we need to vendor a complete distribution.
-    if build.src_is_git {
-        // Get cargo-vendor installed, if it isn't already.
-        let mut has_cargo_vendor = false;
-        let mut cmd = Command::new(&build.cargo);
-        for line in output(cmd.arg("install").arg("--list")).lines() {
-            has_cargo_vendor |= line.starts_with("cargo-vendor ");
-        }
-        if !has_cargo_vendor {
-            let mut cmd = Command::new(&build.cargo);
-            cmd.arg("install")
-               .arg("--force")
-               .arg("--debug")
-               .arg("--vers").arg(CARGO_VENDOR_VERSION)
-               .arg("cargo-vendor")
-               .env("RUSTC", &build.rustc);
-            build.run(&mut cmd);
-        }
-
-        // Vendor all Cargo dependencies
-        let mut cmd = Command::new(&build.cargo);
-        cmd.arg("vendor")
-           .current_dir(&plain_dst_src.join("src"));
-        build.run(&mut cmd);
     }
 
-    // Create the version file
-    write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
-
-    // Create plain source tarball
-    let mut tarball = rust_src_location(build);
-    tarball.set_extension(""); // strip .gz
-    tarball.set_extension(""); // strip .tar
-    if let Some(dir) = tarball.parent() {
-        t!(fs::create_dir_all(dir));
+    // Copy the directories using our filter
+    for item in src_dirs {
+        let dst = &dst_dir.join(item);
+        t!(fs::create_dir_all(dst));
+        cp_filtered(&build.src.join(item), dst, &|path| filter_fn(exclude_dirs, item, path));
     }
-    let mut cmd = rust_installer(build);
-    cmd.arg("tarball")
-       .arg("--input").arg(&plain_name)
-       .arg("--output").arg(&tarball)
-       .arg("--work-dir=.")
-       .current_dir(tmpdir(build));
-    build.run(&mut cmd);
+}
 
+/// Creates the `rust-src` installer component
+pub fn rust_src(build: &Build) {
+    println!("Dist src");
 
     let name = pkgname(build, "rust-src");
     let image = tmpdir(build).join(format!("{}-image", name));
@@ -619,6 +549,7 @@ pub fn rust_src(build: &Build) {
         "src/liballoc",
         "src/liballoc_jemalloc",
         "src/liballoc_system",
+        "src/libbacktrace",
         "src/libcollections",
         "src/libcompiler_builtins",
         "src/libcore",
@@ -634,13 +565,18 @@ pub fn rust_src(build: &Build) {
         "src/libstd_unicode",
         "src/libunwind",
         "src/rustc/libc_shim",
+        "src/libtest",
+        "src/libterm",
+        "src/libgetopts",
+        "src/compiler-rt",
+        "src/jemalloc",
+    ];
+    let std_src_dirs_exclude = [
+        "src/compiler-rt/test",
+        "src/jemalloc/test/unit",
     ];
 
-    for item in &std_src_dirs {
-        let dst = &dst_src.join(item);
-        t!(fs::create_dir_all(dst));
-        cp_r(&plain_dst_src.join(item), dst);
-    }
+    copy_src_dirs(build, &std_src_dirs[..], &std_src_dirs_exclude[..], &dst_src);
 
     // Create source tarball in rust-installer format
     let mut cmd = rust_installer(build);
@@ -657,7 +593,86 @@ pub fn rust_src(build: &Build) {
     build.run(&mut cmd);
 
     t!(fs::remove_dir_all(&image));
-    t!(fs::remove_dir_all(&plain_dst_src));
+}
+
+const CARGO_VENDOR_VERSION: &'static str = "0.1.4";
+
+/// Creates the plain source tarball
+pub fn plain_source_tarball(build: &Build) {
+    println!("Create plain source tarball");
+
+    // Make sure that the root folder of tarball has the correct name
+    let plain_name = format!("{}-src", pkgname(build, "rustc"));
+    let plain_dst_src = tmpdir(build).join(&plain_name);
+    let _ = fs::remove_dir_all(&plain_dst_src);
+    t!(fs::create_dir_all(&plain_dst_src));
+
+    // This is the set of root paths which will become part of the source package
+    let src_files = [
+        "COPYRIGHT",
+        "LICENSE-APACHE",
+        "LICENSE-MIT",
+        "CONTRIBUTING.md",
+        "README.md",
+        "RELEASES.md",
+        "configure",
+        "x.py",
+    ];
+    let src_dirs = [
+        "man",
+        "src",
+    ];
+
+    copy_src_dirs(build, &src_dirs[..], &[], &plain_dst_src);
+
+    // Copy the files normally
+    for item in &src_files {
+        copy(&build.src.join(item), &plain_dst_src.join(item));
+    }
+
+    // Create the version file
+    write_file(&plain_dst_src.join("version"), build.rust_version().as_bytes());
+
+    // If we're building from git sources, we need to vendor a complete distribution.
+    if build.src_is_git {
+        // Get cargo-vendor installed, if it isn't already.
+        let mut has_cargo_vendor = false;
+        let mut cmd = Command::new(&build.cargo);
+        for line in output(cmd.arg("install").arg("--list")).lines() {
+            has_cargo_vendor |= line.starts_with("cargo-vendor ");
+        }
+        if !has_cargo_vendor {
+            let mut cmd = Command::new(&build.cargo);
+            cmd.arg("install")
+               .arg("--force")
+               .arg("--debug")
+               .arg("--vers").arg(CARGO_VENDOR_VERSION)
+               .arg("cargo-vendor")
+               .env("RUSTC", &build.rustc);
+            build.run(&mut cmd);
+        }
+
+        // Vendor all Cargo dependencies
+        let mut cmd = Command::new(&build.cargo);
+        cmd.arg("vendor")
+           .current_dir(&plain_dst_src.join("src"));
+        build.run(&mut cmd);
+    }
+
+    // Create plain source tarball
+    let mut tarball = rust_src_location(build);
+    tarball.set_extension(""); // strip .gz
+    tarball.set_extension(""); // strip .tar
+    if let Some(dir) = tarball.parent() {
+        t!(fs::create_dir_all(dir));
+    }
+    let mut cmd = rust_installer(build);
+    cmd.arg("tarball")
+       .arg("--input").arg(&plain_name)
+       .arg("--output").arg(&tarball)
+       .arg("--work-dir=.")
+       .current_dir(tmpdir(build));
+    build.run(&mut cmd);
 }
 
 fn install(src: &Path, dstdir: &Path, perms: u32) {
index a1466d68a135aa4950b13eb9771c182626c162a8..fe4e18ab622cde83e701d96014314189a8775849 100644 (file)
--- a/src/bootstrap/flags.rs
+++ b/src/bootstrap/flags.rs
@@ -69,7 +69,9 @@ pub enum Subcommand {
     Clean,
     Dist {
         paths: Vec<PathBuf>,
-        install: bool,
+    },
+    Install {
+        paths: Vec<PathBuf>,
     },
 }
 
@@ -85,7 +87,8 @@ pub fn parse(args: &[String]) -> Flags {
     bench       Build and run some benchmarks
     doc         Build documentation
     clean       Clean out build directories
-    dist        Build and/or install distribution artifacts
+    dist        Build distribution artifacts
+    install     Install distribution artifacts
 
 To learn more about a subcommand, run `./x.py <subcommand> -h`");
 
@@ -125,7 +128,8 @@ pub fn parse(args: &[String]) -> Flags {
                                         || (s == "bench")
                                         || (s == "doc")
                                         || (s == "clean")
-                                        || (s == "dist"));
+                                        || (s == "dist")
+                                        || (s == "install"));
         let subcommand = match possible_subcommands.first() {
             Some(s) => s,
             None => {
@@ -139,7 +143,6 @@ pub fn parse(args: &[String]) -> Flags {
         match subcommand.as_str() {
             "test"  => { opts.optmulti("", "test-args", "extra arguments", "ARGS"); },
             "bench" => { opts.optmulti("", "test-args", "extra arguments", "ARGS"); },
-            "dist"  => { opts.optflag("", "install", "run installer as well"); },
             _ => { },
         };
 
@@ -281,7 +284,11 @@ pub fn parse(args: &[String]) -> Flags {
             "dist" => {
                 Subcommand::Dist {
                     paths: paths,
-                    install: matches.opt_present("install"),
+                }
+            }
+            "install" => {
+                Subcommand::Install {
+                    paths: paths,
                 }
             }
             _ => {
index dce0b1670e181612c25fb045b992e036154e3d99..21e21628dc9471249753b50ca7731f8902427f23 100644 (file)
--- a/src/bootstrap/install.rs
+++ b/src/bootstrap/install.rs
@@ -19,7 +19,7 @@
 use std::process::Command;
 
 use Build;
-use dist::{sanitize_sh, tmpdir};
+use dist::{pkgname, sanitize_sh, tmpdir};
 
 pub struct Installer<'a> {
     build: &'a Build,
@@ -29,6 +29,13 @@ pub struct Installer<'a> {
     bindir: PathBuf,
     libdir: PathBuf,
     mandir: PathBuf,
+    empty_dir: PathBuf,
+}
+
+impl<'a> Drop for Installer<'a> {
+    fn drop(&mut self) {
+        t!(fs::remove_dir_all(&self.empty_dir));
+    }
 }
 
 impl<'a> Installer<'a> {
@@ -61,6 +68,10 @@ pub fn new(build: &'a Build) -> Installer<'a> {
         let libdir = add_destdir(&libdir, &destdir);
         let mandir = add_destdir(&mandir, &destdir);
 
+        let empty_dir = build.out.join("tmp/empty_dir");
+
+        t!(fs::create_dir_all(&empty_dir));
+
         Installer {
             build,
             prefix,
@@ -69,52 +80,49 @@ pub fn new(build: &'a Build) -> Installer<'a> {
             bindir,
             libdir,
             mandir,
+            empty_dir,
         }
     }
 
-    /// Installs everything.
-    pub fn install(&self, stage: u32, host: &str) {
-        let empty_dir = self.build.out.join("tmp/empty_dir");
-        t!(fs::create_dir_all(&empty_dir));
-
-        if self.build.config.docs {
-            self.install_sh("docs", "rust-docs", &self.build.rust_package_vers(),
-                            stage, Some(host), &empty_dir);
-        }
+    pub fn install_docs(&self, stage: u32, host: &str) {
+        self.install_sh("docs", "rust-docs", stage, Some(host));
+    }
 
+    pub fn install_std(&self, stage: u32) {
         for target in self.build.config.target.iter() {
-            self.install_sh("std", "rust-std", &self.build.rust_package_vers(),
-                            stage, Some(target), &empty_dir);
+            self.install_sh("std", "rust-std", stage, Some(target));
         }
+    }
 
-        if self.build.config.extended {
-            self.install_sh("cargo", "cargo", &self.build.cargo_package_vers(),
-                            stage, Some(host), &empty_dir);
-            self.install_sh("rls", "rls", &self.build.rls_package_vers(),
-                            stage, Some(host), &empty_dir);
-            self.install_sh("analysis", "rust-analysis", &self.build.rust_package_vers(),
-                            stage, Some(host), &empty_dir);
-            self.install_sh("src", "rust-src", &self.build.rust_package_vers(),
-                            stage, None, &empty_dir);
-        }
+    pub fn install_cargo(&self, stage: u32, host: &str) {
+        self.install_sh("cargo", "cargo", stage, Some(host));
+    }
 
-        self.install_sh("rustc", "rustc", &self.build.rust_package_vers(),
-                        stage, Some(host), &empty_dir);
+    pub fn install_rls(&self, stage: u32, host: &str) {
+        self.install_sh("rls", "rls", stage, Some(host));
+    }
+
+    pub fn install_analysis(&self, stage: u32, host: &str) {
+        self.install_sh("analysis", "rust-analysis", stage, Some(host));
+    }
 
-        t!(fs::remove_dir_all(&empty_dir));
+    pub fn install_src(&self, stage: u32) {
+        self.install_sh("src", "rust-src", stage, None);
+    }
+    pub fn install_rustc(&self, stage: u32, host: &str) {
+        self.install_sh("rustc", "rustc", stage, Some(host));
     }
 
-    fn install_sh(&self, package: &str, name: &str, version: &str,
-                  stage: u32, host: Option<&str>,  empty_dir: &Path) {
+    fn install_sh(&self, package: &str, name: &str, stage: u32, host: Option<&str>) {
         println!("Install {} stage{} ({:?})", package, stage, host);
         let package_name = if let Some(host) = host {
-            format!("{}-{}-{}", name, version, host)
+            format!("{}-{}", pkgname(self.build, name), host)
         } else {
-            format!("{}-{}", name, version)
+            pkgname(self.build, name)
         };
 
         let mut cmd = Command::new("sh");
-        cmd.current_dir(empty_dir)
+        cmd.current_dir(&self.empty_dir)
            .arg(sanitize_sh(&tmpdir(self.build).join(&package_name).join("install.sh")))
            .arg(format!("--prefix={}", sanitize_sh(&self.prefix)))
            .arg(format!("--sysconfdir={}", sanitize_sh(&self.sysconfdir)))
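The hunks above move the temporary `empty_dir` into the `Installer` itself: it is created once in `new()` and removed when the installer is dropped, so the individual `install_*` methods no longer have to thread it through as a parameter. Below is a minimal standalone sketch of that RAII pattern under illustrative names (this is not the bootstrap code itself):

```rust
use std::fs;
use std::path::PathBuf;

// Scratch directory that exists for as long as its owner does.
struct ScratchDir {
    path: PathBuf,
}

impl ScratchDir {
    fn new(path: PathBuf) -> std::io::Result<ScratchDir> {
        // Create the directory up front so every later step can assume it exists.
        fs::create_dir_all(&path)?;
        Ok(ScratchDir { path })
    }
}

impl Drop for ScratchDir {
    fn drop(&mut self) {
        // Best-effort cleanup when the owner goes out of scope.
        let _ = fs::remove_dir_all(&self.path);
    }
}

fn main() -> std::io::Result<()> {
    let _scratch = ScratchDir::new(PathBuf::from("/tmp/empty_dir_example"))?;
    // ... run steps that need an existing (empty) working directory ...
    Ok(()) // `_scratch` is removed here by `Drop`.
}
```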
index a5df741e2bfc83965d5f6fbc722fd90dd6c030bb..47c792a510b1b265e1cf9225c8420b400df3898d 100644 (file)
--- a/src/bootstrap/mk/Makefile.in
+++ b/src/bootstrap/mk/Makefile.in
@@ -69,7 +69,7 @@ distcheck:
        $(Q)$(BOOTSTRAP) dist $(BOOTSTRAP_ARGS)
        $(Q)$(BOOTSTRAP) test distcheck $(BOOTSTRAP_ARGS)
 install:
-       $(Q)$(BOOTSTRAP) dist --install $(BOOTSTRAP_ARGS)
+       $(Q)$(BOOTSTRAP) install $(BOOTSTRAP_ARGS)
 tidy:
        $(Q)$(BOOTSTRAP) test src/tools/tidy $(BOOTSTRAP_ARGS)
 prepare:
index 57915446e1d1aee8e94586284d7b2546abd5c8fb..16029a8a0cccd49c68fb55bf675fd6ac6a818f2b 100644 (file)
--- a/src/bootstrap/step.rs
+++ b/src/bootstrap/step.rs
@@ -492,6 +492,7 @@ fn crate_rule<'a, 'b>(build: &'a Build,
          .host(true)
          .run(move |s| check::docs(build, &s.compiler()));
     rules.test("check-distcheck", "distcheck")
+         .dep(|s| s.name("dist-plain-source-tarball"))
          .dep(|s| s.name("dist-src"))
          .run(move |_| check::distcheck(build));
 
@@ -734,6 +735,13 @@ fn crate_rule<'a, 'b>(build: &'a Build,
                  dist::mingw(build, s.target)
              }
          });
+    rules.dist("dist-plain-source-tarball", "src")
+         .default(build.config.rust_dist_src)
+         .host(true)
+         .only_build(true)
+         .only_host_build(true)
+         .dep(move |s| tool_rust_installer(build, s))
+         .run(move |_| dist::plain_source_tarball(build));
     rules.dist("dist-src", "src")
          .default(true)
          .host(true)
@@ -759,9 +767,6 @@ fn crate_rule<'a, 'b>(build: &'a Build,
          .dep(|s| s.name("tool-rls"))
          .dep(move |s| tool_rust_installer(build, s))
          .run(move |s| dist::rls(build, s.stage, s.target));
-    rules.dist("install", "path/to/nowhere")
-         .dep(|s| s.name("default:dist"))
-         .run(move |s| install::Installer::new(build).install(s.stage, s.target));
     rules.dist("dist-cargo", "cargo")
          .host(true)
          .only_host_build(true)
@@ -789,6 +794,47 @@ fn crate_rule<'a, 'b>(build: &'a Build,
          .dep(move |s| s.name("tool-build-manifest").target(&build.config.build).stage(0))
          .run(move |_| dist::hash_and_sign(build));
 
+    rules.install("install-docs", "src/doc")
+         .default(build.config.docs)
+         .only_host_build(true)
+         .dep(|s| s.name("dist-docs"))
+         .run(move |s| install::Installer::new(build).install_docs(s.stage, s.target));
+    rules.install("install-std", "src/libstd")
+         .default(true)
+         .only_host_build(true)
+         .dep(|s| s.name("dist-std"))
+         .run(move |s| install::Installer::new(build).install_std(s.stage));
+    rules.install("install-cargo", "cargo")
+         .default(build.config.extended)
+         .host(true)
+         .only_host_build(true)
+         .dep(|s| s.name("dist-cargo"))
+         .run(move |s| install::Installer::new(build).install_cargo(s.stage, s.target));
+    rules.install("install-rls", "rls")
+         .default(build.config.extended)
+         .host(true)
+         .only_host_build(true)
+         .dep(|s| s.name("dist-rls"))
+         .run(move |s| install::Installer::new(build).install_rls(s.stage, s.target));
+    rules.install("install-analysis", "analysis")
+         .default(build.config.extended)
+         .only_host_build(true)
+         .dep(|s| s.name("dist-analysis"))
+         .run(move |s| install::Installer::new(build).install_analysis(s.stage, s.target));
+    rules.install("install-src", "src")
+         .default(build.config.extended)
+         .host(true)
+         .only_build(true)
+         .only_host_build(true)
+         .dep(|s| s.name("dist-src"))
+         .run(move |s| install::Installer::new(build).install_src(s.stage));
+    rules.install("install-rustc", "src/librustc")
+         .default(true)
+         .host(true)
+         .only_host_build(true)
+         .dep(|s| s.name("dist-rustc"))
+         .run(move |s| install::Installer::new(build).install_rustc(s.stage, s.target));
+
     rules.verify();
     return rules;
 
@@ -902,6 +948,7 @@ enum Kind {
     Bench,
     Dist,
     Doc,
+    Install,
 }
 
 impl<'a> Rule<'a> {
@@ -1033,6 +1080,12 @@ fn dist<'b>(&'b mut self, name: &'a str, path: &'a str)
         self.rule(name, path, Kind::Dist)
     }
 
+    /// Same as `build`, but for `Kind::Install`.
+    fn install<'b>(&'b mut self, name: &'a str, path: &'a str)
+                -> RuleBuilder<'a, 'b> {
+        self.rule(name, path, Kind::Install)
+    }
+
     fn rule<'b>(&'b mut self,
                 name: &'a str,
                 path: &'a str,
@@ -1073,6 +1126,7 @@ pub fn get_help(&self, command: &str) -> Option<String> {
             "test" => Kind::Test,
             "bench" => Kind::Bench,
             "dist" => Kind::Dist,
+            "install" => Kind::Install,
             _ => return None,
         };
         let rules = self.rules.values().filter(|r| r.kind == kind);
@@ -1122,13 +1176,8 @@ fn plan(&self) -> Vec<Step<'a>> {
             Subcommand::Doc { ref paths } => (Kind::Doc, &paths[..]),
             Subcommand::Test { ref paths, test_args: _ } => (Kind::Test, &paths[..]),
             Subcommand::Bench { ref paths, test_args: _ } => (Kind::Bench, &paths[..]),
-            Subcommand::Dist { ref paths, install } => {
-                if install {
-                    return vec![self.sbuild.name("install")]
-                } else {
-                    (Kind::Dist, &paths[..])
-                }
-            }
+            Subcommand::Dist { ref paths } => (Kind::Dist, &paths[..]),
+            Subcommand::Install { ref paths } => (Kind::Install, &paths[..]),
             Subcommand::Clean => panic!(),
         };
 
@@ -1347,10 +1396,6 @@ mod tests {
     use config::Config;
     use flags::Flags;
 
-    macro_rules! a {
-        ($($a:expr),*) => (vec![$($a.to_string()),*])
-    }
-
     fn build(args: &[&str],
              extra_host: &[&str],
              extra_target: &[&str]) -> Build {
index 7759d91e1bb635c41ba6b89a426616ebda65a293..b4399a8d53ff0f6288d12dcb28950bb70712451d 100644 (file)
--- a/src/ci/docker/cross/Dockerfile
+++ b/src/ci/docker/cross/Dockerfile
@@ -21,14 +21,9 @@ RUN apt-get update && apt-get install -y --no-install-recommends \
   libssl-dev \
   pkg-config
 
-RUN curl -o /usr/local/bin/sccache \
-      https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/2017-05-12-sccache-x86_64-unknown-linux-musl && \
-      chmod +x /usr/local/bin/sccache
-
-RUN curl -OL https://github.com/Yelp/dumb-init/releases/download/v1.2.0/dumb-init_1.2.0_amd64.deb && \
-    dpkg -i dumb-init_*.deb && \
-    rm dumb-init_*.deb
-ENTRYPOINT ["/usr/bin/dumb-init", "--"]
+# dumb-init
+COPY scripts/dumb-init.sh /scripts/
+RUN sh /scripts/dumb-init.sh
 
 WORKDIR /tmp
 
@@ -38,23 +33,11 @@ RUN ./build-rumprun.sh
 COPY cross/build-arm-musl.sh /tmp/
 RUN ./build-arm-musl.sh
 
-# originally from
-# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2
-RUN mkdir /usr/local/mips-linux-musl
-RUN curl -L https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror/OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2 | \
-      tar xjf - -C /usr/local/mips-linux-musl --strip-components=2
-RUN for file in /usr/local/mips-linux-musl/bin/mips-openwrt-linux-*; do \
-      ln -s $file /usr/local/bin/`basename $file`; \
-    done
+COPY cross/install-mips-musl.sh /tmp/
+RUN ./install-mips-musl.sh
 
-# Note that this originally came from:
-# https://downloads.openwrt.org/snapshots/trunk/malta/generic/OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2
-RUN mkdir /usr/local/mipsel-linux-musl
-RUN curl -L https://s3.amazonaws.com/rust-lang-ci/libc/OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2 | \
-      tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2
-RUN for file in /usr/local/mipsel-linux-musl/bin/mipsel-openwrt-linux-*; do \
-      ln -s $file /usr/local/bin/`basename $file`; \
-    done
+COPY cross/install-mipsel-musl.sh /tmp/
+RUN ./install-mipsel-musl.sh
 
 ENV TARGETS=asmjs-unknown-emscripten
 ENV TARGETS=$TARGETS,wasm32-unknown-emscripten
@@ -80,3 +63,10 @@ ENV RUST_CONFIGURE_ARGS \
       --musl-root-armhf=/usr/local/arm-linux-musleabihf \
       --musl-root-armv7=/usr/local/armv7-linux-musleabihf
 ENV SCRIPT python2.7 ../x.py dist --target $TARGETS
+
+# sccache
+COPY scripts/sccache.sh /scripts/
+RUN sh /scripts/sccache.sh
+
+# init
+ENTRYPOINT ["/usr/bin/dumb-init", "--"]
diff --git a/src/ci/docker/cross/install-mips-musl.sh b/src/ci/docker/cross/install-mips-musl.sh
new file mode 100755 (executable)
index 0000000..416bb75
--- /dev/null
@@ -0,0 +1,24 @@
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+mkdir /usr/local/mips-linux-musl
+
+# originally from
+# https://downloads.openwrt.org/snapshots/trunk/ar71xx/generic/
+# OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2
+URL="https://s3.amazonaws.com/rust-lang-ci/rust-ci-mirror"
+FILE="OpenWrt-Toolchain-ar71xx-generic_gcc-5.3.0_musl-1.1.16.Linux-x86_64.tar.bz2"
+curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mips-linux-musl --strip-components=2
+
+for file in /usr/local/mips-linux-musl/bin/mips-openwrt-linux-*; do
+  ln -s $file /usr/local/bin/`basename $file`
+done
diff --git a/src/ci/docker/cross/install-mipsel-musl.sh b/src/ci/docker/cross/install-mipsel-musl.sh
new file mode 100755 (executable)
index 0000000..9744b24
--- /dev/null
@@ -0,0 +1,24 @@
+# Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+# file at the top-level directory of this distribution and at
+# http://rust-lang.org/COPYRIGHT.
+#
+# Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+# http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+# <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+# option. This file may not be copied, modified, or distributed
+# except according to those terms.
+
+set -ex
+
+mkdir /usr/local/mipsel-linux-musl
+
+# Note that this originally came from:
+# https://downloads.openwrt.org/snapshots/trunk/malta/generic/
+# OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2
+URL="https://s3.amazonaws.com/rust-lang-ci/libc"
+FILE="OpenWrt-Toolchain-malta-le_gcc-5.3.0_musl-1.1.15.Linux-x86_64.tar.bz2"
+curl -L "$URL/$FILE" | tar xjf - -C /usr/local/mipsel-linux-musl --strip-components=2
+
+for file in /usr/local/mipsel-linux-musl/bin/mipsel-openwrt-linux-*; do
+  ln -s $file /usr/local/bin/`basename $file`
+done
index 3b37c47e392807c65fb223b3f03a278b1ba031c1..8b70e8be38a2719aa2935944c628e8292adb7b18 100644 (file)
--- a/src/doc/unstable-book/src/SUMMARY.md
+++ b/src/doc/unstable-book/src/SUMMARY.md
@@ -25,7 +25,6 @@
     - [cfg_target_has_atomic](language-features/cfg-target-has-atomic.md)
     - [cfg_target_thread_local](language-features/cfg-target-thread-local.md)
     - [cfg_target_vendor](language-features/cfg-target-vendor.md)
-    - [closure_to_fn_coercion](language-features/closure-to-fn-coercion.md)
     - [compiler_builtins](language-features/compiler-builtins.md)
     - [concat_idents](language-features/concat-idents.md)
     - [conservative_impl_trait](language-features/conservative-impl-trait.md)
     - [io](library-features/io.md)
     - [ip](library-features/ip.md)
     - [iter_rfind](library-features/iter-rfind.md)
+    - [iterator_step_by](library-features/iterator-step-by.md)
     - [libstd_io_internals](library-features/libstd-io-internals.md)
     - [libstd_sys_internals](library-features/libstd-sys-internals.md)
     - [libstd_thread_internals](library-features/libstd-thread-internals.md)
diff --git a/src/doc/unstable-book/src/language-features/closure-to-fn-coercion.md b/src/doc/unstable-book/src/language-features/closure-to-fn-coercion.md
deleted file mode 100644 (file)
index 4e3b735..0000000
+++ /dev/null
@@ -1,7 +0,0 @@
-# `closure_to_fn_coercion`
-
-The tracking issue for this feature is: [#39817]
-
-[#39817]: https://github.com/rust-lang/rust/issues/39817
-
-------------------------
index 27ecefe043b1e333b7788a4444c74ff519c35be3..5faf4dcccaf9134a0b01d3ceb4313818359603cf 100644 (file)
--- a/src/liballoc/arc.rs
+++ b/src/liballoc/arc.rs
 /// strong `Arc` pointers from parent nodes to children, and [`Weak`][weak]
 /// pointers from children back to their parents.
 ///
+/// # Cloning references
+///
+/// Creating a new reference from an existing reference counted pointer is done using the
+/// `Clone` trait implemented for [`Arc<T>`][`arc`] and [`Weak<T>`][`weak`].
+///
+/// ```
+/// use std::sync::Arc;
+/// let foo = Arc::new(vec![1.0, 2.0, 3.0]);
+/// // The two syntaxes below are equivalent.
+/// let a = foo.clone();
+/// let b = Arc::clone(&foo);
+/// // a and b both point to the same memory location as foo.
+/// ```
+///
+/// The `Arc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
+/// the meaning of the code. In the example above, this syntax makes it easier to see that
+/// this code is creating a new reference rather than copying the whole content of foo.
+///
 /// ## `Deref` behavior
 ///
 /// `Arc<T>` automatically dereferences to `T` (via the [`Deref`][deref] trait),
 /// let five = Arc::new(5);
 ///
 /// for _ in 0..10 {
-///     let five = five.clone();
+///     let five = Arc::clone(&five);
 ///
 ///     thread::spawn(move || {
 ///         println!("{:?}", five);
 /// let val = Arc::new(AtomicUsize::new(5));
 ///
 /// for _ in 0..10 {
-///     let val = val.clone();
+///     let val = Arc::clone(&val);
 ///
 ///     thread::spawn(move || {
 ///         let v = val.fetch_add(1, Ordering::SeqCst);
@@ -282,7 +300,7 @@ pub fn new(data: T) -> Arc<T> {
     /// assert_eq!(Arc::try_unwrap(x), Ok(3));
     ///
     /// let x = Arc::new(4);
-    /// let _y = x.clone();
+    /// let _y = Arc::clone(&x);
     /// assert_eq!(*Arc::try_unwrap(x).unwrap_err(), 4);
     /// ```
     #[inline]
@@ -451,7 +469,7 @@ pub fn weak_count(this: &Self) -> usize {
     /// use std::sync::Arc;
     ///
     /// let five = Arc::new(5);
-    /// let _also_five = five.clone();
+    /// let _also_five = Arc::clone(&five);
     ///
     /// // This assertion is deterministic because we haven't shared
     /// // the `Arc` between threads.
@@ -499,7 +517,7 @@ unsafe fn drop_slow(&mut self) {
     /// use std::sync::Arc;
     ///
     /// let five = Arc::new(5);
-    /// let same_five = five.clone();
+    /// let same_five = Arc::clone(&five);
     /// let other_five = Arc::new(5);
     ///
     /// assert!(Arc::ptr_eq(&five, &same_five));
@@ -524,7 +542,7 @@ impl<T: ?Sized> Clone for Arc<T> {
     ///
     /// let five = Arc::new(5);
     ///
-    /// five.clone();
+    /// Arc::clone(&five);
     /// ```
     #[inline]
     fn clone(&self) -> Arc<T> {
@@ -591,7 +609,7 @@ impl<T: Clone> Arc<T> {
     /// let mut data = Arc::new(5);
     ///
     /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
-    /// let mut other_data = data.clone();      // Won't clone inner data
+    /// let mut other_data = Arc::clone(&data); // Won't clone inner data
     /// *Arc::make_mut(&mut data) += 1;         // Clones inner data
     /// *Arc::make_mut(&mut data) += 1;         // Won't clone anything
     /// *Arc::make_mut(&mut other_data) *= 2;   // Won't clone anything
@@ -679,7 +697,7 @@ impl<T: ?Sized> Arc<T> {
     /// *Arc::get_mut(&mut x).unwrap() = 4;
     /// assert_eq!(*x, 4);
     ///
-    /// let _y = x.clone();
+    /// let _y = Arc::clone(&x);
     /// assert!(Arc::get_mut(&mut x).is_none());
     /// ```
     #[inline]
@@ -751,7 +769,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Arc<T> {
     /// }
     ///
     /// let foo  = Arc::new(Foo);
-    /// let foo2 = foo.clone();
+    /// let foo2 = Arc::clone(&foo);
     ///
     /// drop(foo);    // Doesn't print anything
     /// drop(foo2);   // Prints "dropped!"
@@ -903,11 +921,11 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// # Examples
     ///
     /// ```
-    /// use std::sync::Arc;
+    /// use std::sync::{Arc, Weak};
     ///
     /// let weak_five = Arc::downgrade(&Arc::new(5));
     ///
-    /// weak_five.clone();
+    /// Weak::clone(&weak_five);
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
@@ -956,7 +974,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// # Examples
     ///
     /// ```
-    /// use std::sync::Arc;
+    /// use std::sync::{Arc, Weak};
     ///
     /// struct Foo;
     ///
@@ -968,7 +986,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     ///
     /// let foo = Arc::new(Foo);
     /// let weak_foo = Arc::downgrade(&foo);
-    /// let other_weak_foo = weak_foo.clone();
+    /// let other_weak_foo = Weak::clone(&weak_foo);
     ///
     /// drop(weak_foo);   // Doesn't print anything
     /// drop(foo);        // Prints "dropped!"
index d6dbf77bfac770466ff3dfb8de43e83397d217fd..33951b911dd51b469903fbfcd8b1b8b69ea86fea 100644 (file)
--- a/src/liballoc/rc.rs
+++ b/src/liballoc/rc.rs
 //! [`Weak<T>`][`Weak`] does not auto-dereference to `T`, because the value may have
 //! already been destroyed.
 //!
+//! # Cloning references
+//!
+//! Creating a new reference from an existing reference counted pointer is done using the
+//! `Clone` trait implemented for [`Rc<T>`][`Rc`] and [`Weak<T>`][`Weak`].
+//!
+//! ```
+//! use std::rc::Rc;
+//! let foo = Rc::new(vec![1.0, 2.0, 3.0]);
+//! // The two syntaxes below are equivalent.
+//! let a = foo.clone();
+//! let b = Rc::clone(&foo);
+//! // a and b both point to the same memory location as foo.
+//! ```
+//!
+//! The `Rc::clone(&from)` syntax is the most idiomatic because it conveys more explicitly
+//! the meaning of the code. In the example above, this syntax makes it easier to see that
+//! this code is creating a new reference rather than copying the whole content of foo.
+//!
 //! # Examples
 //!
 //! Consider a scenario where a set of `Gadget`s are owned by a given `Owner`.
 //!     // the reference count in the process.
 //!     let gadget1 = Gadget {
 //!         id: 1,
-//!         owner: gadget_owner.clone(),
+//!         owner: Rc::clone(&gadget_owner),
 //!     };
 //!     let gadget2 = Gadget {
 //!         id: 2,
-//!         owner: gadget_owner.clone(),
+//!         owner: Rc::clone(&gadget_owner),
 //!     };
 //!
 //!     // Dispose of our local variable `gadget_owner`.
 //!     let gadget1 = Rc::new(
 //!         Gadget {
 //!             id: 1,
-//!             owner: gadget_owner.clone(),
+//!             owner: Rc::clone(&gadget_owner),
 //!         }
 //!     );
 //!     let gadget2 = Rc::new(
 //!         Gadget {
 //!             id: 2,
-//!             owner: gadget_owner.clone(),
+//!             owner: Rc::clone(&gadget_owner),
 //!         }
 //!     );
 //!
@@ -316,7 +334,7 @@ pub fn new(value: T) -> Rc<T> {
     /// assert_eq!(Rc::try_unwrap(x), Ok(3));
     ///
     /// let x = Rc::new(4);
-    /// let _y = x.clone();
+    /// let _y = Rc::clone(&x);
     /// assert_eq!(*Rc::try_unwrap(x).unwrap_err(), 4);
     /// ```
     #[inline]
@@ -508,7 +526,7 @@ pub fn weak_count(this: &Self) -> usize {
     /// use std::rc::Rc;
     ///
     /// let five = Rc::new(5);
-    /// let _also_five = five.clone();
+    /// let _also_five = Rc::clone(&five);
     ///
     /// assert_eq!(2, Rc::strong_count(&five));
     /// ```
@@ -550,7 +568,7 @@ fn is_unique(this: &Self) -> bool {
     /// *Rc::get_mut(&mut x).unwrap() = 4;
     /// assert_eq!(*x, 4);
     ///
-    /// let _y = x.clone();
+    /// let _y = Rc::clone(&x);
     /// assert!(Rc::get_mut(&mut x).is_none());
     /// ```
     #[inline]
@@ -576,7 +594,7 @@ pub fn get_mut(this: &mut Self) -> Option<&mut T> {
     /// use std::rc::Rc;
     ///
     /// let five = Rc::new(5);
-    /// let same_five = five.clone();
+    /// let same_five = Rc::clone(&five);
     /// let other_five = Rc::new(5);
     ///
     /// assert!(Rc::ptr_eq(&five, &same_five));
@@ -608,7 +626,7 @@ impl<T: Clone> Rc<T> {
     /// let mut data = Rc::new(5);
     ///
     /// *Rc::make_mut(&mut data) += 1;        // Won't clone anything
-    /// let mut other_data = data.clone();    // Won't clone inner data
+    /// let mut other_data = Rc::clone(&data);    // Won't clone inner data
     /// *Rc::make_mut(&mut data) += 1;        // Clones inner data
     /// *Rc::make_mut(&mut data) += 1;        // Won't clone anything
     /// *Rc::make_mut(&mut other_data) *= 2;  // Won't clone anything
@@ -680,7 +698,7 @@ unsafe impl<#[may_dangle] T: ?Sized> Drop for Rc<T> {
     /// }
     ///
     /// let foo  = Rc::new(Foo);
-    /// let foo2 = foo.clone();
+    /// let foo2 = Rc::clone(&foo);
     ///
     /// drop(foo);    // Doesn't print anything
     /// drop(foo2);   // Prints "dropped!"
@@ -720,7 +738,7 @@ impl<T: ?Sized> Clone for Rc<T> {
     ///
     /// let five = Rc::new(5);
     ///
-    /// five.clone();
+    /// Rc::clone(&five);
     /// ```
     #[inline]
     fn clone(&self) -> Rc<T> {
@@ -1050,7 +1068,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     /// # Examples
     ///
     /// ```
-    /// use std::rc::Rc;
+    /// use std::rc::{Rc, Weak};
     ///
     /// struct Foo;
     ///
@@ -1062,7 +1080,7 @@ impl<T: ?Sized> Drop for Weak<T> {
     ///
     /// let foo = Rc::new(Foo);
     /// let weak_foo = Rc::downgrade(&foo);
-    /// let other_weak_foo = weak_foo.clone();
+    /// let other_weak_foo = Weak::clone(&weak_foo);
     ///
     /// drop(weak_foo);   // Doesn't print anything
     /// drop(foo);        // Prints "dropped!"
@@ -1090,11 +1108,11 @@ impl<T: ?Sized> Clone for Weak<T> {
     /// # Examples
     ///
     /// ```
-    /// use std::rc::Rc;
+    /// use std::rc::{Rc, Weak};
     ///
     /// let weak_five = Rc::downgrade(&Rc::new(5));
     ///
-    /// weak_five.clone();
+    /// Weak::clone(&weak_five);
     /// ```
     #[inline]
     fn clone(&self) -> Weak<T> {
index 4e9dea09f79a0d0a654e882a9699dc12f27b9ce7..988f88516255731e47a4f43cb9cdc9987b574afc 100644 (file)
--- a/src/libcollections/binary_heap.rs
+++ b/src/libcollections/binary_heap.rs
 //! // instead of a max-heap.
 //! impl Ord for State {
 //!     fn cmp(&self, other: &State) -> Ordering {
-//!         // Notice that the we flip the ordering here
+//!         // Notice that the we flip the ordering on costs.
+//!         // In case of a tie we compare positions - this step is necessary
+//!         // to make implementations of `PartialEq` and `Ord` consistent.
 //!         other.cost.cmp(&self.cost)
+//!             .then_with(|| self.position.cmp(&other.position))
 //!     }
 //! }
 //!
index d4544dadaeb0c6af0d70693baa787223a25e6859..661cf73c7f30e3f36daf2da8d55eea9399ca1ed1 100644 (file)
--- a/src/libcore/cmp.rs
+++ b/src/libcore/cmp.rs
 /// the rule that `eq` is a strict inverse of `ne`; that is, `!(a == b)` if and
 /// only if `a != b`.
 ///
+/// Implementations of `PartialEq`, `PartialOrd`, and `Ord` *must* agree with
+/// each other. It's easy to accidentally make them disagree by deriving some
+/// of the traits and manually implementing others.
+///
 /// An example implementation for a domain in which two books are considered
 /// the same book if their ISBN matches, even if the formats differ:
 ///
@@ -386,6 +390,10 @@ fn cmp(&self, other: &Reverse<T>) -> Ordering {
 /// Then you must define an implementation for `cmp()`. You may find it useful to use
 /// `cmp()` on your type's fields.
 ///
+/// Implementations of `PartialEq`, `PartialOrd`, and `Ord` *must* agree with each other. It's
+/// easy to accidentally make them disagree by deriving some of the traits and manually
+/// implementing others.
+///
 /// Here's an example where you want to sort people by height only, disregarding `id`
 /// and `name`:
 ///
@@ -474,8 +482,8 @@ fn partial_cmp(&self, other: &Ordering) -> Option<Ordering> {
 ///
 /// ## How can I implement `PartialOrd`?
 ///
-/// PartialOrd only requires implementation of the `partial_cmp` method, with the others generated
-/// from default implementations.
+/// `PartialOrd` only requires implementation of the `partial_cmp` method, with the others
+/// generated from default implementations.
 ///
 /// However it remains possible to implement the others separately for types which do not have a
 /// total order. For example, for floating point numbers, `NaN < 0 == false` and `NaN >= 0 ==
@@ -483,6 +491,10 @@ fn partial_cmp(&self, other: &Ordering) -> Option<Ordering> {
 ///
 /// `PartialOrd` requires your type to be `PartialEq`.
 ///
+/// Implementations of `PartialEq`, `PartialOrd`, and `Ord` *must* agree with each other. It's
+/// easy to accidentally make them disagree by deriving some of the traits and manually
+/// implementing others.
+///
 /// If your type is `Ord`, you can implement `partial_cmp()` by using `cmp()`:
 ///
 /// ```
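The new paragraphs above stress that `PartialEq`, `PartialOrd`, and `Ord` must agree with each other. A minimal sketch of one way to keep them consistent, mirroring the "sort people by height" example in these docs: derive nothing, define equality and ordering from the same key, and forward `partial_cmp` to `cmp`. The `Person` type here is illustrative, not part of the standard library:

```rust
use std::cmp::Ordering;

#[allow(dead_code)]
struct Person {
    id: u32,
    name: String,
    height: u32,
}

// Equality and ordering are both defined by `height` only,
// so the traits cannot drift apart.
impl PartialEq for Person {
    fn eq(&self, other: &Self) -> bool {
        self.height == other.height
    }
}

impl Eq for Person {}

impl Ord for Person {
    fn cmp(&self, other: &Self) -> Ordering {
        self.height.cmp(&other.height)
    }
}

impl PartialOrd for Person {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other)) // defer to `Ord` so the two orderings agree
    }
}

fn main() {
    let a = Person { id: 1, name: "a".to_string(), height: 170 };
    let b = Person { id: 2, name: "b".to_string(), height: 170 };
    // `==` and `cmp` report the same result, as the docs require.
    assert!(a == b && a.cmp(&b) == Ordering::Equal);
}
```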
index 5eefa59e7ea10198f7f9419c39395d0c4058153b..07aed65f7a05709b9725529e27b977c1b91e3aef 100644 (file)
--- a/src/libcore/iter/mod.rs
+++ b/src/libcore/iter/mod.rs
@@ -520,7 +520,7 @@ fn size_hint(&self) -> (usize, Option<usize>) {
 #[unstable(feature = "fused", issue = "35602")]
 impl<I> FusedIterator for Cycle<I> where I: Clone + Iterator {}
 
-/// An iterator that steps by n elements every iteration.
+/// An adapter for stepping iterators by a custom amount.
 ///
 /// This `struct` is created by the [`step_by`] method on [`Iterator`]. See
 /// its documentation for more.
@@ -553,8 +553,27 @@ fn next(&mut self) -> Option<Self::Item> {
             self.iter.nth(self.step)
         }
     }
+
+    #[inline]
+    fn size_hint(&self) -> (usize, Option<usize>) {
+        let inner_hint = self.iter.size_hint();
+
+        if self.first_take {
+            let f = |n| if n == 0 { 0 } else { 1 + (n-1)/(self.step+1) };
+            (f(inner_hint.0), inner_hint.1.map(f))
+        } else {
+            let f = |n| n / (self.step+1);
+            (f(inner_hint.0), inner_hint.1.map(f))
+        }
+    }
 }
 
+// StepBy can only make the iterator shorter, so the len will still fit.
+#[unstable(feature = "iterator_step_by",
+           reason = "unstable replacement of Range::step_by",
+           issue = "27741")]
+impl<I> ExactSizeIterator for StepBy<I> where I: ExactSizeIterator {}
+
 /// An iterator that strings two iterators together.
 ///
 /// This `struct` is created by the [`chain`] method on [`Iterator`]. See its
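To make the arithmetic in the new `size_hint` concrete: `self.step` stores the user-facing step minus one, so `self.step + 1` is the step itself. Before the first `next()` the hint is `1 + (n - 1) / step` (the first element is always taken); afterwards it is `n / step`. A small standalone check, assuming today's stable `step_by`, which follows the same formula:

```rust
fn main() {
    let v: Vec<u32> = (0..10).collect();
    let mut it = v.iter().step_by(3);
    // Before the first `next()`: 1 + (10 - 1) / 3 = 4 (indices 0, 3, 6, 9).
    assert_eq!(it.size_hint(), (4, Some(4)));
    it.next();
    // Afterwards: 9 base elements remain, every third is taken: 9 / 3 = 3.
    assert_eq!(it.size_hint(), (3, Some(3)));
    println!("size_hint matches the formula");
}
```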
index ad91ba9be58f210f87032b2b62e4f597c108cfe1..4030eaf2b23330f7701f988f38316a1e670d2289 100644 (file)
--- a/src/libcore/tests/iter.rs
+++ b/src/libcore/tests/iter.rs
@@ -171,6 +171,79 @@ fn test_iterator_step_by_zero() {
     it.next();
 }
 
+#[test]
+fn test_iterator_step_by_size_hint() {
+    struct StubSizeHint(usize, Option<usize>);
+    impl Iterator for StubSizeHint {
+        type Item = ();
+        fn next(&mut self) -> Option<()> {
+            self.0 -= 1;
+            if let Some(ref mut upper) = self.1 {
+                *upper -= 1;
+            }
+            Some(())
+        }
+        fn size_hint(&self) -> (usize, Option<usize>) {
+            (self.0, self.1)
+        }
+    }
+
+    // The two checks in each case are needed because the logic
+    // is different before the first call to `next()`.
+
+    let mut it = StubSizeHint(10, Some(10)).step_by(1);
+    assert_eq!(it.size_hint(), (10, Some(10)));
+    it.next();
+    assert_eq!(it.size_hint(), (9, Some(9)));
+
+    // exact multiple
+    let mut it = StubSizeHint(10, Some(10)).step_by(3);
+    assert_eq!(it.size_hint(), (4, Some(4)));
+    it.next();
+    assert_eq!(it.size_hint(), (3, Some(3)));
+
+    // larger base range, but not enough to get another element
+    let mut it = StubSizeHint(12, Some(12)).step_by(3);
+    assert_eq!(it.size_hint(), (4, Some(4)));
+    it.next();
+    assert_eq!(it.size_hint(), (3, Some(3)));
+
+    // smaller base range, so fewer resulting elements
+    let mut it = StubSizeHint(9, Some(9)).step_by(3);
+    assert_eq!(it.size_hint(), (3, Some(3)));
+    it.next();
+    assert_eq!(it.size_hint(), (2, Some(2)));
+
+    // infinite upper bound
+    let mut it = StubSizeHint(usize::MAX, None).step_by(1);
+    assert_eq!(it.size_hint(), (usize::MAX, None));
+    it.next();
+    assert_eq!(it.size_hint(), (usize::MAX-1, None));
+
+    // still infinite with larger step
+    let mut it = StubSizeHint(7, None).step_by(3);
+    assert_eq!(it.size_hint(), (3, None));
+    it.next();
+    assert_eq!(it.size_hint(), (2, None));
+
+    // propagates ExactSizeIterator
+    let a = [1,2,3,4,5];
+    let it = a.iter().step_by(2);
+    assert_eq!(it.len(), 3);
+
+    // Cannot be TrustedLen as a step greater than one makes an iterator
+    // with (usize::MAX, None) no longer meet the safety requirements
+    trait TrustedLenCheck { fn test(self) -> bool; }
+    impl<T:Iterator> TrustedLenCheck for T {
+        default fn test(self) -> bool { false }
+    }
+    impl<T:TrustedLen> TrustedLenCheck for T {
+        fn test(self) -> bool { true }
+    }
+    assert!(TrustedLenCheck::test(a.iter()));
+    assert!(!TrustedLenCheck::test(a.iter().step_by(1)));
+}
+
 #[test]
 fn test_filter_map() {
     let it = (0..).step_by(1).take(10)
index 8c4cd1d0c84583b20e961f9151c0e363686dbcac..e9f62dfbaed76c140bd866b11d5babea9c205884 100644 (file)
--- a/src/libcore/tests/lib.rs
+++ b/src/libcore/tests/lib.rs
 #![feature(slice_patterns)]
 #![feature(sort_internals)]
 #![feature(sort_unstable)]
+#![feature(specialization)]
 #![feature(step_by)]
 #![feature(step_trait)]
 #![feature(test)]
+#![feature(trusted_len)]
 #![feature(try_from)]
 #![feature(unicode)]
 #![feature(unique)]
index 291d0d7c937ed6a3a15621ee013ab8952161d449..38ad473e4042fc015fdf2881acb4c74612c87178 100644 (file)
--- a/src/librustc/dep_graph/dep_node.rs
+++ b/src/librustc/dep_graph/dep_node.rs
@@ -176,7 +176,6 @@ pub enum DepNode<D: Clone + Debug> {
     IsMirAvailable(D),
     ItemAttrs(D),
     FnArgNames(D),
-    FileMap(D, Arc<String>),
 }
 
 impl<D: Clone + Debug> DepNode<D> {
@@ -307,7 +306,6 @@ pub fn map_def<E, OP>(&self, mut op: OP) -> Option<DepNode<E>>
             ConstIsRvaluePromotableToStatic(ref d) => op(d).map(ConstIsRvaluePromotableToStatic),
             IsMirAvailable(ref d) => op(d).map(IsMirAvailable),
             GlobalMetaData(ref d, kind) => op(d).map(|d| GlobalMetaData(d, kind)),
-            FileMap(ref d, ref file_name) => op(d).map(|d| FileMap(d, file_name.clone())),
         }
     }
 }
index b6a2360211cac92610410d081de314d4c45a2148..7a246c814d3ec9b2fd983cc2cf66ea6a5e1eddd5 100644 (file)
@@ -11,7 +11,6 @@
 use hir::def_id::DefId;
 use rustc_data_structures::fx::FxHashMap;
 use std::cell::RefCell;
-use std::collections::hash_map::Entry;
 use std::ops::Index;
 use std::hash::Hash;
 use std::marker::PhantomData;
@@ -50,29 +49,11 @@ fn read(&self, k: &M::Key) {
         self.graph.read(dep_node);
     }
 
-    /// Registers a (synthetic) write to the key `k`. Usually this is
-    /// invoked automatically by `insert`.
-    fn write(&self, k: &M::Key) {
-        let dep_node = M::to_dep_node(k);
-        self.graph.write(dep_node);
-    }
-
     pub fn get(&self, k: &M::Key) -> Option<&M::Value> {
         self.read(k);
         self.map.get(k)
     }
 
-    pub fn insert(&mut self, k: M::Key, v: M::Value) {
-        self.write(&k);
-        let old_value = self.map.insert(k, v);
-        assert!(old_value.is_none());
-    }
-
-    pub fn entry(&mut self, k: M::Key) -> Entry<M::Key, M::Value> {
-        self.write(&k);
-        self.map.entry(k)
-    }
-
     pub fn contains_key(&self, k: &M::Key) -> bool {
         self.read(k);
         self.map.contains_key(k)
index 8be5d4327e72ec2c134e7746ecaa11985fc69b3c..18eb4e5d0ad73474f292d030f3fdf7e1ce518804 100644 (file)
@@ -117,12 +117,6 @@ pub fn read(&self, v: DepNode<DefId>) {
         }
     }
 
-    pub fn write(&self, v: DepNode<DefId>) {
-        if self.data.thread.is_enqueue_enabled() {
-            self.data.thread.enqueue(DepMessage::Write(v));
-        }
-    }
-
     /// Indicates that a previous work product exists for `v`. This is
     /// invoked during initial start-up based on what nodes are clean
     /// (and what files exist in the incr. directory).
index b21c3a2b216000e4072b3e2b11dbcccc637c6345..9aecd8ad83602cf515c565a7dd243aa2e0b01778 100644 (file)
@@ -8,11 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use dep_graph::{DepGraph, DepNode};
-use hir::def_id::{DefId, CrateNum, CRATE_DEF_INDEX};
-use rustc_data_structures::bitvec::BitVector;
 use std::rc::Rc;
-use std::sync::Arc;
 use syntax::codemap::CodeMap;
 use syntax_pos::{BytePos, FileMap};
 use ty::TyCtxt;
@@ -31,14 +27,12 @@ pub struct CachingCodemapView<'tcx> {
     codemap: &'tcx CodeMap,
     line_cache: [CacheEntry; 3],
     time_stamp: usize,
-    dep_graph: DepGraph,
-    dep_tracking_reads: BitVector,
 }
 
 impl<'tcx> CachingCodemapView<'tcx> {
     pub fn new<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> CachingCodemapView<'tcx> {
         let codemap = tcx.sess.codemap();
-        let files = codemap.files_untracked();
+        let files = codemap.files();
         let first_file = files[0].clone();
         let entry = CacheEntry {
             time_stamp: 0,
@@ -50,11 +44,9 @@ pub fn new<'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>) -> CachingCodemapView<'tcx> {
         };
 
         CachingCodemapView {
-            dep_graph: tcx.dep_graph.clone(),
             codemap: codemap,
             line_cache: [entry.clone(), entry.clone(), entry.clone()],
             time_stamp: 0,
-            dep_tracking_reads: BitVector::new(files.len()),
         }
     }
 
@@ -67,9 +59,6 @@ pub fn byte_pos_to_line_and_col(&mut self,
         for cache_entry in self.line_cache.iter_mut() {
             if pos >= cache_entry.line_start && pos < cache_entry.line_end {
                 cache_entry.time_stamp = self.time_stamp;
-                if self.dep_tracking_reads.insert(cache_entry.file_index) {
-                    self.dep_graph.read(dep_node(cache_entry));
-                }
 
                 return Some((cache_entry.file.clone(),
                              cache_entry.line_number,
@@ -90,7 +79,7 @@ pub fn byte_pos_to_line_and_col(&mut self,
         // If the entry doesn't point to the correct file, fix it up
         if pos < cache_entry.file.start_pos || pos >= cache_entry.file.end_pos {
             let file_valid;
-            let files = self.codemap.files_untracked();
+            let files = self.codemap.files();
 
             if files.len() > 0 {
                 let file_index = self.codemap.lookup_filemap_idx(pos);
@@ -120,21 +109,8 @@ pub fn byte_pos_to_line_and_col(&mut self,
         cache_entry.line_end = line_bounds.1;
         cache_entry.time_stamp = self.time_stamp;
 
-        if self.dep_tracking_reads.insert(cache_entry.file_index) {
-            self.dep_graph.read(dep_node(cache_entry));
-        }
-
         return Some((cache_entry.file.clone(),
                      cache_entry.line_number,
                      pos - cache_entry.line_start));
     }
 }
-
-fn dep_node(cache_entry: &CacheEntry) -> DepNode<DefId> {
-    let def_id = DefId {
-        krate: CrateNum::from_u32(cache_entry.file.crate_of_origin),
-        index: CRATE_DEF_INDEX,
-    };
-    let name = Arc::new(cache_entry.file.name.clone());
-    DepNode::FileMap(def_id, name)
-}
index f25ec8ecd4d71beee729bf73ba0107d38cc38fa8..f8dddc42e48ccf8d1c2d2e044f38711f83b83546 100644 (file)
@@ -74,6 +74,11 @@ pub fn new(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>) -> Self {
         }
     }
 
+    pub fn force_span_hashing(mut self) -> Self {
+        self.hash_spans = true;
+        self
+    }
+
     #[inline]
     pub fn while_hashing_hir_bodies<F: FnOnce(&mut Self)>(&mut self,
                                                           hash_bodies: bool,
index 3ff8ffb35054ad4be9764f6c80ea5b4f8b1481a1..edaeb596fe58ddb728cdb16eaf0cf52ab253fe19 100644 (file)
 impl_stable_hash_for!(enum mir::Mutability { Mut, Not });
 impl_stable_hash_for!(enum mir::BorrowKind { Shared, Unique, Mut });
 impl_stable_hash_for!(enum mir::LocalKind { Var, Temp, Arg, ReturnPointer });
-impl_stable_hash_for!(struct mir::LocalDecl<'tcx> { mutability, ty, name, source_info,
-is_user_variable});
+impl_stable_hash_for!(struct mir::LocalDecl<'tcx> {
+    mutability,
+    ty,
+    name,
+    source_info,
+    is_user_variable
+});
 impl_stable_hash_for!(struct mir::UpvarDecl { debug_name, by_ref });
 impl_stable_hash_for!(struct mir::BasicBlockData<'tcx> { statements, terminator, is_cleanup });
-impl_stable_hash_for!(struct mir::Terminator<'tcx> { source_info, kind });
+
+impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Terminator<'tcx> {
+    #[inline]
+    fn hash_stable<W: StableHasherResult>(&self,
+                                          hcx: &mut StableHashingContext<'a, 'tcx>,
+                                          hasher: &mut StableHasher<W>) {
+        let mir::Terminator {
+            ref kind,
+            ref source_info,
+        } = *self;
+
+        let hash_spans_unconditionally = match *kind {
+            mir::TerminatorKind::Assert { .. } => {
+                // Assert terminators generate a panic message that contains the
+                // source location, so we always have to feed its span into the
+                // ICH.
+                true
+            }
+            mir::TerminatorKind::Goto { .. } |
+            mir::TerminatorKind::SwitchInt { .. } |
+            mir::TerminatorKind::Resume |
+            mir::TerminatorKind::Return |
+            mir::TerminatorKind::Unreachable |
+            mir::TerminatorKind::Drop { .. } |
+            mir::TerminatorKind::DropAndReplace { .. } |
+            mir::TerminatorKind::Call { .. } => false,
+        };
+
+        if hash_spans_unconditionally {
+            hcx.while_hashing_spans(true, |hcx| {
+                source_info.hash_stable(hcx, hasher);
+            })
+        } else {
+            source_info.hash_stable(hcx, hasher);
+        }
+
+        kind.hash_stable(hcx, hasher);
+    }
+}
+
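
The exception carved out for `Assert` can be seen from ordinary user code: an out-of-bounds index lowers to an Assert terminator, and the panic it emits names the exact file and line, so the span genuinely affects the compiled output. A small illustration (plain user code, not compiler internals; expected to panic at runtime):

    fn main() {
        let xs = [1, 2, 3];
        // A runtime-only index keeps constant propagation from rejecting this
        // up front; the slice access lowers to MIR with an Assert terminator.
        let i = std::env::args().count() + 10;
        // The panic message produced here embeds this file name and line
        // number, which is why such spans must always be fed into the hash.
        println!("{}", xs[i]);
    }
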
 
 impl<'a, 'tcx> HashStable<StableHashingContext<'a, 'tcx>> for mir::Local {
     #[inline]
@@ -315,7 +359,8 @@ fn hash_stable<W: StableHasherResult>(&self,
             mir::Rvalue::Discriminant(ref lvalue) => {
                 lvalue.hash_stable(hcx, hasher);
             }
-            mir::Rvalue::Box(ty) => {
+            mir::Rvalue::NullaryOp(op, ty) => {
+                op.hash_stable(hcx, hasher);
                 ty.hash_stable(hcx, hasher);
             }
             mir::Rvalue::Aggregate(ref kind, ref operands) => {
@@ -374,7 +419,8 @@ fn hash_stable<W: StableHasherResult>(&self,
     Le,
     Ne,
     Ge,
-    Gt
+    Gt,
+    Offset
 });
 
 impl_stable_hash_for!(enum mir::UnOp {
@@ -382,6 +428,10 @@ fn hash_stable<W: StableHasherResult>(&self,
     Neg
 });
 
+impl_stable_hash_for!(enum mir::NullOp {
+    Box,
+    SizeOf
+});
 
 impl_stable_hash_for!(struct mir::Constant<'tcx> { span, ty, literal });
 
index d1d9dd4853d7baa19ee5a43266f8d8321f1ec131..2a877aca53b7cb618d7b4bed10b97c50df360317 100644 (file)
@@ -47,7 +47,7 @@
 #![cfg_attr(stage0, feature(staged_api))]
 #![cfg_attr(stage0, feature(loop_break_value))]
 
-#![recursion_limit="128"]
+#![recursion_limit="192"]
 
 extern crate arena;
 extern crate core;
index 07140f71aebaa1b68253f71e3aef28372800d397..0bc1be70174e6d64a0412c3820a27a7c6c656686 100644 (file)
     "detects missing fragment specifiers in unused `macro_rules!` patterns"
 }
 
+declare_lint! {
+    pub PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
+    Warn,
+    "detects parenthesized generic parameters in type and module names"
+}
+
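
For readers unfamiliar with the term, "parenthesized parameters" are the Fn-sugar form of generic arguments. A short sketch of the distinction the lint name refers to (illustrative code only, not a case that triggers the lint):

    // Fn-sugar: parenthesized parameters, accepted only for the Fn* traits.
    fn call(f: &dyn Fn(u32) -> u32) -> u32 { f(1) }
    // Everywhere else, generic arguments take the angle-bracket form.
    fn wrap(v: u32) -> Vec<u32> { vec![v] }

    fn main() { assert_eq!(call(&|x| x + 1) + wrap(2)[0], 4); }
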
 declare_lint! {
     pub DEPRECATED,
     Warn,
@@ -293,6 +299,7 @@ fn get_lints(&self) -> LintArray {
             LEGACY_IMPORTS,
             LEGACY_CONSTRUCTOR_VISIBILITY,
             MISSING_FRAGMENT_SPECIFIER,
+            PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
             DEPRECATED
         )
     }
index 8ad1db7859566d41d26147eeb7777084321e5dd1..6597db9e19bd9e2d49cf0b4c2d39bb1148f70145 100644 (file)
@@ -245,7 +245,6 @@ pub trait CrateStore {
 
     // flags
     fn is_const_fn(&self, did: DefId) -> bool;
-    fn is_default_impl(&self, impl_did: DefId) -> bool;
     fn is_dllimport_foreign_item(&self, def: DefId) -> bool;
     fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool;
 
@@ -364,7 +363,6 @@ fn associated_item_cloned(&self, def: DefId) -> ty::AssociatedItem
 
     // flags
     fn is_const_fn(&self, did: DefId) -> bool { bug!("is_const_fn") }
-    fn is_default_impl(&self, impl_did: DefId) -> bool { bug!("is_default_impl") }
     fn is_dllimport_foreign_item(&self, id: DefId) -> bool { false }
     fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool { false }
 
index fe2ad498e99610635faf0a4889717dc7a7422904..80c42917196dba4ce5c151ea14e4a3a3e7024455 100644 (file)
@@ -1046,6 +1046,7 @@ pub enum Rvalue<'tcx> {
     BinaryOp(BinOp, Operand<'tcx>, Operand<'tcx>),
     CheckedBinaryOp(BinOp, Operand<'tcx>, Operand<'tcx>),
 
+    NullaryOp(NullOp, Ty<'tcx>),
     UnaryOp(UnOp, Operand<'tcx>),
 
     /// Read the discriminant of an ADT.
@@ -1054,9 +1055,6 @@ pub enum Rvalue<'tcx> {
     /// be defined to return, say, a 0) if ADT is not an enum.
     Discriminant(Lvalue<'tcx>),
 
-    /// Creates an *uninitialized* Box
-    Box(Ty<'tcx>),
-
     /// Create an aggregate value, like a tuple or struct.  This is
     /// only needed because we want to distinguish `dest = Foo { x:
     /// ..., y: ... }` from `dest.x = ...; dest.y = ...;` in the case
@@ -1132,6 +1130,8 @@ pub enum BinOp {
     Ge,
     /// The `>` operator (greater than)
     Gt,
+    /// The `ptr.offset` operator
+    Offset,
 }
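
The new variant mirrors raw-pointer offsetting in surface Rust. The result has the type of the left operand (the pointer), which is why the `ty` computation further down groups `Offset` with `Shl`/`Shr`, whose operand types may also differ. A small illustrative example, not tied to the MIR types:

    fn main() {
        let xs = [10u8, 20, 30];
        let p: *const u8 = xs.as_ptr();
        // `ptr.offset(n)` takes a pointer and an isize and yields a pointer of
        // the same type as the left operand, the shape MIR models as BinOp::Offset.
        let q = unsafe { p.offset(2) };
        assert_eq!(unsafe { *q }, 30);
    }
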
 
 impl BinOp {
@@ -1144,6 +1144,14 @@ pub fn is_checkable(self) -> bool {
     }
 }
 
+#[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
+pub enum NullOp {
+    /// Return the size of a value of that type
+    SizeOf,
+    /// Create a new uninitialized box for a value of that type
+    Box,
+}
+
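
Both nullary operators correspond to operations that need only a type, not an operand. As a rough surface-level analogue (a sketch, not how rustc itself lowers these expressions):

    fn main() {
        // NullOp::SizeOf: the size of a type, with no value involved.
        assert_eq!(std::mem::size_of::<u64>(), 8);
        // NullOp::Box: allocation of a box for a value of a given type; in
        // stable surface Rust the closest analogue is Box::new, which also
        // initializes the allocation.
        let b: Box<u32> = Box::new(7);
        assert_eq!(*b, 7);
    }
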
 #[derive(Copy, Clone, Debug, PartialEq, Eq, RustcEncodable, RustcDecodable)]
 pub enum UnOp {
     /// The `!` operator for logical inversion
@@ -1167,7 +1175,7 @@ fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
             }
             UnaryOp(ref op, ref a) => write!(fmt, "{:?}({:?})", op, a),
             Discriminant(ref lval) => write!(fmt, "discriminant({:?})", lval),
-            Box(ref t) => write!(fmt, "Box({:?})", t),
+            NullaryOp(ref op, ref t) => write!(fmt, "{:?}({:?})", op, t),
             Ref(_, borrow_kind, ref lv) => {
                 let kind_str = match borrow_kind {
                     BorrowKind::Shared => "",
@@ -1601,7 +1609,7 @@ fn super_fold_with<'gcx: 'tcx, F: TypeFolder<'gcx, 'tcx>>(&self, folder: &mut F)
                 CheckedBinaryOp(op, rhs.fold_with(folder), lhs.fold_with(folder)),
             UnaryOp(op, ref val) => UnaryOp(op, val.fold_with(folder)),
             Discriminant(ref lval) => Discriminant(lval.fold_with(folder)),
-            Box(ty) => Box(ty.fold_with(folder)),
+            NullaryOp(op, ty) => NullaryOp(op, ty.fold_with(folder)),
             Aggregate(ref kind, ref fields) => {
                 let kind = box match **kind {
                     AggregateKind::Array(ty) => AggregateKind::Array(ty.fold_with(folder)),
@@ -1629,7 +1637,7 @@ fn super_visit_with<V: TypeVisitor<'tcx>>(&self, visitor: &mut V) -> bool {
                 rhs.visit_with(visitor) || lhs.visit_with(visitor),
             UnaryOp(_, ref val) => val.visit_with(visitor),
             Discriminant(ref lval) => lval.visit_with(visitor),
-            Box(ty) => ty.visit_with(visitor),
+            NullaryOp(_, ty) => ty.visit_with(visitor),
             Aggregate(ref kind, ref fields) => {
                 (match **kind {
                     AggregateKind::Array(ty) => ty.visit_with(visitor),
index 7bc1dc58c29d298ed84a42b603973487732efef2..6078778a61d502977940df21bf2c4d75e4752cac 100644 (file)
@@ -166,7 +166,8 @@ pub fn ty<'a, 'gcx>(&self, mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'
                 let ty = op.ty(tcx, lhs_ty, rhs_ty);
                 tcx.intern_tup(&[ty, tcx.types.bool], false)
             }
-            Rvalue::UnaryOp(_, ref operand) => {
+            Rvalue::UnaryOp(UnOp::Not, ref operand) |
+            Rvalue::UnaryOp(UnOp::Neg, ref operand) => {
                 operand.ty(mir, tcx)
             }
             Rvalue::Discriminant(ref lval) => {
@@ -179,9 +180,8 @@ pub fn ty<'a, 'gcx>(&self, mir: &Mir<'tcx>, tcx: TyCtxt<'a, 'gcx, 'tcx>) -> Ty<'
                     bug!("Rvalue::Discriminant on Lvalue of type {:?}", ty);
                 }
             }
-            Rvalue::Box(t) => {
-                tcx.mk_box(t)
-            }
+            Rvalue::NullaryOp(NullOp::Box, t) => tcx.mk_box(t),
+            Rvalue::NullaryOp(NullOp::SizeOf, _) => tcx.types.usize,
             Rvalue::Aggregate(ref ak, ref ops) => {
                 match **ak {
                     AggregateKind::Array(ty) => {
@@ -227,7 +227,7 @@ pub fn ty<'a, 'gcx>(&self, tcx: TyCtxt<'a, 'gcx, 'tcx>,
                 assert_eq!(lhs_ty, rhs_ty);
                 lhs_ty
             }
-            &BinOp::Shl | &BinOp::Shr => {
+            &BinOp::Shl | &BinOp::Shr | &BinOp::Offset => {
                 lhs_ty // lhs_ty can be != rhs_ty
             }
             &BinOp::Eq | &BinOp::Lt | &BinOp::Le |
@@ -270,7 +270,8 @@ pub fn to_hir_binop(self) -> hir::BinOp_ {
             BinOp::Lt => hir::BinOp_::BiLt,
             BinOp::Gt => hir::BinOp_::BiGt,
             BinOp::Le => hir::BinOp_::BiLe,
-            BinOp::Ge => hir::BinOp_::BiGe
+            BinOp::Ge => hir::BinOp_::BiGe,
+            BinOp::Offset => unreachable!()
         }
     }
 }
index 557fedadeba62155cb0a6cb1ce5767b68f76b35b..780ce736bfd3cd1b517b762cc34647db95b38161 100644 (file)
@@ -509,7 +509,7 @@ fn super_rvalue(&mut self,
                         self.visit_lvalue(lvalue, LvalueContext::Inspect, location);
                     }
 
-                    Rvalue::Box(ref $($mutability)* ty) => {
+                    Rvalue::NullaryOp(_op, ref $($mutability)* ty) => {
                         self.visit_ty(ty);
                     }
 
index 814246330a4c2f7b8da5e640ebbb536c83910423..28531893659e61f05bb291ccd26e177f6ca4d9e3 100644 (file)
@@ -11,8 +11,9 @@
 pub use self::code_stats::{CodeStats, DataTypeKind, FieldInfo};
 pub use self::code_stats::{SizeKind, TypeSizeInfo, VariantInfo};
 
-use dep_graph::{DepGraph, DepNode};
-use hir::def_id::{DefId, CrateNum, DefIndex, CRATE_DEF_INDEX};
+use dep_graph::DepGraph;
+use hir::def_id::{CrateNum, DefIndex};
+
 use lint;
 use middle::cstore::CrateStore;
 use middle::dependency_format;
@@ -32,7 +33,7 @@
 use syntax::symbol::Symbol;
 use syntax::{ast, codemap};
 use syntax::feature_gate::AttributeType;
-use syntax_pos::{Span, MultiSpan, FileMap};
+use syntax_pos::{Span, MultiSpan};
 
 use rustc_back::{LinkerFlavor, PanicStrategy};
 use rustc_back::target::Target;
@@ -46,7 +47,6 @@
 use std::rc::Rc;
 use std::fmt;
 use std::time::Duration;
-use std::sync::Arc;
 
 mod code_stats;
 pub mod config;
@@ -626,21 +626,6 @@ pub fn build_session_(sopts: config::Options,
     };
     let target_cfg = config::build_target_config(&sopts, &span_diagnostic);
 
-    // Hook up the codemap with a callback that allows it to register FileMap
-    // accesses with the dependency graph.
-    let cm_depgraph = dep_graph.clone();
-    let codemap_dep_tracking_callback = Box::new(move |filemap: &FileMap| {
-        let def_id = DefId {
-            krate: CrateNum::from_u32(filemap.crate_of_origin),
-            index: CRATE_DEF_INDEX,
-        };
-        let name = Arc::new(filemap.name.clone());
-        let dep_node = DepNode::FileMap(def_id, name);
-
-        cm_depgraph.read(dep_node);
-    });
-    codemap.set_dep_tracking_callback(codemap_dep_tracking_callback);
-
     let p_s = parse::ParseSess::with_span_handler(span_diagnostic, codemap);
     let default_sysroot = match sopts.maybe_sysroot {
         Some(_) => None,
index 5ee0b1c9e5ea44ba421bc9a29b03e29e84135709..7316d45dc21ae577bd696af66ef5d284e6fd0217 100644 (file)
@@ -40,7 +40,7 @@
 use ty::inhabitedness::DefIdForest;
 use ty::maps;
 use ty::steal::Steal;
-use util::nodemap::{NodeMap, NodeSet, DefIdMap, DefIdSet};
+use util::nodemap::{NodeMap, NodeSet, DefIdSet};
 use util::nodemap::{FxHashMap, FxHashSet};
 use rustc_data_structures::accumulate_vec::AccumulateVec;
 
@@ -499,33 +499,6 @@ pub struct GlobalCtxt<'tcx> {
     /// Maps Expr NodeId's to `true` iff `&expr` can have 'static lifetime.
     pub rvalue_promotable_to_static: RefCell<NodeMap<bool>>,
 
-    /// Maps Fn items to a collection of fragment infos.
-    ///
-    /// The main goal is to identify data (each of which may be moved
-    /// or assigned) whose subparts are not moved nor assigned
-    /// (i.e. their state is *unfragmented*) and corresponding ast
-    /// nodes where the path to that data is moved or assigned.
-    ///
-    /// In the long term, unfragmented values will have their
-    /// destructor entirely driven by a single stack-local drop-flag,
-    /// and their parents, the collections of the unfragmented values
-    /// (or more simply, "fragmented values"), are mapped to the
-    /// corresponding collections of stack-local drop-flags.
-    ///
-    /// (However, in the short term that is not the case; e.g. some
-    /// unfragmented paths still need to be zeroed, namely when they
-    /// reference parent data from an outer scope that was not
-    /// entirely moved, and therefore that needs to be zeroed so that
-    /// we do not get double-drop when we hit the end of the parent
-    /// scope.)
-    ///
-    /// Also: currently the table solely holds keys for node-ids of
-    /// unfragmented values (see `FragmentInfo` enum definition), but
-    /// longer-term we will need to also store mappings from
-    /// fragmented data to the set of unfragmented pieces that
-    /// constitute it.
-    pub fragment_infos: RefCell<DefIdMap<Vec<ty::FragmentInfo>>>,
-
     /// The definite name of the current crate after taking into account
     /// attributes, commandline parameters, etc.
     pub crate_name: Symbol,
@@ -716,7 +689,7 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
             export_map: resolutions.export_map,
             fulfilled_predicates: RefCell::new(fulfilled_predicates),
             hir: hir,
-            maps: maps::Maps::new(dep_graph, providers),
+            maps: maps::Maps::new(providers),
             mir_passes,
             freevars: RefCell::new(resolutions.freevars),
             maybe_unused_trait_imports: resolutions.maybe_unused_trait_imports,
@@ -730,7 +703,6 @@ pub fn create_and_enter<F, R>(s: &'tcx Session,
             selection_cache: traits::SelectionCache::new(),
             evaluation_cache: traits::EvaluationCache::new(),
             rvalue_promotable_to_static: RefCell::new(NodeMap()),
-            fragment_infos: RefCell::new(DefIdMap()),
             crate_name: Symbol::intern(crate_name),
             data_layout: data_layout,
             layout_cache: RefCell::new(FxHashMap()),
index 16d5d1187fc8bf5de52ee2b6cd934f5e709f8896..78536b53ba8791092df5fe6020d6568101e406a2 100644 (file)
@@ -218,7 +218,7 @@ fn push_impl_path<T>(self,
 
         // Always use types for non-local impls, where types are always
         // available, and filename/line-number is mostly uninteresting.
-        let use_types = !impl_def_id.is_local() || {
+        let use_types = !self.is_default_impl(impl_def_id) && (!impl_def_id.is_local() || {
             // Otherwise, use filename/line-number if forced.
             let force_no_types = FORCE_IMPL_FILENAME_LINE.with(|f| f.get());
             !force_no_types && {
@@ -226,7 +226,7 @@ fn push_impl_path<T>(self,
                 ty::queries::impl_trait_ref::try_get(self, DUMMY_SP, impl_def_id).is_ok() &&
                     ty::queries::type_of::try_get(self, DUMMY_SP, impl_def_id).is_ok()
             }
-        };
+        });
 
         if !use_types {
             return self.push_impl_path_fallback(buffer, impl_def_id);
index fb352e5be89383c4743522ba1abe85ab54217aea..cfb9e648d3b7eb1dd4c0d18c649080decd35403d 100644 (file)
@@ -8,7 +8,7 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-use dep_graph::{DepGraph, DepNode, DepTrackingMap, DepTrackingMapConfig};
+use dep_graph::{DepNode, DepTrackingMapConfig};
 use hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefId, LOCAL_CRATE};
 use hir::def::Def;
 use hir;
 use util::nodemap::{DefIdSet, NodeSet};
 
 use rustc_data_structures::indexed_vec::IndexVec;
+use rustc_data_structures::fx::FxHashMap;
 use std::cell::{RefCell, RefMut};
 use std::fmt::Debug;
 use std::hash::Hash;
+use std::marker::PhantomData;
 use std::mem;
 use std::collections::BTreeMap;
 use std::ops::Deref;
@@ -180,6 +182,20 @@ fn from_cycle_error<'a>(_: TyCtxt<'a, 'tcx, 'tcx>) -> Self {
     }
 }
 
+struct QueryMap<D: QueryDescription> {
+    phantom: PhantomData<D>,
+    map: FxHashMap<D::Key, D::Value>,
+}
+
+impl<M: QueryDescription> QueryMap<M> {
+    fn new() -> QueryMap<M> {
+        QueryMap {
+            phantom: PhantomData,
+            map: FxHashMap(),
+        }
+    }
+}
+
 pub struct CycleError<'a, 'tcx: 'a> {
     span: Span,
     cycle: RefMut<'a, [(Span, Query<'tcx>)]>,
@@ -463,13 +479,12 @@ macro_rules! define_maps {
         }
 
         impl<$tcx> Maps<$tcx> {
-            pub fn new(dep_graph: DepGraph,
-                       providers: IndexVec<CrateNum, Providers<$tcx>>)
+            pub fn new(providers: IndexVec<CrateNum, Providers<$tcx>>)
                        -> Self {
                 Maps {
                     providers,
                     query_stack: RefCell::new(vec![]),
-                    $($name: RefCell::new(DepTrackingMap::new(dep_graph.clone()))),*
+                    $($name: RefCell::new(QueryMap::new())),*
                 }
             }
         }
@@ -521,7 +536,7 @@ fn try_get_with<F, R>(tcx: TyCtxt<'a, $tcx, 'lcx>,
                        key,
                        span);
 
-                if let Some(result) = tcx.maps.$name.borrow().get(&key) {
+                if let Some(result) = tcx.maps.$name.borrow().map.get(&key) {
                     return Ok(f(result));
                 }
 
@@ -539,21 +554,19 @@ fn try_get_with<F, R>(tcx: TyCtxt<'a, $tcx, 'lcx>,
                     provider(tcx.global_tcx(), key)
                 })?;
 
-                Ok(f(tcx.maps.$name.borrow_mut().entry(key).or_insert(result)))
+                Ok(f(tcx.maps.$name.borrow_mut().map.entry(key).or_insert(result)))
             }
 
             pub fn try_get(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K)
                            -> Result<$V, CycleError<'a, $tcx>> {
+                // We register the `read` here, but not in `force`, since
+                // `force` does not give access to the value produced (and thus
+                // we actually don't read it).
+                tcx.dep_graph.read(Self::to_dep_node(&key));
                 Self::try_get_with(tcx, span, key, Clone::clone)
             }
 
             pub fn force(tcx: TyCtxt<'a, $tcx, 'lcx>, span: Span, key: $K) {
-                // FIXME(eddyb) Move away from using `DepTrackingMap`
-                // so we don't have to explicitly ignore a false edge:
-                // we can't observe a value dependency, only side-effects,
-                // through `force`, and once everything has been updated,
-                // perhaps only diagnostics, if those, will remain.
-                let _ignore = tcx.dep_graph.in_ignore();
                 match Self::try_get_with(tcx, span, key, |_| ()) {
                     Ok(()) => {}
                     Err(e) => tcx.report_cycle(e)
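
A self-contained sketch of the rule stated in the comment in `try_get` above, using toy types rather than the compiler's: a `get` records a read edge because the caller observes the value, while a `force` merely ensures the value has been computed.

    use std::cell::RefCell;
    use std::collections::{HashMap, HashSet};

    // Toy stand-ins for the dep-graph and query map; all names are illustrative.
    #[derive(Default)]
    struct DepGraph { reads: RefCell<HashSet<&'static str>> }
    impl DepGraph {
        fn read(&self, node: &'static str) { self.reads.borrow_mut().insert(node); }
    }

    #[derive(Default)]
    struct Queries { graph: DepGraph, cache: RefCell<HashMap<&'static str, u32>> }

    impl Queries {
        fn compute(&self, key: &'static str) -> u32 { key.len() as u32 }

        // The caller sees the value, so record a read edge for `key`.
        fn get(&self, key: &'static str) -> u32 {
            self.graph.read(key);
            *self.cache.borrow_mut().entry(key).or_insert_with(|| self.compute(key))
        }

        // Only guarantees the value exists; nothing escapes, so no read edge.
        fn force(&self, key: &'static str) {
            self.cache.borrow_mut().entry(key).or_insert_with(|| self.compute(key));
        }
    }

    fn main() {
        let q = Queries::default();
        q.force("type_of");
        assert!(!q.graph.reads.borrow().contains("type_of"));
        let _ = q.get("type_of");
        assert!(q.graph.reads.borrow().contains("type_of"));
    }
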
@@ -644,7 +657,7 @@ pub struct Maps<$tcx> {
             tcx: $tcx,
             input: $input,
             output: ($($output)*
-                     $(#[$attr])* $($pub)* $name: RefCell<DepTrackingMap<queries::$name<$tcx>>>,)
+                     $(#[$attr])* $($pub)* $name: RefCell<QueryMap<queries::$name<$tcx>>>,)
         }
     };
 
@@ -774,6 +787,9 @@ fn default() -> Self {
     /// True if this is a foreign item (i.e., linked via `extern { ... }`).
     [] is_foreign_item: IsForeignItem(DefId) -> bool,
 
+    /// True if this is a default impl (aka impl Foo for ..)
+    [] is_default_impl: ItemSignature(DefId) -> bool,
+
     /// Get a map with the variance of every item; use `item_variance`
     /// instead.
     [] crate_variances: crate_variances(CrateNum) -> Rc<ty::CrateVariancesMap>,
index 963ee9b7165e5c28acff4414b27755618f2c81ac..aeffd71a0964980e430de3b550c30f9e4e5be233 100644 (file)
@@ -444,17 +444,6 @@ pub struct CReaderCacheKey {
     pub pos: usize,
 }
 
-/// Describes the fragment-state associated with a NodeId.
-///
-/// Currently only unfragmented paths have entries in the table,
-/// but longer-term this enum is expected to expand to also
-/// include data for fragmented paths.
-#[derive(Copy, Clone, Debug)]
-pub enum FragmentInfo {
-    Moved { var: NodeId, move_expr: NodeId },
-    Assigned { var: NodeId, assign_expr: NodeId, assignee_id: NodeId },
-}
-
 // Flags that we track on types. These flags are propagated upwards
 // through the type during type construction, so that we can quickly
 // check whether the type has various kinds of types in it without
index 01fed11fc97affd7e8ad22cb0f275e7b5ec1faef..8edae027dbfbc45b9cb78e960b9471db5787ba09 100644 (file)
@@ -266,13 +266,29 @@ pub fn named_element_ty(self,
     /// if not a structure at all. Corresponds to the only possible unsized
     /// field, and its type can be used to determine unsizing strategy.
     pub fn struct_tail(self, mut ty: Ty<'tcx>) -> Ty<'tcx> {
-        while let TyAdt(def, substs) = ty.sty {
-            if !def.is_struct() {
-                break;
-            }
-            match def.struct_variant().fields.last() {
-                Some(f) => ty = f.ty(self, substs),
-                None => break,
+        loop {
+            match ty.sty {
+                ty::TyAdt(def, substs) => {
+                    if !def.is_struct() {
+                        break;
+                    }
+                    match def.struct_variant().fields.last() {
+                        Some(f) => ty = f.ty(self, substs),
+                        None => break,
+                    }
+                }
+
+                ty::TyTuple(tys, _) => {
+                    if let Some((&last_ty, _)) = tys.split_last() {
+                        ty = last_ty;
+                    } else {
+                        break;
+                    }
+                }
+
+                _ => {
+                    break;
+                }
             }
         }
         ty
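
With the tuple arm added, the helper now also descends through the last element of a tuple, not just the last field of a struct. A toy re-statement of the traversal (illustrative names, not the compiler's `Ty`):

    // Keep taking the last field of a struct or the last element of a tuple
    // until neither applies; whatever remains is the "tail".
    #[derive(Debug, PartialEq)]
    enum Ty {
        Struct(Vec<Ty>),
        Tuple(Vec<Ty>),
        Slice, // stands in for an unsized tail such as [u8]
        U32,
    }

    fn struct_tail(mut ty: &Ty) -> &Ty {
        loop {
            match ty {
                Ty::Struct(fields) | Ty::Tuple(fields) => match fields.last() {
                    Some(last) => ty = last,
                    None => break,
                },
                _ => break,
            }
        }
        ty
    }

    fn main() {
        // struct S { a: u32, b: (u32, [u8]) }: the tail is the slice.
        let s = Ty::Struct(vec![Ty::U32, Ty::Tuple(vec![Ty::U32, Ty::Slice])]);
        assert_eq!(*struct_tail(&s), Ty::Slice);
    }
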
@@ -584,6 +600,15 @@ pub fn empty_substs_for_def_id(self, item_def_id: DefId) -> &'tcx ty::Substs<'tc
             bug!("empty_substs_for_def_id: {:?} has type parameters", item_def_id)
         })
     }
+
+    pub fn const_usize(&self, val: u16) -> ConstInt {
+        match self.sess.target.uint_type {
+            ast::UintTy::U16 => ConstInt::Usize(ConstUsize::Us16(val as u16)),
+            ast::UintTy::U32 => ConstInt::Usize(ConstUsize::Us32(val as u32)),
+            ast::UintTy::U64 => ConstInt::Usize(ConstUsize::Us64(val as u64)),
+            _ => bug!(),
+        }
+    }
 }
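
The intent of `const_usize` is to pick the `ConstUsize` variant matching the target's pointer width, so a small literal round-trips as a target `usize`. A sketch of the same pattern outside the compiler (toy types, illustrative only):

    #[derive(Debug, PartialEq)]
    enum ConstUsize { Us16(u16), Us32(u32), Us64(u64) }

    // Choose the representation of a target-dependent usize constant based on
    // the target's pointer width, mirroring const_usize above.
    fn const_usize(target_pointer_width: u32, val: u16) -> ConstUsize {
        match target_pointer_width {
            16 => ConstUsize::Us16(val),
            32 => ConstUsize::Us32(val as u32),
            64 => ConstUsize::Us64(val as u64),
            _ => unreachable!("unsupported pointer width"),
        }
    }

    fn main() {
        assert_eq!(const_usize(64, 3), ConstUsize::Us64(3));
    }
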
 
 pub struct TypeIdHasher<'a, 'gcx: 'a+'tcx, 'tcx: 'a, W> {
diff --git a/src/librustc_borrowck/borrowck/fragments.rs b/src/librustc_borrowck/borrowck/fragments.rs
deleted file mode 100644 (file)
index b728d4d..0000000
+++ /dev/null
@@ -1,542 +0,0 @@
-// Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-//! Helper routines used for fragmenting structural paths due to moves for
-//! tracking drop obligations. Please see the extensive comments in the
-//! section "Structural fragments" in `README.md`.
-
-use self::Fragment::*;
-
-use borrowck::InteriorKind::{InteriorField, InteriorElement};
-use borrowck::{self, LoanPath};
-use borrowck::LoanPathKind::{LpVar, LpUpvar, LpDowncast, LpExtend};
-use borrowck::LoanPathElem::{LpDeref, LpInterior};
-use borrowck::move_data::InvalidMovePathIndex;
-use borrowck::move_data::{MoveData, MovePathIndex};
-use rustc::hir::def_id::{DefId};
-use rustc::ty::{self, AdtKind, TyCtxt};
-use rustc::middle::mem_categorization as mc;
-
-use std::mem;
-use std::rc::Rc;
-use syntax::ast;
-use syntax_pos::DUMMY_SP;
-
-#[derive(PartialEq, Eq, PartialOrd, Ord)]
-enum Fragment {
-    // This represents the path described by the move path index
-    Just(MovePathIndex),
-
-    // This represents the collection of all but one of the elements
-    // from an array at the path described by the move path index.
-    // Note that attached MovePathIndex should have mem_categorization
-    // of InteriorElement (i.e. array dereference `&foo[..]`).
-    AllButOneFrom(MovePathIndex),
-}
-
-impl Fragment {
-    fn loan_path_repr(&self, move_data: &MoveData) -> String {
-        let lp = |mpi| move_data.path_loan_path(mpi);
-        match *self {
-            Just(mpi) => format!("{:?}", lp(mpi)),
-            AllButOneFrom(mpi) => format!("$(allbutone {:?})", lp(mpi)),
-        }
-    }
-
-    fn loan_path_user_string(&self, move_data: &MoveData) -> String {
-        let lp = |mpi| move_data.path_loan_path(mpi);
-        match *self {
-            Just(mpi) => lp(mpi).to_string(),
-            AllButOneFrom(mpi) => format!("$(allbutone {})", lp(mpi)),
-        }
-    }
-}
-
-pub fn build_unfragmented_map(this: &mut borrowck::BorrowckCtxt,
-                              move_data: &MoveData,
-                              id: ast::NodeId) {
-    let fr = &move_data.fragments.borrow();
-
-    // For now, don't care about other kinds of fragments; the precise
-    // classification of all paths for non-zeroing *drop* needs them,

-    // but the loose approximation used by non-zeroing moves does not.
-    let moved_leaf_paths = fr.moved_leaf_paths();
-    let assigned_leaf_paths = fr.assigned_leaf_paths();
-
-    let mut fragment_infos = Vec::with_capacity(moved_leaf_paths.len());
-
-    let find_var_id = |move_path_index: MovePathIndex| -> Option<ast::NodeId> {
-        let lp = move_data.path_loan_path(move_path_index);
-        match lp.kind {
-            LpVar(var_id) => Some(var_id),
-            LpUpvar(ty::UpvarId { var_id, closure_expr_id }) => {
-                // The `var_id` is unique *relative to* the current function.
-                // (Check that we are indeed talking about the same function.)
-                assert_eq!(id, closure_expr_id);
-                Some(var_id)
-            }
-            LpDowncast(..) | LpExtend(..) => {
-                // This simple implementation of non-zeroing move does
-                // not attempt to deal with tracking substructure
-                // accurately in the general case.
-                None
-            }
-        }
-    };
-
-    let moves = move_data.moves.borrow();
-    for &move_path_index in moved_leaf_paths {
-        let var_id = match find_var_id(move_path_index) {
-            None => continue,
-            Some(var_id) => var_id,
-        };
-
-        move_data.each_applicable_move(move_path_index, |move_index| {
-            let info = ty::FragmentInfo::Moved {
-                var: var_id,
-                move_expr: moves[move_index.get()].id,
-            };
-            debug!("fragment_infos push({:?} \
-                    due to move_path_index: {} move_index: {}",
-                   info, move_path_index.get(), move_index.get());
-            fragment_infos.push(info);
-            true
-        });
-    }
-
-    for &move_path_index in assigned_leaf_paths {
-        let var_id = match find_var_id(move_path_index) {
-            None => continue,
-            Some(var_id) => var_id,
-        };
-
-        let var_assigns = move_data.var_assignments.borrow();
-        for var_assign in var_assigns.iter()
-            .filter(|&assign| assign.path == move_path_index)
-        {
-            let info = ty::FragmentInfo::Assigned {
-                var: var_id,
-                assign_expr: var_assign.id,
-                assignee_id: var_assign.assignee_id,
-            };
-            debug!("fragment_infos push({:?} due to var_assignment", info);
-            fragment_infos.push(info);
-        }
-    }
-
-    let mut fraginfo_map = this.tcx.fragment_infos.borrow_mut();
-    let fn_did = this.tcx.hir.local_def_id(id);
-    let prev = fraginfo_map.insert(fn_did, fragment_infos);
-    assert!(prev.is_none());
-}
-
-pub struct FragmentSets {
-    /// During move_data construction, `moved_leaf_paths` tracks paths
-    /// that have been used directly by being moved out of.  When
-    /// move_data construction has been completed, `moved_leaf_paths`
-    /// tracks such paths that are *leaf fragments* (e.g. `a.j` if we
-    /// never move out any child like `a.j.x`); any parent paths
-    /// (e.g. `a` for the `a.j` example) are moved over to
-    /// `parents_of_fragments`.
-    moved_leaf_paths: Vec<MovePathIndex>,
-
-    /// `assigned_leaf_paths` tracks paths that have been used
-    /// directly by being overwritten, but is otherwise much like
-    /// `moved_leaf_paths`.
-    assigned_leaf_paths: Vec<MovePathIndex>,
-
-    /// `parents_of_fragments` tracks paths that are definitely
-    /// parents of paths that have been moved.
-    ///
-    /// FIXME(pnkfelix) probably do not want/need
-    /// `parents_of_fragments` at all, if we can avoid it.
-    ///
-    /// Update: I do not see a way to avoid it.  Maybe just remove
-    /// above fixme, or at least document why doing this may be hard.
-    parents_of_fragments: Vec<MovePathIndex>,
-
-    /// During move_data construction (specifically the
-    /// fixup_fragment_sets call), `unmoved_fragments` tracks paths
-    /// that have been "left behind" after a sibling has been moved or
-    /// assigned.  When move_data construction has been completed,
-    /// `unmoved_fragments` tracks paths that were *only* results of
-    /// being left-behind, and never directly moved themselves.
-    unmoved_fragments: Vec<Fragment>,
-}
-
-impl FragmentSets {
-    pub fn new() -> FragmentSets {
-        FragmentSets {
-            unmoved_fragments: Vec::new(),
-            moved_leaf_paths: Vec::new(),
-            assigned_leaf_paths: Vec::new(),
-            parents_of_fragments: Vec::new(),
-        }
-    }
-
-    pub fn moved_leaf_paths(&self) -> &[MovePathIndex] {
-        &self.moved_leaf_paths
-    }
-
-    pub fn assigned_leaf_paths(&self) -> &[MovePathIndex] {
-        &self.assigned_leaf_paths
-    }
-
-    pub fn add_move(&mut self, path_index: MovePathIndex) {
-        self.moved_leaf_paths.push(path_index);
-    }
-
-    pub fn add_assignment(&mut self, path_index: MovePathIndex) {
-        self.assigned_leaf_paths.push(path_index);
-    }
-}
-
-pub fn instrument_move_fragments<'a, 'tcx>(this: &MoveData<'tcx>,
-                                           tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                           id: ast::NodeId) {
-    let span_err = tcx.hir.attrs(id).iter()
-                          .any(|a| a.check_name("rustc_move_fragments"));
-    let print = tcx.sess.opts.debugging_opts.print_move_fragments;
-
-    if !span_err && !print { return; }
-
-    let sp = tcx.hir.span(id);
-
-    let instrument_all_paths = |kind, vec_rc: &Vec<MovePathIndex>| {
-        for (i, mpi) in vec_rc.iter().enumerate() {
-            let lp = || this.path_loan_path(*mpi);
-            if span_err {
-                tcx.sess.span_err(sp, &format!("{}: `{}`", kind, lp()));
-            }
-            if print {
-                println!("id:{} {}[{}] `{}`", id, kind, i, lp());
-            }
-        }
-    };
-
-    let instrument_all_fragments = |kind, vec_rc: &Vec<Fragment>| {
-        for (i, f) in vec_rc.iter().enumerate() {
-            let render = || f.loan_path_user_string(this);
-            if span_err {
-                tcx.sess.span_err(sp, &format!("{}: `{}`", kind, render()));
-            }
-            if print {
-                println!("id:{} {}[{}] `{}`", id, kind, i, render());
-            }
-        }
-    };
-
-    let fragments = this.fragments.borrow();
-    instrument_all_paths("moved_leaf_path", &fragments.moved_leaf_paths);
-    instrument_all_fragments("unmoved_fragment", &fragments.unmoved_fragments);
-    instrument_all_paths("parent_of_fragments", &fragments.parents_of_fragments);
-    instrument_all_paths("assigned_leaf_path", &fragments.assigned_leaf_paths);
-}
-
-/// Normalizes the fragment sets in `this`; i.e., removes duplicate entries, constructs the set of
-/// parents, and constructs the left-over fragments.
-///
-/// Note: "left-over fragments" means paths that were not directly referenced in moves nor
-/// assignments, but must nonetheless be tracked as potential drop obligations.
-pub fn fixup_fragment_sets<'a, 'tcx>(this: &MoveData<'tcx>, tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-
-    let mut fragments = this.fragments.borrow_mut();
-
-    // Swap out contents of fragments so that we can modify the fields
-    // without borrowing the common fragments.
-    let mut unmoved = mem::replace(&mut fragments.unmoved_fragments, vec![]);
-    let mut parents = mem::replace(&mut fragments.parents_of_fragments, vec![]);
-    let mut moved = mem::replace(&mut fragments.moved_leaf_paths, vec![]);
-    let mut assigned = mem::replace(&mut fragments.assigned_leaf_paths, vec![]);
-
-    let path_lps = |mpis: &[MovePathIndex]| -> Vec<String> {
-        mpis.iter().map(|mpi| format!("{:?}", this.path_loan_path(*mpi))).collect()
-    };
-
-    let frag_lps = |fs: &[Fragment]| -> Vec<String> {
-        fs.iter().map(|f| f.loan_path_repr(this)).collect()
-    };
-
-    // First, filter out duplicates
-    moved.sort();
-    moved.dedup();
-    debug!("fragments 1 moved: {:?}", path_lps(&moved));
-
-    assigned.sort();
-    assigned.dedup();
-    debug!("fragments 1 assigned: {:?}", path_lps(&assigned));
-
-    // Second, build parents from the moved and assigned.
-    for m in &moved {
-        let mut p = this.path_parent(*m);
-        while p != InvalidMovePathIndex {
-            parents.push(p);
-            p = this.path_parent(p);
-        }
-    }
-    for a in &assigned {
-        let mut p = this.path_parent(*a);
-        while p != InvalidMovePathIndex {
-            parents.push(p);
-            p = this.path_parent(p);
-        }
-    }
-
-    parents.sort();
-    parents.dedup();
-    debug!("fragments 2 parents: {:?}", path_lps(&parents));
-
-    // Third, filter the moved and assigned fragments down to just the non-parents
-    moved.retain(|f| non_member(*f, &parents));
-    debug!("fragments 3 moved: {:?}", path_lps(&moved));
-
-    assigned.retain(|f| non_member(*f, &parents));
-    debug!("fragments 3 assigned: {:?}", path_lps(&assigned));
-
-    // Fourth, build the leftover from the moved, assigned, and parents.
-    for m in &moved {
-        let lp = this.path_loan_path(*m);
-        add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
-    }
-    for a in &assigned {
-        let lp = this.path_loan_path(*a);
-        add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
-    }
-    for p in &parents {
-        let lp = this.path_loan_path(*p);
-        add_fragment_siblings(this, tcx, &mut unmoved, lp, None);
-    }
-
-    unmoved.sort();
-    unmoved.dedup();
-    debug!("fragments 4 unmoved: {:?}", frag_lps(&unmoved));
-
-    // Fifth, filter the leftover fragments down to its core.
-    unmoved.retain(|f| match *f {
-        AllButOneFrom(_) => true,
-        Just(mpi) => non_member(mpi, &parents) &&
-            non_member(mpi, &moved) &&
-            non_member(mpi, &assigned)
-    });
-    debug!("fragments 5 unmoved: {:?}", frag_lps(&unmoved));
-
-    // Swap contents back in.
-    fragments.unmoved_fragments = unmoved;
-    fragments.parents_of_fragments = parents;
-    fragments.moved_leaf_paths = moved;
-    fragments.assigned_leaf_paths = assigned;
-
-    return;
-
-    fn non_member(elem: MovePathIndex, set: &[MovePathIndex]) -> bool {
-        match set.binary_search(&elem) {
-            Ok(_) => false,
-            Err(_) => true,
-        }
-    }
-}
-
-/// Adds all of the precisely-tracked siblings of `lp` as potential move paths of interest. For
-/// example, if `lp` represents `s.x.j`, then adds move paths for `s.x.i` and `s.x.k`, the
-/// siblings of `s.x.j`.
-fn add_fragment_siblings<'a, 'tcx>(this: &MoveData<'tcx>,
-                                   tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                   gathered_fragments: &mut Vec<Fragment>,
-                                   lp: Rc<LoanPath<'tcx>>,
-                                   origin_id: Option<ast::NodeId>) {
-    match lp.kind {
-        LpVar(_) | LpUpvar(..) => {} // Local variables have no siblings.
-
-        // Consuming a downcast is like consuming the original value, so propagate inward.
-        LpDowncast(ref loan_parent, _) => {
-            add_fragment_siblings(this, tcx, gathered_fragments, loan_parent.clone(), origin_id);
-        }
-
-        // *LV for Unique consumes the contents of the box (at
-        // least when it is non-copy...), so propagate inward.
-        LpExtend(ref loan_parent, _, LpDeref(mc::Unique)) => {
-            add_fragment_siblings(this, tcx, gathered_fragments, loan_parent.clone(), origin_id);
-        }
-
-        // *LV for unsafe and borrowed pointers do not consume their loan path, so stop here.
-        LpExtend(.., LpDeref(mc::UnsafePtr(..)))   |
-        LpExtend(.., LpDeref(mc::Implicit(..)))    |
-        LpExtend(.., LpDeref(mc::BorrowedPtr(..))) => {}
-
-        // FIXME (pnkfelix): LV[j] should be tracked, at least in the
-        // sense of we will track the remaining drop obligation of the
-        // rest of the array.
-        //
-        // Well, either that or LV[j] should be made illegal.
-        // But even then, we will need to deal with destructuring
-        // bind.
-        //
-        // Anyway, for now: LV[j] is not tracked precisely
-        LpExtend(.., LpInterior(_, InteriorElement(..))) => {
-            let mp = this.move_path(tcx, lp.clone());
-            gathered_fragments.push(AllButOneFrom(mp));
-        }
-
-        // field access LV.x and tuple access LV#k are the cases
-        // we are interested in
-        LpExtend(ref loan_parent, mc,
-                 LpInterior(_, InteriorField(ref field_name))) => {
-            let enum_variant_info = match loan_parent.kind {
-                LpDowncast(ref loan_parent_2, variant_def_id) =>
-                    Some((variant_def_id, loan_parent_2.clone())),
-                LpExtend(..) | LpVar(..) | LpUpvar(..) =>
-                    None,
-            };
-            add_fragment_siblings_for_extension(
-                this,
-                tcx,
-                gathered_fragments,
-                loan_parent, mc, field_name, &lp, origin_id, enum_variant_info);
-        }
-    }
-}
-
-/// We have determined that `origin_lp` destructures to LpExtend(parent, original_field_name).
-/// Based on this, add move paths for all of the siblings of `origin_lp`.
-fn add_fragment_siblings_for_extension<'a, 'tcx>(this: &MoveData<'tcx>,
-                                                 tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                                 gathered_fragments: &mut Vec<Fragment>,
-                                                 parent_lp: &Rc<LoanPath<'tcx>>,
-                                                 mc: mc::MutabilityCategory,
-                                                 origin_field_name: &mc::FieldName,
-                                                 origin_lp: &Rc<LoanPath<'tcx>>,
-                                                 origin_id: Option<ast::NodeId>,
-                                                 enum_variant_info: Option<(DefId,
-                                                    Rc<LoanPath<'tcx>>)>) {
-    let parent_ty = parent_lp.to_type();
-
-    let mut add_fragment_sibling_local = |field_name, variant_did| {
-        add_fragment_sibling_core(
-            this, tcx, gathered_fragments, parent_lp.clone(), mc, field_name, origin_lp,
-            variant_did);
-    };
-
-    match parent_ty.sty {
-        ty::TyTuple(ref v, _) => {
-            let tuple_idx = match *origin_field_name {
-                mc::PositionalField(tuple_idx) => tuple_idx,
-                mc::NamedField(_) =>
-                    bug!("tuple type {:?} should not have named fields.",
-                         parent_ty),
-            };
-            let tuple_len = v.len();
-            for i in 0..tuple_len {
-                if i == tuple_idx { continue }
-                let field_name = mc::PositionalField(i);
-                add_fragment_sibling_local(field_name, None);
-            }
-        }
-
-        ty::TyAdt(def, ..) => match def.adt_kind() {
-            AdtKind::Struct => {
-                match *origin_field_name {
-                    mc::NamedField(ast_name) => {
-                        for f in &def.struct_variant().fields {
-                            if f.name == ast_name {
-                                continue;
-                            }
-                            let field_name = mc::NamedField(f.name);
-                            add_fragment_sibling_local(field_name, None);
-                        }
-                    }
-                    mc::PositionalField(tuple_idx) => {
-                        for (i, _f) in def.struct_variant().fields.iter().enumerate() {
-                            if i == tuple_idx {
-                                continue
-                            }
-                            let field_name = mc::PositionalField(i);
-                            add_fragment_sibling_local(field_name, None);
-                        }
-                    }
-                }
-            }
-            AdtKind::Union => {
-                // Do nothing, all union fields are moved/assigned together.
-            }
-            AdtKind::Enum => {
-                let variant = match enum_variant_info {
-                    Some((vid, ref _lp2)) => def.variant_with_id(vid),
-                    None => {
-                        assert!(def.is_univariant());
-                        &def.variants[0]
-                    }
-                };
-                match *origin_field_name {
-                    mc::NamedField(ast_name) => {
-                        for field in &variant.fields {
-                            if field.name == ast_name {
-                                continue;
-                            }
-                            let field_name = mc::NamedField(field.name);
-                            add_fragment_sibling_local(field_name, Some(variant.did));
-                        }
-                    }
-                    mc::PositionalField(tuple_idx) => {
-                        for (i, _f) in variant.fields.iter().enumerate() {
-                            if tuple_idx == i {
-                                continue;
-                            }
-                            let field_name = mc::PositionalField(i);
-                            add_fragment_sibling_local(field_name, None);
-                        }
-                    }
-                }
-            }
-        },
-
-        ref ty => {
-            let span = origin_id.map_or(DUMMY_SP, |id| tcx.hir.span(id));
-            span_bug!(span,
-                      "type {:?} ({:?}) is not fragmentable",
-                      parent_ty, ty);
-        }
-    }
-}
-
-/// Adds the single sibling `LpExtend(parent, new_field_name)` of `origin_lp` (the original
-/// loan-path).
-fn add_fragment_sibling_core<'a, 'tcx>(this: &MoveData<'tcx>,
-                                       tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                       gathered_fragments: &mut Vec<Fragment>,
-                                       parent: Rc<LoanPath<'tcx>>,
-                                       mc: mc::MutabilityCategory,
-                                       new_field_name: mc::FieldName,
-                                       origin_lp: &Rc<LoanPath<'tcx>>,
-                                       enum_variant_did: Option<DefId>)
-                                       -> MovePathIndex {
-    let opt_variant_did = match parent.kind {
-        LpDowncast(_, variant_did) => Some(variant_did),
-        LpVar(..) | LpUpvar(..) | LpExtend(..) => enum_variant_did,
-    };
-
-    let loan_path_elem = LpInterior(opt_variant_did, InteriorField(new_field_name));
-    let new_lp_type = match new_field_name {
-        mc::NamedField(ast_name) =>
-            tcx.named_element_ty(parent.to_type(), ast_name, opt_variant_did),
-        mc::PositionalField(idx) =>
-            tcx.positional_element_ty(parent.to_type(), idx, opt_variant_did),
-    };
-    let new_lp_variant = LpExtend(parent, mc, loan_path_elem);
-    let new_lp = LoanPath::new(new_lp_variant, new_lp_type.unwrap());
-    debug!("add_fragment_sibling_core(new_lp={:?}, origin_lp={:?})",
-           new_lp, origin_lp);
-    let mp = this.move_path(tcx, Rc::new(new_lp));
-
-    // Do not worry about checking for duplicates here; we will sort
-    // and dedup after all are added.
-    gathered_fragments.push(Just(mp));
-
-    mp
-}
index e0d86ff23f8621a6aae97415ee2919e2990055f8..b03d34819f6374ce58985c1a50d9a22df141b6f7 100644 (file)
@@ -22,7 +22,7 @@
 use rustc_data_structures::indexed_set::IdxSetBuf;
 use rustc_data_structures::indexed_vec::Idx;
 use rustc_mir::util::patch::MirPatch;
-use rustc_mir::util::elaborate_drops::{DropFlagState, elaborate_drop};
+use rustc_mir::util::elaborate_drops::{DropFlagState, Unwind, elaborate_drop};
 use rustc_mir::util::elaborate_drops::{DropElaborator, DropStyle, DropFlagMode};
 use syntax::ast;
 use syntax_pos::Span;
@@ -399,14 +399,13 @@ fn elaborate_drops(&mut self)
                                     ctxt: self
                                 },
                                 terminator.source_info,
-                                data.is_cleanup,
                                 location,
                                 path,
                                 target,
                                 if data.is_cleanup {
-                                    None
+                                    Unwind::InCleanup
                                 } else {
-                                    Some(Option::unwrap_or(unwind, resume_block))
+                                    Unwind::To(Option::unwrap_or(unwind, resume_block))
                                 },
                                 bb)
                         }
@@ -455,6 +454,7 @@ fn elaborate_replace(
         let bb = loc.block;
         let data = &self.mir[bb];
         let terminator = data.terminator();
+        assert!(!data.is_cleanup, "DropAndReplace in unwind path not supported");
 
         let assign = Statement {
             kind: StatementKind::Assign(location.clone(), Rvalue::Use(value.clone())),
@@ -477,7 +477,7 @@ fn elaborate_replace(
                 kind: TerminatorKind::Goto { target: target },
                 ..*terminator
             }),
-            is_cleanup: data.is_cleanup,
+            is_cleanup: false,
         });
 
         match self.move_data().rev_lookup.find(location) {
@@ -491,11 +491,10 @@ fn elaborate_replace(
                         ctxt: self
                     },
                     terminator.source_info,
-                    data.is_cleanup,
                     location,
                     path,
                     target,
-                    Some(unwind),
+                    Unwind::To(unwind),
                     bb);
                 on_all_children_bits(self.tcx, self.mir, self.move_data(), path, |child| {
                     self.set_drop_flag(Location { block: target, statement_index: 0 },
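
The call sites above replace the old pair of an `Option<BasicBlock>` and an `is_cleanup` flag with a single `Unwind` value: `Unwind::To(block)` when there is a landing block to unwind to, and `Unwind::InCleanup` when the drop already sits on a cleanup path. A sketch of the shape this implies (inferred from the call sites, not copied from the source; `BasicBlock` is a stand-in):

    type BasicBlock = usize;

    #[derive(Copy, Clone, Debug)]
    enum Unwind {
        // Unwinding from this drop continues at the given cleanup block.
        To(BasicBlock),
        // The drop itself is already on the cleanup path, so there is no
        // further block to unwind to.
        InCleanup,
    }

    fn unwind_target(unwind: Unwind) -> Option<BasicBlock> {
        match unwind {
            Unwind::To(bb) => Some(bb),
            Unwind::InCleanup => None,
        }
    }

    fn main() {
        assert_eq!(unwind_target(Unwind::To(7)), Some(7));
        assert_eq!(unwind_target(Unwind::InCleanup), None);
    }
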
index 931cdf4f6861208d048fad342af8256c502ae442..b03d2a775df71de964255b3fadbe0226b189e620 100644 (file)
@@ -438,7 +438,8 @@ fn gather_rvalue(&mut self, loc: Location, rvalue: &Rvalue<'tcx>) {
             Rvalue::Ref(..) |
             Rvalue::Discriminant(..) |
             Rvalue::Len(..) |
-            Rvalue::Box(..) => {
+            Rvalue::NullaryOp(NullOp::SizeOf, _) |
+            Rvalue::NullaryOp(NullOp::Box, _) => {
                 // This returns an rvalue with uninitialized contents. We can't
                 // move out of it here because it is an rvalue - assignments always
                 // completely initialize their lvalue.
index 2eb064305e87c0626999e4951acf956089148332..2b39d2a256e1fa6cbd96fd611dd9c53bcbd9c1f7 100644 (file)
@@ -59,7 +59,7 @@ pub fn borrowck_mir(bcx: &mut BorrowckCtxt,
                     attributes: &[ast::Attribute]) {
     let tcx = bcx.tcx;
     let def_id = tcx.hir.local_def_id(id);
-    debug!("borrowck_mir({}) UNIMPLEMENTED", tcx.item_path_str(def_id));
+    debug!("borrowck_mir({:?}) UNIMPLEMENTED", def_id);
 
     // It is safe for us to borrow `mir_validated()`: `optimized_mir`
     // steals it, but it forces the `borrowck` query.
index 6c076265db639b2c8abd2ff7959753244b2deee5..3248731d1cafc2ef0c7d902ee185547d286f2017 100644 (file)
@@ -129,13 +129,6 @@ fn borrowck<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>, owner_def_id: DefId) {
                        move_data: flowed_moves } =
         build_borrowck_dataflow_data(bccx, &cfg, body_id);
 
-    move_data::fragments::instrument_move_fragments(&flowed_moves.move_data,
-                                                    bccx.tcx,
-                                                    owner_id);
-    move_data::fragments::build_unfragmented_map(bccx,
-                                                 &flowed_moves.move_data,
-                                                 owner_id);
-
     check_loans::check_loans(bccx, &loan_dfcx, &flowed_moves, &all_loans, body);
 }
 
index 3e23086ec7bddc5f03591064f084347fd6df151f..1b364596a23f7fd518de0871ebb808afa4778592 100644 (file)
@@ -33,9 +33,6 @@
 use rustc::hir;
 use rustc::hir::intravisit::IdRange;
 
-#[path="fragments.rs"]
-pub mod fragments;
-
 pub struct MoveData<'tcx> {
     /// Move paths. See section "Move paths" in `README.md`.
     pub paths: RefCell<Vec<MovePath<'tcx>>>,
@@ -62,9 +59,6 @@ pub struct MoveData<'tcx> {
 
     /// Assignments to a variable or path, like `x = foo`, but not `x += foo`.
     pub assignee_ids: RefCell<NodeSet>,
-
-    /// Path-fragments from moves in to or out of parts of structured data.
-    pub fragments: RefCell<fragments::FragmentSets>,
 }
 
 pub struct FlowedMoveData<'a, 'tcx: 'a> {
@@ -223,7 +217,6 @@ pub fn new() -> MoveData<'tcx> {
             var_assignments: RefCell::new(Vec::new()),
             variant_matches: RefCell::new(Vec::new()),
             assignee_ids: RefCell::new(NodeSet()),
-            fragments: RefCell::new(fragments::FragmentSets::new()),
         }
     }
 
@@ -401,8 +394,6 @@ fn add_move_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
         let path_index = self.move_path(tcx, lp.clone());
         let move_index = MoveIndex(self.moves.borrow().len());
 
-        self.fragments.borrow_mut().add_move(path_index);
-
         let next_move = self.path_first_move(path_index);
         self.set_path_first_move(path_index, move_index);
 
@@ -458,8 +449,6 @@ fn add_assignment_helper(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
 
         let path_index = self.move_path(tcx, lp.clone());
 
-        self.fragments.borrow_mut().add_assignment(path_index);
-
         match mode {
             MutateMode::Init | MutateMode::JustWrite => {
                 self.assignee_ids.borrow_mut().insert(assignee_id);
@@ -502,8 +491,6 @@ pub fn add_variant_match(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
         let path_index = self.move_path(tcx, lp.clone());
         let base_path_index = self.move_path(tcx, base_lp.clone());
 
-        self.fragments.borrow_mut().add_assignment(path_index);
-
         let variant_match = VariantMatch {
             path: path_index,
             base_path: base_path_index,
@@ -514,10 +501,6 @@ pub fn add_variant_match(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>,
         self.variant_matches.borrow_mut().push(variant_match);
     }
 
-    fn fixup_fragment_sets(&self, tcx: TyCtxt<'a, 'tcx, 'tcx>) {
-        fragments::fixup_fragment_sets(self, tcx)
-    }
-
     /// Adds the gen/kills for the various moves and
     /// assignments into the provided data flow contexts.
     /// Moves are generated by moves and killed by assignments and
@@ -677,8 +660,6 @@ pub fn new(move_data: MoveData<'tcx>,
                                  id_range,
                                  move_data.var_assignments.borrow().len());
 
-        move_data.fixup_fragment_sets(tcx);
-
         move_data.add_gen_kills(bccx,
                                 &mut dfcx_moves,
                                 &mut dfcx_assign);
index 0642ddc71622b1abe8cfae41c72a6266b51b3d12..29ac650aa7053110e374dfa6859919913b9cf067 100644 (file)
@@ -212,6 +212,13 @@ fn index_mut(&mut self, index: I) -> &mut T {
     }
 }
 
+impl<I: Idx, T> Default for IndexVec<I, T> {
+    #[inline]
+    fn default() -> Self {
+        Self::new()
+    }
+}
+
 impl<I: Idx, T> Extend<T> for IndexVec<I, T> {
     #[inline]
     fn extend<J: IntoIterator<Item = T>>(&mut self, iter: J) {
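
The added `Default` impl simply forwards to the existing `IndexVec::new` constructor. A minimal standalone sketch of the same pattern, using a hypothetical `IdxVec` newtype (not rustc's `IndexVec`) so it compiles on its own:

    use std::marker::PhantomData;

    // Hypothetical stand-in for rustc's IndexVec<I, T>: a Vec keyed by a typed index.
    struct IdxVec<I, T> {
        raw: Vec<T>,
        _marker: PhantomData<I>,
    }

    impl<I, T> IdxVec<I, T> {
        fn new() -> Self {
            IdxVec { raw: Vec::new(), _marker: PhantomData }
        }
    }

    // Same shape as the impl in the hunk above: Default just delegates to new().
    impl<I, T> Default for IdxVec<I, T> {
        fn default() -> Self {
            Self::new()
        }
    }

    fn main() {
        let v: IdxVec<u32, String> = IdxVec::default();
        assert!(v.raw.is_empty());
    }
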
index c9ed9ad3c7d2d4d2027ab5b31fee1bac4259980d..f5727aa0a5ecc4be364e23c0fbcbfe27571c45dc 100644 (file)
 
 use std::cell::RefCell;
 use std::hash::Hash;
-use std::sync::Arc;
 use rustc::dep_graph::DepNode;
 use rustc::hir;
-use rustc::hir::def_id::{LOCAL_CRATE, CRATE_DEF_INDEX, DefId};
+use rustc::hir::def_id::{CRATE_DEF_INDEX, DefId};
 use rustc::hir::itemlikevisit::ItemLikeVisitor;
 use rustc::ich::{Fingerprint, StableHashingContext};
 use rustc::ty::TyCtxt;
@@ -155,11 +154,6 @@ fn compute_crate_hash(&mut self) {
                                         // We want to incorporate these into the
                                         // SVH.
                                     }
-                                    DepNode::FileMap(..) => {
-                                        // These don't make a semantic
-                                        // difference, filter them out.
-                                        return None
-                                    }
                                     DepNode::AllLocalTraitImpls => {
                                         // These are already covered by hashing
                                         // the HIR.
@@ -306,24 +300,6 @@ pub fn compute_incremental_hashes_map<'a, 'tcx: 'a>(tcx: TyCtxt<'a, 'tcx, 'tcx>)
             visitor.compute_and_store_ich_for_item_like(DepNode::HirBody(def_id), true, macro_def);
         }
 
-        for filemap in tcx.sess
-                          .codemap()
-                          .files_untracked()
-                          .iter()
-                          .filter(|fm| !fm.is_imported()) {
-            assert_eq!(LOCAL_CRATE.as_u32(), filemap.crate_of_origin);
-            let def_id = DefId {
-                krate: LOCAL_CRATE,
-                index: CRATE_DEF_INDEX,
-            };
-            let name = Arc::new(filemap.name.clone());
-            let dep_node = DepNode::FileMap(def_id, name);
-            let mut hasher = IchHasher::new();
-            filemap.hash_stable(&mut visitor.hcx, &mut hasher);
-            let fingerprint = hasher.finish();
-            visitor.hashes.insert(dep_node, fingerprint);
-        }
-
         visitor.compute_and_store_ich_for_trait_impls(krate);
     });
 
index 2f727a80f016e0589ccb8069554338645c3db31d..6ab280be470dd329794c0c6b536d06af67f6b211 100644 (file)
@@ -51,8 +51,7 @@ pub fn is_hashable(dep_node: &DepNode<DefId>) -> bool {
         match *dep_node {
             DepNode::Krate |
             DepNode::Hir(_) |
-            DepNode::HirBody(_) |
-            DepNode::FileMap(..) =>
+            DepNode::HirBody(_) =>
                 true,
             DepNode::MetaData(def_id) |
             DepNode::GlobalMetaData(def_id, _) => !def_id.is_local(),
@@ -77,20 +76,6 @@ pub fn hash(&mut self, dep_node: &DepNode<DefId>) -> Option<Fingerprint> {
                 Some(self.incremental_hashes_map[dep_node])
             }
 
-            DepNode::FileMap(def_id, ref name) => {
-                if def_id.is_local() {
-                    // We will have been able to retrace the DefId (which is
-                    // always the local CRATE_DEF_INDEX), but the file with the
-                    // given name might have been removed, so we use get() in
-                    // order to allow for that case.
-                    self.incremental_hashes_map.get(dep_node).map(|x| *x)
-                } else {
-                    Some(self.metadata_hash(DepNode::FileMap(def_id, name.clone()),
-                                            def_id.krate,
-                                            |this| &mut this.global_metadata_hashes))
-                }
-            }
-
             // MetaData from other crates is an *input* to us.
             // MetaData nodes from *our* crates are an *output*; we
             // don't hash them, but we do compute a hash for them and
@@ -242,7 +227,6 @@ fn load_from_data(&mut self,
             let def_id = DefId { krate: cnum, index: CRATE_DEF_INDEX };
             let dep_node = match dep_node {
                 DepNode::GlobalMetaData(_, kind) => DepNode::GlobalMetaData(def_id, kind),
-                DepNode::FileMap(_, name) => DepNode::FileMap(def_id, name),
                 other => {
                     bug!("unexpected DepNode variant: {:?}", other)
                 }
index 39b8e568ab48d959f5a1e1a5a573c7b0a3b59a36..58c53c5b4b98c7168c0eb976a901629a52cd339d 100644 (file)
@@ -259,12 +259,6 @@ fn check_trait_item(&mut self, cx: &LateContext, item: &hir::TraitItem) {
 }
 
 pub struct MissingDoc {
-    /// Stack of IDs of struct definitions.
-    struct_def_stack: Vec<ast::NodeId>,
-
-    /// True if inside variant definition
-    in_variant: bool,
-
     /// Stack of whether #[doc(hidden)] is set
     /// at each level which has lint attributes.
     doc_hidden_stack: Vec<bool>,
@@ -276,8 +270,6 @@ pub struct MissingDoc {
 impl MissingDoc {
     pub fn new() -> MissingDoc {
         MissingDoc {
-            struct_def_stack: vec![],
-            in_variant: false,
             doc_hidden_stack: vec![false],
             private_traits: HashSet::new(),
         }
@@ -345,25 +337,6 @@ fn exit_lint_attrs(&mut self, _: &LateContext, _attrs: &[ast::Attribute]) {
         self.doc_hidden_stack.pop().expect("empty doc_hidden_stack");
     }
 
-    fn check_struct_def(&mut self,
-                        _: &LateContext,
-                        _: &hir::VariantData,
-                        _: ast::Name,
-                        _: &hir::Generics,
-                        item_id: ast::NodeId) {
-        self.struct_def_stack.push(item_id);
-    }
-
-    fn check_struct_def_post(&mut self,
-                             _: &LateContext,
-                             _: &hir::VariantData,
-                             _: ast::Name,
-                             _: &hir::Generics,
-                             item_id: ast::NodeId) {
-        let popped = self.struct_def_stack.pop().expect("empty struct_def_stack");
-        assert!(popped == item_id);
-    }
-
     fn check_crate(&mut self, cx: &LateContext, krate: &hir::Crate) {
         self.check_missing_docs_attrs(cx, None, &krate.attrs, krate.span, "crate");
     }
@@ -451,16 +424,11 @@ fn check_impl_item(&mut self, cx: &LateContext, impl_item: &hir::ImplItem) {
 
     fn check_struct_field(&mut self, cx: &LateContext, sf: &hir::StructField) {
         if !sf.is_positional() {
-            if sf.vis == hir::Public || self.in_variant {
-                let cur_struct_def = *self.struct_def_stack
-                    .last()
-                    .expect("empty struct_def_stack");
-                self.check_missing_docs_attrs(cx,
-                                              Some(cur_struct_def),
-                                              &sf.attrs,
-                                              sf.span,
-                                              "a struct field")
-            }
+            self.check_missing_docs_attrs(cx,
+                                          Some(sf.id),
+                                          &sf.attrs,
+                                          sf.span,
+                                          "a struct field")
         }
     }
 
@@ -470,13 +438,6 @@ fn check_variant(&mut self, cx: &LateContext, v: &hir::Variant, _: &hir::Generic
                                       &v.node.attrs,
                                       v.span,
                                       "a variant");
-        assert!(!self.in_variant);
-        self.in_variant = true;
-    }
-
-    fn check_variant_post(&mut self, _: &LateContext, _: &hir::Variant, _: &hir::Generics) {
-        assert!(self.in_variant);
-        self.in_variant = false;
     }
 }
 
index 479c7206cb4cb95b150c5b0adcb0b40daf86c0de..d8f29768ccd67cc6883598377d7e598233c7e64b 100644 (file)
@@ -252,6 +252,10 @@ macro_rules! add_lint_group {
             id: LintId::of(MISSING_FRAGMENT_SPECIFIER),
             reference: "issue #40107 <https://github.com/rust-lang/rust/issues/40107>",
         },
+        FutureIncompatibleInfo {
+            id: LintId::of(PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES),
+            reference: "issue #42238 <https://github.com/rust-lang/rust/issues/42238>",
+        },
         FutureIncompatibleInfo {
             id: LintId::of(ANONYMOUS_PARAMETERS),
             reference: "issue #41686 <https://github.com/rust-lang/rust/issues/41686>",
index 9d098557367cd48311369dfc59557c7f5a1ee188..b3503713c904ea3acff385cb4337ac0aba044357 100644 (file)
@@ -106,6 +106,7 @@ pub fn provide<$lt>(providers: &mut Providers<$lt>) {
     closure_type => { cdata.closure_ty(def_id.index, tcx) }
     inherent_impls => { Rc::new(cdata.get_inherent_implementations_for_type(def_id.index)) }
     is_foreign_item => { cdata.is_foreign_item(def_id.index) }
+    is_default_impl => { cdata.is_default_impl(def_id.index) }
     describe_def => { cdata.get_def(def_id.index) }
     def_span => { cdata.get_span(def_id.index, &tcx.sess) }
     stability => { cdata.get_stability(def_id.index) }
@@ -176,11 +177,6 @@ fn is_const_fn(&self, did: DefId) -> bool
         self.get_crate_data(did.krate).is_const_fn(did.index)
     }
 
-    fn is_default_impl(&self, impl_did: DefId) -> bool {
-        self.dep_graph.read(DepNode::MetaData(impl_did));
-        self.get_crate_data(impl_did.krate).is_default_impl(impl_did.index)
-    }
-
     fn is_statically_included_foreign_item(&self, def_id: DefId) -> bool
     {
         self.do_is_statically_included_foreign_item(def_id)
@@ -403,7 +399,7 @@ fn item_body<'a, 'tcx>(&self,
         }
 
         self.dep_graph.read(DepNode::MetaData(def_id));
-        debug!("item_body({}): inlining item", tcx.item_path_str(def_id));
+        debug!("item_body({:?}): inlining item", def_id);
 
         self.get_crate_data(def_id.krate).item_body(tcx, def_id.index)
     }
@@ -515,4 +511,4 @@ fn visible_parent_map<'a>(&'a self) -> ::std::cell::Ref<'a, DefIdMap<DefId>> {
         drop(visible_parent_map);
         self.visible_parent_map.borrow()
     }
-}
\ No newline at end of file
+}
index 07c475949d4357555c41e90dc33776106bc121c1..a02a82dfe69a698b4d814391a8219392a0935413 100644 (file)
@@ -18,7 +18,7 @@
 use rustc::hir::def_id::{CrateNum, CRATE_DEF_INDEX, DefIndex, DefId, LOCAL_CRATE};
 use rustc::hir::map::definitions::DefPathTable;
 use rustc::dep_graph::{DepNode, GlobalMetaDataKind};
-use rustc::ich::{StableHashingContext, Fingerprint};
+use rustc::ich::Fingerprint;
 use rustc::middle::dependency_format::Linkage;
 use rustc::middle::lang_items;
 use rustc::mir;
@@ -29,7 +29,6 @@
 use rustc::util::nodemap::{FxHashMap, NodeSet};
 
 use rustc_serialize::{Encodable, Encoder, SpecializedEncoder, opaque};
-use rustc_data_structures::stable_hasher::{StableHasher, HashStable};
 
 use std::hash::Hash;
 use std::intrinsics;
@@ -37,7 +36,6 @@
 use std::io::Cursor;
 use std::path::Path;
 use std::rc::Rc;
-use std::sync::Arc;
 use std::u32;
 use syntax::ast::{self, CRATE_NODE_ID};
 use syntax::codemap::Spanned;
@@ -284,7 +282,6 @@ fn encode_codemap(&mut self) -> LazySeq<syntax_pos::FileMap> {
         let codemap = self.tcx.sess.codemap();
         let all_filemaps = codemap.files();
 
-        let hcx = &mut StableHashingContext::new(self.tcx);
         let (working_dir, working_dir_was_remapped) = self.tcx.sess.working_dir.clone();
 
         let adapted = all_filemaps.iter()
@@ -316,21 +313,10 @@ fn encode_codemap(&mut self) -> LazySeq<syntax_pos::FileMap> {
                     adapted.name = abs_path;
                     Rc::new(adapted)
                 }
-            });
-
-        let filemaps: Vec<_> = if self.compute_ich {
-            adapted.inspect(|filemap| {
-                let mut hasher = StableHasher::new();
-                filemap.hash_stable(hcx, &mut hasher);
-                let fingerprint = hasher.finish();
-                let dep_node = DepNode::FileMap((), Arc::new(filemap.name.clone()));
-                self.metadata_hashes.global_hashes.push((dep_node, fingerprint));
-            }).collect()
-        } else {
-            adapted.collect()
-        };
+            })
+            .collect::<Vec<_>>();
 
-        self.lazy_seq_ref(filemaps.iter().map(|fm| &**fm))
+        self.lazy_seq_ref(adapted.iter().map(|rc| &**rc))
     }
 
     fn encode_crate_root(&mut self) -> Lazy<CrateRoot> {
index 7722a7b10c99658ec817e075ef35e1e922de4d35..ed1680fbfaee86e76084891fd33c574f31e4e1d6 100644 (file)
@@ -35,7 +35,17 @@ pub fn new(ecx: &'a mut EncodeContext<'b, 'tcx>) -> Self {
             tcx: tcx,
             ecx: ecx,
             hcx: if compute_ich {
-                Some((StableHashingContext::new(tcx), StableHasher::new()))
+                // We are always hashing spans for things in metadata because
+                // we don't know whether a downstream crate will use them or not.
+                // Except when -Zquery-dep-graph is specified, because we don't
+                // want to mess up our tests.
+                let hcx = if tcx.sess.opts.debugging_opts.query_dep_graph {
+                    StableHashingContext::new(tcx)
+                } else {
+                    StableHashingContext::new(tcx).force_span_hashing()
+                };
+
+                Some((hcx, StableHasher::new()))
             } else {
                 None
             }
index e1832e0a0af347614978ad0db3a66db2a330d707..2884b60fdd8a9ced6c6044e8af4e78a04afeb330 100644 (file)
@@ -97,7 +97,8 @@ fn expr_as_rvalue(&mut self,
                 let value = this.hir.mirror(value);
                 let result = this.temp(expr.ty, expr_span);
                 // to start, malloc some memory of suitable type (thus far, uninitialized):
-                this.cfg.push_assign(block, source_info, &result, Rvalue::Box(value.ty));
+                let box_ = Rvalue::NullaryOp(NullOp::Box, value.ty);
+                this.cfg.push_assign(block, source_info, &result, box_);
                 this.in_scope(value_extents, block, |this| {
                     // schedule a shallow free of that memory, lest we unwind:
                     this.schedule_box_free(expr_span, value_extents, &result, value.ty);
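
`Rvalue::Box` is folded into the more general `Rvalue::NullaryOp`, operations that take a type argument but no value operands (currently `NullOp::SizeOf` and `NullOp::Box`). A small sketch of that shape, with simplified stand-ins for the MIR types rather than rustc's:

    // NullOp and the &'static str "type" below are simplified stand-ins, not rustc's types.
    #[derive(Debug, Clone, Copy)]
    enum NullOp {
        SizeOf, // yields the size of the type, like mem::size_of::<T>()
        Box,    // allocates uninitialized storage for a value of the type
    }

    #[derive(Debug)]
    enum Rvalue {
        Use(i64),
        NullaryOp(NullOp, &'static str),
    }

    fn describe(rv: &Rvalue) -> String {
        match rv {
            Rvalue::Use(v) => format!("use {}", v),
            Rvalue::NullaryOp(op, ty) => format!("{:?} {}", op, ty),
        }
    }

    fn main() {
        println!("{}", describe(&Rvalue::Use(0)));
        println!("{}", describe(&Rvalue::NullaryOp(NullOp::Box, "String")));
        println!("{}", describe(&Rvalue::NullaryOp(NullOp::SizeOf, "String")));
    }
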
index 428685d7f5058cf1dbdaeabb0251eb9096352e3d..54779cbe30126c9484c1034c8f519691c66cf58d 100644 (file)
@@ -198,11 +198,10 @@ fn build_drop_shim<'a, 'tcx>(tcx: ty::TyCtxt<'a, 'tcx, 'tcx>,
             elaborate_drops::elaborate_drop(
                 &mut elaborator,
                 source_info,
-                false,
                 &dropee,
                 (),
                 return_block,
-                Some(resume_block),
+                elaborate_drops::Unwind::To(resume_block),
                 START_BLOCK
             );
             elaborator.patch
index 19714849b09141c252cb1c9640fe679852768aaf..fa88eca6ec3f09f2cd02f1428d5e418c24dcf302 100644 (file)
@@ -53,7 +53,7 @@ fn visit_rvalue(&mut self, rvalue: &mut Rvalue<'tcx>, location: Location) {
             Rvalue::CheckedBinaryOp(..) |
             Rvalue::UnaryOp(..) |
             Rvalue::Discriminant(..) |
-            Rvalue::Box(..) |
+            Rvalue::NullaryOp(..) |
             Rvalue::Aggregate(..) => {
                 // These variants don't contain regions.
             }
index 4e84cbe6fecb130bfd268a39cdfab4c0edc345d2..3b1c54f68e49b878889ba43261671bbb115d0641 100644 (file)
@@ -361,7 +361,7 @@ fn assign(&mut self, dest: &Lvalue<'tcx>, location: Location) {
 
     /// Qualify a whole const, static initializer or const fn.
     fn qualify_const(&mut self) -> Qualif {
-        debug!("qualifying {} {}", self.mode, self.tcx.item_path_str(self.def_id));
+        debug!("qualifying {} {:?}", self.mode, self.def_id);
 
         let mir = self.mir;
 
@@ -595,7 +595,9 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
         match *rvalue {
             Rvalue::Use(_) |
             Rvalue::Repeat(..) |
-            Rvalue::UnaryOp(..) |
+            Rvalue::UnaryOp(UnOp::Neg, _) |
+            Rvalue::UnaryOp(UnOp::Not, _) |
+            Rvalue::NullaryOp(NullOp::SizeOf, _) |
             Rvalue::CheckedBinaryOp(..) |
             Rvalue::Cast(CastKind::ReifyFnPointer, ..) |
             Rvalue::Cast(CastKind::UnsafeFnPointer, ..) |
@@ -703,7 +705,8 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
                 if let ty::TyRawPtr(_) = lhs.ty(self.mir, self.tcx).sty {
                     assert!(op == BinOp::Eq || op == BinOp::Ne ||
                             op == BinOp::Le || op == BinOp::Lt ||
-                            op == BinOp::Ge || op == BinOp::Gt);
+                            op == BinOp::Ge || op == BinOp::Gt ||
+                            op == BinOp::Offset);
 
                     self.add(Qualif::NOT_CONST);
                     if self.mode != Mode::Fn {
@@ -719,7 +722,7 @@ fn visit_rvalue(&mut self, rvalue: &Rvalue<'tcx>, location: Location) {
                 }
             }
 
-            Rvalue::Box(_) => {
+            Rvalue::NullaryOp(NullOp::Box, _) => {
                 self.add(Qualif::NOT_CONST);
                 if self.mode != Mode::Fn {
                     struct_span_err!(self.tcx.sess, self.span, E0010,
index 6d9603ea459d40941bc1225945f8d55ace1b1eef..8258627748f3008df8b7f567fce5345c6604a966 100644 (file)
@@ -744,7 +744,7 @@ fn run_pass<'a, 'tcx>(&self,
                           mir: &mut Mir<'tcx>) {
         let item_id = src.item_id();
         let def_id = tcx.hir.local_def_id(item_id);
-        debug!("run_pass: {}", tcx.item_path_str(def_id));
+        debug!("run_pass: {:?}", def_id);
 
         if tcx.sess.err_count() > 0 {
             // compiling a broken program can obviously result in a
index 585840ce1e509f17df1558be01671169b234710d..50ebe366387330901ab6fc96db53c8f70fc58577 100644 (file)
@@ -11,7 +11,7 @@
 use std::fmt;
 use rustc::hir;
 use rustc::mir::*;
-use rustc::middle::const_val::ConstInt;
+use rustc::middle::const_val::{ConstInt, ConstVal};
 use rustc::middle::lang_items;
 use rustc::ty::{self, Ty};
 use rustc::ty::subst::{Kind, Substs};
@@ -50,6 +50,35 @@ pub enum DropFlagMode {
     Deep
 }
 
+#[derive(Copy, Clone, Debug)]
+pub enum Unwind {
+    To(BasicBlock),
+    InCleanup
+}
+
+impl Unwind {
+    fn is_cleanup(self) -> bool {
+        match self {
+            Unwind::To(..) => false,
+            Unwind::InCleanup => true
+        }
+    }
+
+    fn into_option(self) -> Option<BasicBlock> {
+        match self {
+            Unwind::To(bb) => Some(bb),
+            Unwind::InCleanup => None,
+        }
+    }
+
+    fn map<F>(self, f: F) -> Self where F: FnOnce(BasicBlock) -> BasicBlock {
+        match self {
+            Unwind::To(bb) => Unwind::To(f(bb)),
+            Unwind::InCleanup => Unwind::InCleanup
+        }
+    }
+}
+
 pub trait DropElaborator<'a, 'tcx: 'a> : fmt::Debug {
     type Path : Copy + fmt::Debug;
 
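The new `Unwind` type replaces the `(is_cleanup, Option<BasicBlock>)` pair that previously had to be kept in sync by hand (note the removed `assert_eq!(unwind.is_none(), is_cleanup)` below). A self-contained sketch of how such a type behaves, with a stub `BasicBlock` index in place of rustc's:

    #[derive(Copy, Clone, Debug, PartialEq)]
    struct BasicBlock(u32); // stub for rustc's MIR block index

    #[derive(Copy, Clone, Debug, PartialEq)]
    enum Unwind {
        To(BasicBlock), // normal code: unwinding branches to this cleanup block
        InCleanup,      // already on the unwind path: no further unwind edge
    }

    impl Unwind {
        fn is_cleanup(self) -> bool {
            matches!(self, Unwind::InCleanup)
        }
        fn into_option(self) -> Option<BasicBlock> {
            match self {
                Unwind::To(bb) => Some(bb),
                Unwind::InCleanup => None,
            }
        }
        fn map(self, f: impl FnOnce(BasicBlock) -> BasicBlock) -> Self {
            match self {
                Unwind::To(bb) => Unwind::To(f(bb)),
                Unwind::InCleanup => Unwind::InCleanup,
            }
        }
    }

    fn main() {
        let u = Unwind::To(BasicBlock(7));
        assert!(!u.is_cleanup());
        assert_eq!(u.into_option(), Some(BasicBlock(7)));
        assert_eq!(Unwind::InCleanup.map(|bb| bb), Unwind::InCleanup);
    }

Carrying one value instead of two means a block cannot claim to be on the unwind path while still holding an unwind edge.
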
@@ -75,28 +104,25 @@ struct DropCtxt<'l, 'b: 'l, 'tcx: 'b, D>
     elaborator: &'l mut D,
 
     source_info: SourceInfo,
-    is_cleanup: bool,
 
     lvalue: &'l Lvalue<'tcx>,
     path: D::Path,
     succ: BasicBlock,
-    unwind: Option<BasicBlock>,
+    unwind: Unwind,
 }
 
 pub fn elaborate_drop<'b, 'tcx, D>(
     elaborator: &mut D,
     source_info: SourceInfo,
-    is_cleanup: bool,
     lvalue: &Lvalue<'tcx>,
     path: D::Path,
     succ: BasicBlock,
-    unwind: Option<BasicBlock>,
+    unwind: Unwind,
     bb: BasicBlock)
     where D: DropElaborator<'b, 'tcx>
 {
-    assert_eq!(unwind.is_none(), is_cleanup);
     DropCtxt {
-        elaborator, source_info, is_cleanup, lvalue, path, succ, unwind
+        elaborator, source_info, lvalue, path, succ, unwind
     }.elaborate_drop(bb)
 }
 
@@ -145,14 +171,13 @@ pub fn elaborate_drop<'a>(&mut self, bb: BasicBlock) {
                 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Drop {
                     location: self.lvalue.clone(),
                     target: self.succ,
-                    unwind: self.unwind
+                    unwind: self.unwind.into_option(),
                 });
             }
             DropStyle::Conditional => {
-                let is_cleanup = self.is_cleanup; // FIXME(#6393)
+                let unwind = self.unwind; // FIXME(#6393)
                 let succ = self.succ;
-                let drop_bb = self.complete_drop(
-                    is_cleanup, Some(DropFlagMode::Deep), succ);
+                let drop_bb = self.complete_drop(Some(DropFlagMode::Deep), succ, unwind);
                 self.elaborator.patch().patch_terminator(bb, TerminatorKind::Goto {
                     target: drop_bb
                 });
@@ -189,11 +214,10 @@ fn move_paths_for_fields(&self,
     }
 
     fn drop_subpath(&mut self,
-                    is_cleanup: bool,
                     lvalue: &Lvalue<'tcx>,
                     path: Option<D::Path>,
                     succ: BasicBlock,
-                    unwind: Option<BasicBlock>)
+                    unwind: Unwind)
                     -> BasicBlock
     {
         if let Some(path) = path {
@@ -202,7 +226,7 @@ fn drop_subpath(&mut self,
             DropCtxt {
                 elaborator: self.elaborator,
                 source_info: self.source_info,
-                path, lvalue, succ, unwind, is_cleanup
+                path, lvalue, succ, unwind,
             }.elaborated_drop_block()
         } else {
             debug!("drop_subpath: for rest field {:?}", lvalue);
@@ -210,49 +234,46 @@ fn drop_subpath(&mut self,
             DropCtxt {
                 elaborator: self.elaborator,
                 source_info: self.source_info,
-                lvalue, succ, unwind, is_cleanup,
+                lvalue, succ, unwind,
                 // Using `self.path` here to condition the drop on
                 // our own drop flag.
                 path: self.path
-            }.complete_drop(is_cleanup, None, succ)
+            }.complete_drop(None, succ, unwind)
         }
     }
 
     /// Create one-half of the drop ladder for a list of fields, and return
-    /// the list of steps in it in reverse order.
+    /// the list of steps in it in reverse order, with the first step
+    /// dropping 0 fields and so on.
     ///
     /// `unwind_ladder` is such a list of steps in reverse order,
-    /// which is called instead of the next step if the drop unwinds
-    /// (the first field is never reached). If it is `None`, all
-    /// unwind targets are left blank.
-    fn drop_halfladder<'a>(&mut self,
-                           unwind_ladder: Option<&[BasicBlock]>,
-                           succ: BasicBlock,
-                           fields: &[(Lvalue<'tcx>, Option<D::Path>)],
-                           is_cleanup: bool)
-                           -> Vec<BasicBlock>
+    /// which is called if the matching step of the drop glue panics.
+    fn drop_halfladder(&mut self,
+                       unwind_ladder: &[Unwind],
+                       mut succ: BasicBlock,
+                       fields: &[(Lvalue<'tcx>, Option<D::Path>)])
+                       -> Vec<BasicBlock>
     {
-        let mut unwind_succ = if is_cleanup {
-            None
-        } else {
-            self.unwind
-        };
-
-        let goto = TerminatorKind::Goto { target: succ };
-        let mut succ = self.new_block(is_cleanup, goto);
-
-        // Always clear the "master" drop flag at the bottom of the
-        // ladder. This is needed because the "master" drop flag
-        // protects the ADT's discriminant, which is invalidated
-        // after the ADT is dropped.
-        let succ_loc = Location { block: succ, statement_index: 0 };
-        self.elaborator.clear_drop_flag(succ_loc, self.path, DropFlagMode::Shallow);
+        Some(succ).into_iter().chain(
+            fields.iter().rev().zip(unwind_ladder)
+                .map(|(&(ref lv, path), &unwind_succ)| {
+                    succ = self.drop_subpath(lv, path, succ, unwind_succ);
+                    succ
+                })
+        ).collect()
+    }
 
-        fields.iter().rev().enumerate().map(|(i, &(ref lv, path))| {
-            succ = self.drop_subpath(is_cleanup, lv, path, succ, unwind_succ);
-            unwind_succ = unwind_ladder.as_ref().map(|p| p[i]);
-            succ
-        }).collect()
+    fn drop_ladder_bottom(&mut self) -> (BasicBlock, Unwind) {
+        // Clear the "master" drop flag at the end. This is needed
+        // because the "master" drop protects the ADT's discriminant,
+        // which is invalidated after the ADT is dropped.
+        let (succ, unwind) = (self.succ, self.unwind); // FIXME(#6393)
+        (
+            self.drop_flag_reset_block(DropFlagMode::Shallow, succ, unwind),
+            unwind.map(|unwind| {
+                self.drop_flag_reset_block(DropFlagMode::Shallow, unwind, Unwind::InCleanup)
+            })
+        )
     }
 
     /// Create a full drop ladder, consisting of 2 connected half-drop-ladders
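
The half-ladder threads a single successor block through the fields in reverse order, so entry i of the returned list drops the last i fields. A self-contained sketch of that shape, using plain usize block ids and a closure in place of `BasicBlock` and `drop_subpath` (both hypothetical stand-ins, not rustc's types):

    fn drop_halfladder(
        mut succ: usize,
        fields: &[&str],
        unwind_ladder: &[usize],
        mut drop_subpath: impl FnMut(&str, usize, usize) -> usize,
    ) -> Vec<usize> {
        // First entry is the final target; each later entry drops one more field,
        // walking the fields in reverse so the last field is dropped first.
        std::iter::once(succ)
            .chain(fields.iter().rev().zip(unwind_ladder).map(|(&field, &unwind)| {
                succ = drop_subpath(field, succ, unwind);
                succ
            }))
            .collect()
    }

    fn main() {
        let mut next_block = 100;
        let ladder = drop_halfladder(10, &["a", "b", "c"], &[20, 21, 22], |field, succ, unwind| {
            next_block += 1;
            println!("bb{}: drop {}, ok -> bb{}, panic -> bb{}", next_block, field, succ, unwind);
            next_block
        });
        // ladder[0] is the original successor; ladder[i] drops the last i fields.
        assert_eq!(ladder, vec![10, 101, 102, 103]);
    }
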
@@ -269,9 +290,14 @@ fn drop_halfladder<'a>(&mut self,
     ///     ELAB(drop location.1 [target=.c2])
     /// .c2:
     ///     ELAB(drop location.2 [target=`self.unwind`])
+    ///
+    /// NOTE: this does not clear the master drop flag, so you need
+    /// to point succ/unwind at the blocks returned by `drop_ladder_bottom`.
     fn drop_ladder<'a>(&mut self,
-                       fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>)
-                       -> (BasicBlock, Option<BasicBlock>)
+                       fields: Vec<(Lvalue<'tcx>, Option<D::Path>)>,
+                       succ: BasicBlock,
+                       unwind: Unwind)
+                       -> (BasicBlock, Unwind)
     {
         debug!("drop_ladder({:?}, {:?})", self, fields);
 
@@ -282,21 +308,18 @@ fn drop_ladder<'a>(&mut self,
 
         debug!("drop_ladder - fields needing drop: {:?}", fields);
 
-        let unwind_ladder = if self.is_cleanup {
-            None
+        let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
+        let unwind_ladder: Vec<_> = if let Unwind::To(target) = unwind {
+            let halfladder = self.drop_halfladder(&unwind_ladder, target, &fields);
+            halfladder.into_iter().map(Unwind::To).collect()
         } else {
-            let unwind = self.unwind.unwrap(); // FIXME(#6393)
-            Some(self.drop_halfladder(None, unwind, &fields, true))
+            unwind_ladder
         };
 
-        let succ = self.succ; // FIXME(#6393)
-        let is_cleanup = self.is_cleanup;
         let normal_ladder =
-            self.drop_halfladder(unwind_ladder.as_ref().map(|x| &**x),
-                                 succ, &fields, is_cleanup);
+            self.drop_halfladder(&unwind_ladder, succ, &fields);
 
-        (normal_ladder.last().cloned().unwrap_or(succ),
-         unwind_ladder.and_then(|l| l.last().cloned()).or(self.unwind))
+        (*normal_ladder.last().unwrap(), *unwind_ladder.last().unwrap())
     }
 
     fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
@@ -309,7 +332,8 @@ fn open_drop_for_tuple<'a>(&mut self, tys: &[Ty<'tcx>])
              self.elaborator.field_subpath(self.path, Field::new(i)))
         }).collect();
 
-        self.drop_ladder(fields).0
+        let (succ, unwind) = self.drop_ladder_bottom();
+        self.drop_ladder(fields, succ, unwind).0
     }
 
     fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
@@ -320,13 +344,13 @@ fn open_drop_for_box<'a>(&mut self, ty: Ty<'tcx>) -> BasicBlock
         let interior_path = self.elaborator.deref_subpath(self.path);
 
         let succ = self.succ; // FIXME(#6393)
-        let is_cleanup = self.is_cleanup;
-        let succ = self.box_free_block(ty, succ, is_cleanup);
-        let unwind_succ = self.unwind.map(|u| {
-            self.box_free_block(ty, u, true)
+        let unwind = self.unwind;
+        let succ = self.box_free_block(ty, succ, unwind);
+        let unwind_succ = self.unwind.map(|unwind| {
+            self.box_free_block(ty, unwind, Unwind::InCleanup)
         });
 
-        self.drop_subpath(is_cleanup, &interior, interior_path, succ, unwind_succ)
+        self.drop_subpath(&interior, interior_path, succ, unwind_succ)
     }
 
     fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<'tcx>)
@@ -339,7 +363,7 @@ fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<
                     source_info: self.source_info,
                     kind: TerminatorKind::Unreachable
                 }),
-                is_cleanup: self.is_cleanup
+                is_cleanup: self.unwind.is_cleanup()
             });
         }
 
@@ -356,114 +380,108 @@ fn open_drop_for_adt<'a>(&mut self, adt: &'tcx ty::AdtDef, substs: &'tcx Substs<
         }
     }
 
-    fn open_drop_for_adt_contents<'a>(&mut self, adt: &'tcx ty::AdtDef,
-                                      substs: &'tcx Substs<'tcx>)
-                                      -> (BasicBlock, Option<BasicBlock>) {
-        match adt.variants.len() {
-            1 => {
-                let fields = self.move_paths_for_fields(
-                    self.lvalue,
-                    self.path,
-                    &adt.variants[0],
-                    substs
-                );
-                self.drop_ladder(fields)
-            }
-            _ => {
-                let is_cleanup = self.is_cleanup;
-                let succ = self.succ;
-                let unwind = self.unwind; // FIXME(#6393)
+    fn open_drop_for_adt_contents(&mut self, adt: &'tcx ty::AdtDef,
+                                  substs: &'tcx Substs<'tcx>)
+                                  -> (BasicBlock, Unwind) {
+        let (succ, unwind) = self.drop_ladder_bottom();
+        if adt.variants.len() == 1 {
+            let fields = self.move_paths_for_fields(
+                self.lvalue,
+                self.path,
+                &adt.variants[0],
+                substs
+            );
+            self.drop_ladder(fields, succ, unwind)
+        } else {
+            self.open_drop_for_multivariant(adt, substs, succ, unwind)
+        }
+    }
+
+    fn open_drop_for_multivariant(&mut self, adt: &'tcx ty::AdtDef,
+                                  substs: &'tcx Substs<'tcx>,
+                                  succ: BasicBlock,
+                                  unwind: Unwind)
+                                  -> (BasicBlock, Unwind) {
+        let mut values = Vec::with_capacity(adt.variants.len());
+        let mut normal_blocks = Vec::with_capacity(adt.variants.len());
+        let mut unwind_blocks = if unwind.is_cleanup() {
+            None
+        } else {
+            Some(Vec::with_capacity(adt.variants.len()))
+        };
 
-                let mut values = Vec::with_capacity(adt.variants.len());
-                let mut normal_blocks = Vec::with_capacity(adt.variants.len());
-                let mut unwind_blocks = if is_cleanup {
-                    None
-                } else {
-                    Some(Vec::with_capacity(adt.variants.len()))
-                };
-                let mut otherwise = None;
-                let mut unwind_otherwise = None;
-                for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
-                    let subpath = self.elaborator.downcast_subpath(
-                        self.path, variant_index);
-                    if let Some(variant_path) = subpath {
-                        let base_lv = self.lvalue.clone().elem(
-                            ProjectionElem::Downcast(adt, variant_index)
+        let mut have_otherwise = false;
+
+        for (variant_index, discr) in adt.discriminants(self.tcx()).enumerate() {
+            let subpath = self.elaborator.downcast_subpath(
+                self.path, variant_index);
+            if let Some(variant_path) = subpath {
+                let base_lv = self.lvalue.clone().elem(
+                    ProjectionElem::Downcast(adt, variant_index)
                         );
-                        let fields = self.move_paths_for_fields(
-                            &base_lv,
-                            variant_path,
-                            &adt.variants[variant_index],
-                            substs);
-                        values.push(discr);
-                        if let Some(ref mut unwind_blocks) = unwind_blocks {
-                            // We can't use the half-ladder from the original
-                            // drop ladder, because this breaks the
-                            // "funclet can't have 2 successor funclets"
-                            // requirement from MSVC:
-                            //
-                            //           switch       unwind-switch
-                            //          /      \         /        \
-                            //         v1.0    v2.0  v2.0-unwind  v1.0-unwind
-                            //         |        |      /             |
-                            //    v1.1-unwind  v2.1-unwind           |
-                            //      ^                                |
-                            //       \-------------------------------/
-                            //
-                            // Create a duplicate half-ladder to avoid that. We
-                            // could technically only do this on MSVC, but I
-                            // I want to minimize the divergence between MSVC
-                            // and non-MSVC.
-
-                            let unwind = unwind.unwrap();
-                            let halfladder = self.drop_halfladder(
-                                None, unwind, &fields, true);
-                            unwind_blocks.push(
-                                halfladder.last().cloned().unwrap_or(unwind)
-                            );
-                        }
-                        let (normal, _) = self.drop_ladder(fields);
-                        normal_blocks.push(normal);
-                    } else {
-                        // variant not found - drop the entire enum
-                        if let None = otherwise {
-                            otherwise = Some(self.complete_drop(
-                                is_cleanup,
-                                Some(DropFlagMode::Shallow),
-                                succ));
-                            unwind_otherwise = unwind.map(|unwind| self.complete_drop(
-                                true,
-                                Some(DropFlagMode::Shallow),
-                                unwind
-                            ));
-                        }
-                    }
-                }
-                if let Some(block) = otherwise {
-                    normal_blocks.push(block);
-                    if let Some(ref mut unwind_blocks) = unwind_blocks {
-                        unwind_blocks.push(unwind_otherwise.unwrap());
-                    }
-                } else {
-                    values.pop();
+                let fields = self.move_paths_for_fields(
+                    &base_lv,
+                    variant_path,
+                    &adt.variants[variant_index],
+                    substs);
+                values.push(discr);
+                if let Unwind::To(unwind) = unwind {
+                    // We can't use the half-ladder from the original
+                    // drop ladder, because this breaks the
+                    // "funclet can't have 2 successor funclets"
+                    // requirement from MSVC:
+                    //
+                    //           switch       unwind-switch
+                    //          /      \         /        \
+                    //         v1.0    v2.0  v2.0-unwind  v1.0-unwind
+                    //         |        |      /             |
+                    //    v1.1-unwind  v2.1-unwind           |
+                    //      ^                                |
+                    //       \-------------------------------/
+                    //
+                    // Create a duplicate half-ladder to avoid that. We
+                    // could technically only do this on MSVC, but I want
+                    // to minimize the divergence between MSVC
+                    // and non-MSVC.
+
+                    let unwind_blocks = unwind_blocks.as_mut().unwrap();
+                    let unwind_ladder = vec![Unwind::InCleanup; fields.len() + 1];
+                    let halfladder =
+                        self.drop_halfladder(&unwind_ladder, unwind, &fields);
+                    unwind_blocks.push(halfladder.last().cloned().unwrap());
                 }
+                let (normal, _) = self.drop_ladder(fields, succ, unwind);
+                normal_blocks.push(normal);
+            } else {
+                have_otherwise = true;
+            }
+        }
 
-                (self.adt_switch_block(is_cleanup, adt, normal_blocks, &values, succ),
-                 unwind_blocks.map(|unwind_blocks| {
-                     self.adt_switch_block(
-                         is_cleanup, adt, unwind_blocks, &values, unwind.unwrap()
-                     )
-                 }))
+        if have_otherwise {
+            normal_blocks.push(self.drop_block(succ, unwind));
+            if let Unwind::To(unwind) = unwind {
+                unwind_blocks.as_mut().unwrap().push(
+                    self.drop_block(unwind, Unwind::InCleanup)
+                        );
             }
+        } else {
+            values.pop();
         }
+
+        (self.adt_switch_block(adt, normal_blocks, &values, succ, unwind),
+         unwind.map(|unwind| {
+             self.adt_switch_block(
+                 adt, unwind_blocks.unwrap(), &values, unwind, Unwind::InCleanup
+             )
+         }))
     }
 
     fn adt_switch_block(&mut self,
-                        is_cleanup: bool,
                         adt: &'tcx ty::AdtDef,
                         blocks: Vec<BasicBlock>,
                         values: &[ConstInt],
-                        succ: BasicBlock)
+                        succ: BasicBlock,
+                        unwind: Unwind)
                         -> BasicBlock {
         // If there are multiple variants, then if something
         // is present within the enum the discriminant, tracked
@@ -475,13 +493,8 @@ fn adt_switch_block(&mut self,
         let discr_ty = adt.repr.discr_type().to_ty(self.tcx());
         let discr = Lvalue::Local(self.new_temp(discr_ty));
         let discr_rv = Rvalue::Discriminant(self.lvalue.clone());
-        let switch_block = self.elaborator.patch().new_block(BasicBlockData {
-            statements: vec![
-                Statement {
-                    source_info: self.source_info,
-                    kind: StatementKind::Assign(discr.clone(), discr_rv),
-                }
-                ],
+        let switch_block = BasicBlockData {
+            statements: vec![self.assign(&discr, discr_rv)],
             terminator: Some(Terminator {
                 source_info: self.source_info,
                 kind: TerminatorKind::SwitchInt {
@@ -491,12 +504,13 @@ fn adt_switch_block(&mut self,
                     targets: blocks,
                 }
             }),
-            is_cleanup: is_cleanup,
-        });
-        self.drop_flag_test_block(is_cleanup, switch_block, succ)
+            is_cleanup: unwind.is_cleanup(),
+        };
+        let switch_block = self.elaborator.patch().new_block(switch_block);
+        self.drop_flag_test_block(switch_block, succ, unwind)
     }
 
-    fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Option<BasicBlock>))
+    fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Unwind))
                                  -> BasicBlock
     {
         debug!("destructor_call_block({:?}, {:?})", self, succ);
@@ -513,26 +527,213 @@ fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Option<Basi
         let ref_lvalue = self.new_temp(ref_ty);
         let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
 
-        self.elaborator.patch().new_block(BasicBlockData {
-            statements: vec![Statement {
-                source_info: self.source_info,
-                kind: StatementKind::Assign(
-                    Lvalue::Local(ref_lvalue),
-                    Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone())
-                )
-            }],
+        let result = BasicBlockData {
+            statements: vec![self.assign(
+                &Lvalue::Local(ref_lvalue),
+                Rvalue::Ref(tcx.types.re_erased, BorrowKind::Mut, self.lvalue.clone())
+            )],
             terminator: Some(Terminator {
                 kind: TerminatorKind::Call {
                     func: Operand::function_handle(tcx, drop_fn.def_id, substs,
                                                    self.source_info.span),
                     args: vec![Operand::Consume(Lvalue::Local(ref_lvalue))],
                     destination: Some((unit_temp, succ)),
-                    cleanup: unwind,
+                    cleanup: unwind.into_option(),
                 },
                 source_info: self.source_info
             }),
-            is_cleanup: self.is_cleanup,
-        })
+            is_cleanup: unwind.is_cleanup(),
+        };
+        self.elaborator.patch().new_block(result)
+    }
+
+    /// create a loop that drops an array:
+    ///
+    /// loop-block:
+    ///    can_go = cur == length_or_end
+    ///    if can_go then succ else drop-block
+    /// drop-block:
+    ///    if ptr_based {
+    ///        ptr = cur
+    ///        cur = cur.offset(1)
+    ///    } else {
+    ///        ptr = &mut LV[cur]
+    ///        cur = cur + 1
+    ///    }
+    ///    drop(ptr)
+    fn drop_loop(&mut self,
+                 succ: BasicBlock,
+                 cur: &Lvalue<'tcx>,
+                 length_or_end: &Lvalue<'tcx>,
+                 ety: Ty<'tcx>,
+                 unwind: Unwind,
+                 ptr_based: bool)
+                 -> BasicBlock
+    {
+        let use_ = |lv: &Lvalue<'tcx>| Operand::Consume(lv.clone());
+        let tcx = self.tcx();
+
+        let ref_ty = tcx.mk_ref(tcx.types.re_erased, ty::TypeAndMut {
+            ty: ety,
+            mutbl: hir::Mutability::MutMutable
+        });
+        let ptr = &Lvalue::Local(self.new_temp(ref_ty));
+        let can_go = &Lvalue::Local(self.new_temp(tcx.types.bool));
+
+        let one = self.constant_usize(1);
+        let (ptr_next, cur_next) = if ptr_based {
+            (Rvalue::Use(use_(cur)),
+             Rvalue::BinaryOp(BinOp::Offset, use_(cur), one))
+        } else {
+            (Rvalue::Ref(
+                 tcx.types.re_erased,
+                 BorrowKind::Mut,
+                 self.lvalue.clone().index(use_(cur))),
+             Rvalue::BinaryOp(BinOp::Add, use_(cur), one))
+        };
+
+        let drop_block = BasicBlockData {
+            statements: vec![
+                self.assign(ptr, ptr_next),
+                self.assign(cur, cur_next)
+            ],
+            is_cleanup: unwind.is_cleanup(),
+            terminator: Some(Terminator {
+                source_info: self.source_info,
+                // this gets overwritten by drop elaboration.
+                kind: TerminatorKind::Unreachable,
+            })
+        };
+        let drop_block = self.elaborator.patch().new_block(drop_block);
+
+        let loop_block = BasicBlockData {
+            statements: vec![
+                self.assign(can_go, Rvalue::BinaryOp(BinOp::Eq,
+                                                     use_(cur),
+                                                     use_(length_or_end)))
+            ],
+            is_cleanup: unwind.is_cleanup(),
+            terminator: Some(Terminator {
+                source_info: self.source_info,
+                kind: TerminatorKind::if_(tcx, use_(can_go), succ, drop_block)
+            })
+        };
+        let loop_block = self.elaborator.patch().new_block(loop_block);
+
+        self.elaborator.patch().patch_terminator(drop_block, TerminatorKind::Drop {
+            location: ptr.clone().deref(),
+            target: loop_block,
+            unwind: unwind.into_option()
+        });
+
+        loop_block
+    }
+
+    fn open_drop_for_array(&mut self, ety: Ty<'tcx>) -> BasicBlock {
+        debug!("open_drop_for_array({:?})", ety);
+
+        // if size_of::<ety>() == 0 {
+        //     index_based_loop
+        // } else {
+        //     ptr_based_loop
+        // }
+
+        let tcx = self.tcx();
+
+        let use_ = |lv: &Lvalue<'tcx>| Operand::Consume(lv.clone());
+        let size = &Lvalue::Local(self.new_temp(tcx.types.usize));
+        let size_is_zero = &Lvalue::Local(self.new_temp(tcx.types.bool));
+        let base_block = BasicBlockData {
+            statements: vec![
+                self.assign(size, Rvalue::NullaryOp(NullOp::SizeOf, ety)),
+                self.assign(size_is_zero, Rvalue::BinaryOp(BinOp::Eq,
+                                                           use_(size),
+                                                           self.constant_usize(0)))
+            ],
+            is_cleanup: self.unwind.is_cleanup(),
+            terminator: Some(Terminator {
+                source_info: self.source_info,
+                kind: TerminatorKind::if_(
+                    tcx,
+                    use_(size_is_zero),
+                    self.drop_loop_pair(ety, false),
+                    self.drop_loop_pair(ety, true)
+                )
+            })
+        };
+        self.elaborator.patch().new_block(base_block)
+    }
+
+    // Create a pair of drop-loops over `lvalue`, which drop its contents even
+    // if one of the element drops panics. If `ptr_based`, create a pointer
+    // loop, otherwise an index loop.
+    fn drop_loop_pair(&mut self, ety: Ty<'tcx>, ptr_based: bool) -> BasicBlock {
+        debug!("drop_loop_pair({:?}, {:?})", ety, ptr_based);
+        let tcx = self.tcx();
+        let iter_ty = if ptr_based {
+            tcx.mk_ptr(ty::TypeAndMut { ty: ety, mutbl: hir::Mutability::MutMutable })
+        } else {
+            tcx.types.usize
+        };
+
+        let cur = Lvalue::Local(self.new_temp(iter_ty));
+        let length = Lvalue::Local(self.new_temp(tcx.types.usize));
+        let length_or_end = if ptr_based {
+            Lvalue::Local(self.new_temp(iter_ty))
+        } else {
+            length.clone()
+        };
+
+        let unwind = self.unwind.map(|unwind| {
+            self.drop_loop(unwind,
+                           &cur,
+                           &length_or_end,
+                           ety,
+                           Unwind::InCleanup,
+                           ptr_based)
+        });
+
+        let succ = self.succ; // FIXME(#6393)
+        let loop_block = self.drop_loop(
+            succ,
+            &cur,
+            &length_or_end,
+            ety,
+            unwind,
+            ptr_based);
+
+        let zero = self.constant_usize(0);
+        let mut drop_block_stmts = vec![];
+        drop_block_stmts.push(self.assign(&length, Rvalue::Len(self.lvalue.clone())));
+        if ptr_based {
+            // cur = &LV[0];
+            // end = &LV[len];
+            drop_block_stmts.push(self.assign(&cur, Rvalue::Ref(
+                tcx.types.re_erased, BorrowKind::Mut,
+                self.lvalue.clone().index(zero.clone())
+            )));
+            drop_block_stmts.push(self.assign(&length_or_end, Rvalue::Ref(
+                tcx.types.re_erased, BorrowKind::Mut,
+                self.lvalue.clone().index(Operand::Consume(length.clone()))
+            )));
+        } else {
+            // index = 0 (length already pushed)
+            drop_block_stmts.push(self.assign(&cur, Rvalue::Use(zero)));
+        }
+        let drop_block = self.elaborator.patch().new_block(BasicBlockData {
+            statements: drop_block_stmts,
+            is_cleanup: unwind.is_cleanup(),
+            terminator: Some(Terminator {
+                source_info: self.source_info,
+                kind: TerminatorKind::Goto { target: loop_block }
+            })
+        });
+
+        // FIXME(#34708): handle partially-dropped array/slice elements.
+        let reset_block = self.drop_flag_reset_block(DropFlagMode::Deep, drop_block, unwind);
+        self.drop_flag_test_block(reset_block, succ, unwind)
     }
 
     /// The slow-path - create an "open", elaborated drop for a type
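
The new array drop glue first tests `size_of::<ety>() == 0` and then picks either an index-based loop (for zero-sized element types, where offsetting a pointer is useless) or a pointer-based loop. Roughly, the generated MIR corresponds to the following ordinary Rust; this is only a sketch using `std::ptr::drop_in_place` and `ManuallyDrop`, not the MIR itself:

    use std::mem::{self, ManuallyDrop};

    // Roughly what the two generated loops do for a buffer of initialized elements.
    // This is a sketch, not the MIR the compiler emits.
    unsafe fn drop_elements<T>(buf: &mut ManuallyDrop<Vec<T>>) {
        let len = buf.len();
        if mem::size_of::<T>() == 0 {
            // Index-based loop: for zero-sized types we count elements instead
            // of advancing a pointer.
            let mut cur = 0;
            while cur != len {
                std::ptr::drop_in_place(&mut buf[cur]);
                cur += 1;
            }
        } else {
            // Pointer-based loop: cur walks from the start to one-past-the-end.
            let mut cur = buf.as_mut_ptr();
            let end = cur.add(len);
            while cur != end {
                std::ptr::drop_in_place(cur);
                cur = cur.add(1);
            }
        }
    }

    fn main() {
        let mut v = ManuallyDrop::new(vec![String::from("a"), String::from("b")]);
        unsafe { drop_elements(&mut v) };
        // The Vec's buffer itself is leaked here; only the elements were dropped,
        // which is the part drop elaboration is responsible for.
    }
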
@@ -545,8 +746,6 @@ fn destructor_call_block<'a>(&mut self, (succ, unwind): (BasicBlock, Option<Basi
     /// ADT, both in the success case or if one of the destructors fail.
     fn open_drop<'a>(&mut self) -> BasicBlock {
         let ty = self.lvalue_ty(self.lvalue);
-        let is_cleanup = self.is_cleanup; // FIXME(#6393)
-        let succ = self.succ;
         match ty.sty {
             ty::TyClosure(def_id, substs) => {
                 let tys : Vec<_> = substs.upvar_tys(def_id, self.tcx()).collect();
@@ -562,12 +761,12 @@ fn open_drop<'a>(&mut self) -> BasicBlock {
                 self.open_drop_for_adt(def, substs)
             }
             ty::TyDynamic(..) => {
-                self.complete_drop(is_cleanup, Some(DropFlagMode::Deep), succ)
+                let unwind = self.unwind; // FIXME(#6393)
+                let succ = self.succ;
+                self.complete_drop(Some(DropFlagMode::Deep), succ, unwind)
             }
-            ty::TyArray(..) | ty::TySlice(..) => {
-                // FIXME(#34708): handle partially-dropped
-                // array/slice elements.
-                self.complete_drop(is_cleanup, Some(DropFlagMode::Deep), succ)
+            ty::TyArray(ety, _) | ty::TySlice(ety) => {
+                self.open_drop_for_array(ety)
             }
             _ => bug!("open drop from non-ADT `{:?}`", ty)
         }
@@ -581,26 +780,40 @@ fn open_drop<'a>(&mut self) -> BasicBlock {
     ///     if let Some(mode) = mode: FLAG(self.path)[mode] = false
     ///     drop(self.lv)
     fn complete_drop<'a>(&mut self,
-                         is_cleanup: bool,
                          drop_mode: Option<DropFlagMode>,
-                         succ: BasicBlock) -> BasicBlock
+                         succ: BasicBlock,
+                         unwind: Unwind) -> BasicBlock
     {
         debug!("complete_drop({:?},{:?})", self, drop_mode);
 
-        let drop_block = self.drop_block(is_cleanup, succ);
-        if let Some(mode) = drop_mode {
-            let block_start = Location { block: drop_block, statement_index: 0 };
-            self.elaborator.clear_drop_flag(block_start, self.path, mode);
-        }
+        let drop_block = self.drop_block(succ, unwind);
+        let drop_block = if let Some(mode) = drop_mode {
+            self.drop_flag_reset_block(mode, drop_block, unwind)
+        } else {
+            drop_block
+        };
+
+        self.drop_flag_test_block(drop_block, succ, unwind)
+    }
 
-        self.drop_flag_test_block(is_cleanup, drop_block, succ)
+    fn drop_flag_reset_block(&mut self,
+                             mode: DropFlagMode,
+                             succ: BasicBlock,
+                             unwind: Unwind) -> BasicBlock
+    {
+        debug!("drop_flag_reset_block({:?},{:?})", self, mode);
+
+        let block = self.new_block(unwind, TerminatorKind::Goto { target: succ });
+        let block_start = Location { block: block, statement_index: 0 };
+        self.elaborator.clear_drop_flag(block_start, self.path, mode);
+        block
     }
 
     fn elaborated_drop_block<'a>(&mut self) -> BasicBlock {
         debug!("elaborated_drop_block({:?})", self);
-        let is_cleanup = self.is_cleanup; // FIXME(#6393)
+        let unwind = self.unwind; // FIXME(#6393)
         let succ = self.succ;
-        let blk = self.drop_block(is_cleanup, succ);
+        let blk = self.drop_block(succ, unwind);
         self.elaborate_drop(blk);
         blk
     }
@@ -609,17 +822,17 @@ fn box_free_block<'a>(
         &mut self,
         ty: Ty<'tcx>,
         target: BasicBlock,
-        is_cleanup: bool
+        unwind: Unwind,
     ) -> BasicBlock {
-        let block = self.unelaborated_free_block(ty, target, is_cleanup);
-        self.drop_flag_test_block(is_cleanup, block, target)
+        let block = self.unelaborated_free_block(ty, target, unwind);
+        self.drop_flag_test_block(block, target, unwind)
     }
 
     fn unelaborated_free_block<'a>(
         &mut self,
         ty: Ty<'tcx>,
         target: BasicBlock,
-        is_cleanup: bool
+        unwind: Unwind
     ) -> BasicBlock {
         let tcx = self.tcx();
         let unit_temp = Lvalue::Local(self.new_temp(tcx.mk_nil()));
@@ -632,31 +845,31 @@ fn unelaborated_free_block<'a>(
             destination: Some((unit_temp, target)),
             cleanup: None
         }; // FIXME(#6393)
-        let free_block = self.new_block(is_cleanup, call);
+        let free_block = self.new_block(unwind, call);
 
         let block_start = Location { block: free_block, statement_index: 0 };
         self.elaborator.clear_drop_flag(block_start, self.path, DropFlagMode::Shallow);
         free_block
     }
 
-    fn drop_block<'a>(&mut self, is_cleanup: bool, succ: BasicBlock) -> BasicBlock {
+    fn drop_block<'a>(&mut self, target: BasicBlock, unwind: Unwind) -> BasicBlock {
         let block = TerminatorKind::Drop {
             location: self.lvalue.clone(),
-            target: succ,
-            unwind: if is_cleanup { None } else { self.unwind }
+            target: target,
+            unwind: unwind.into_option()
         };
-        self.new_block(is_cleanup, block)
+        self.new_block(unwind, block)
     }
 
     fn drop_flag_test_block(&mut self,
-                            is_cleanup: bool,
                             on_set: BasicBlock,
-                            on_unset: BasicBlock)
+                            on_unset: BasicBlock,
+                            unwind: Unwind)
                             -> BasicBlock
     {
         let style = self.elaborator.drop_style(self.path, DropFlagMode::Shallow);
-        debug!("drop_flag_test_block({:?},{:?},{:?}) - {:?}",
-               self, is_cleanup, on_set, style);
+        debug!("drop_flag_test_block({:?},{:?},{:?},{:?}) - {:?}",
+               self, on_set, on_unset, unwind, style);
 
         match style {
             DropStyle::Dead => on_unset,
@@ -664,13 +877,13 @@ fn drop_flag_test_block(&mut self,
             DropStyle::Conditional | DropStyle::Open => {
                 let flag = self.elaborator.get_drop_flag(self.path).unwrap();
                 let term = TerminatorKind::if_(self.tcx(), flag, on_set, on_unset);
-                self.new_block(is_cleanup, term)
+                self.new_block(unwind, term)
             }
         }
     }
 
     fn new_block<'a>(&mut self,
-                     is_cleanup: bool,
+                     unwind: Unwind,
                      k: TerminatorKind<'tcx>)
                      -> BasicBlock
     {
@@ -679,7 +892,7 @@ fn new_block<'a>(&mut self,
             terminator: Some(Terminator {
                 source_info: self.source_info, kind: k
             }),
-            is_cleanup: is_cleanup
+            is_cleanup: unwind.is_cleanup()
         })
     }
 
@@ -691,4 +904,19 @@ fn terminator_loc(&mut self, bb: BasicBlock) -> Location {
         let mir = self.elaborator.mir();
         self.elaborator.patch().terminator_loc(mir, bb)
     }
+
+    fn constant_usize(&self, val: u16) -> Operand<'tcx> {
+        Operand::Constant(box Constant {
+            span: self.source_info.span,
+            ty: self.tcx().types.usize,
+            literal: Literal::Value { value: ConstVal::Integral(self.tcx().const_usize(val)) }
+        })
+    }
+
+    fn assign(&self, lhs: &Lvalue<'tcx>, rhs: Rvalue<'tcx>) -> Statement<'tcx> {
+        Statement {
+            source_info: self.source_info,
+            kind: StatementKind::Assign(lhs.clone(), rhs)
+        }
+    }
 }
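
Note on the hunks above: the drop-elaboration helpers now take an `Unwind` value instead of the old `is_cleanup: bool`, and rely on `unwind.into_option()` and `unwind.is_cleanup()`. The type's definition is not in the lines quoted here (it is introduced elsewhere in this commit); a minimal sketch of a type with that surface, assuming a two-state design, with `BasicBlock` aliased to `usize` so the sketch compiles on its own:

    // Sketch only; the real definition lives elsewhere in this commit and may differ.
    type BasicBlock = usize; // stand-in for mir::BasicBlock

    #[derive(Copy, Clone, Debug)]
    enum Unwind {
        To(BasicBlock), // on unwind, branch to this cleanup block
        InCleanup,      // already inside cleanup code: drops here get no unwind edge
    }

    impl Unwind {
        fn is_cleanup(self) -> bool {
            match self {
                Unwind::To(_) => false,
                Unwind::InCleanup => true,
            }
        }
        fn into_option(self) -> Option<BasicBlock> {
            match self {
                Unwind::To(bb) => Some(bb),
                Unwind::InCleanup => None,
            }
        }
    }

    fn main() {
        assert!(!Unwind::To(5).is_cleanup());
        assert_eq!(Unwind::To(5).into_option(), Some(5));
        assert_eq!(Unwind::InCleanup.into_option(), None);
    }
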
index 24218725186624525e735628bd03a9fac3b6b07a..e29da3a6496552cc9b43bef6b449649e2f4c7a17 100644 (file)
@@ -186,7 +186,7 @@ fn visit_rvalue(&mut self,
             Rvalue::CheckedBinaryOp(..) => "Rvalue::CheckedBinaryOp",
             Rvalue::UnaryOp(..) => "Rvalue::UnaryOp",
             Rvalue::Discriminant(..) => "Rvalue::Discriminant",
-            Rvalue::Box(..) => "Rvalue::Box",
+            Rvalue::NullaryOp(..) => "Rvalue::NullaryOp",
             Rvalue::Aggregate(ref kind, ref _operands) => {
                 // AggregateKind is not distinguished by visit API, so
                 // record it. (`super_rvalue` handles `_operands`.)
index 9b94a3b2f234b39106a8ea5e1e5bedbd7b967785..120f201a9c8b73ef6bd4a93f079f96cc1cf5094a 100644 (file)
@@ -11,7 +11,7 @@
 use llvm::{self, ValueRef, AttributePlace};
 use base;
 use builder::Builder;
-use common::{type_is_fat_ptr, C_uint};
+use common::{instance_ty, ty_fn_sig, type_is_fat_ptr, C_uint};
 use context::CrateContext;
 use cabi_x86;
 use cabi_x86_64;
@@ -610,6 +610,14 @@ pub struct FnType<'tcx> {
 }
 
 impl<'a, 'tcx> FnType<'tcx> {
+    pub fn of_instance(ccx: &CrateContext<'a, 'tcx>, instance: &ty::Instance<'tcx>)
+                       -> Self {
+        let fn_ty = instance_ty(ccx.shared(), &instance);
+        let sig = ty_fn_sig(ccx, fn_ty);
+        let sig = ccx.tcx().erase_late_bound_regions_and_normalize(&sig);
+        Self::new(ccx, sig, &[])
+    }
+
     pub fn new(ccx: &CrateContext<'a, 'tcx>,
                sig: ty::FnSig<'tcx>,
                extra_args: &[Ty<'tcx>]) -> FnType<'tcx> {
@@ -631,6 +639,8 @@ pub fn new_vtable(ccx: &CrateContext<'a, 'tcx>,
     pub fn unadjusted(ccx: &CrateContext<'a, 'tcx>,
                       sig: ty::FnSig<'tcx>,
                       extra_args: &[Ty<'tcx>]) -> FnType<'tcx> {
+        debug!("FnType::unadjusted({:?}, {:?})", sig, extra_args);
+
         use self::Abi::*;
         let cconv = match ccx.sess().target.target.adjust_abi(sig.abi) {
             RustIntrinsic | PlatformIntrinsic |
index 63cfe591ce366b6fefdd6367de221f214f12610d..b5ef4aac34c89586436a73ade3cff6f517807bb5 100644 (file)
@@ -40,8 +40,8 @@
 #[derive(Debug, PartialEq)]
 enum Disposition { Reused, Translated }
 
-pub fn assert_module_sources<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                       modules: &[ModuleTranslation]) {
+pub(crate) fn assert_module_sources<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                                              modules: &[ModuleTranslation]) {
     let _ignore = tcx.dep_graph.in_ignore();
 
     if tcx.sess.opts.incremental.is_none() {
index f85d3f9f54dfd7cd8b63ad152089bfa10e10eca0..e40267238801cb791e63a600821b493aa769e84b 100644 (file)
 use super::rpath;
 use super::msvc;
 use metadata::METADATA_FILENAME;
-use session::config;
-use session::config::NoDebugInfo;
-use session::config::{OutputFilenames, Input, OutputType};
-use session::filesearch;
-use session::search_paths::PathKind;
-use session::Session;
-use middle::cstore::{self, LinkMeta, NativeLibrary, LibSource};
-use middle::cstore::{LinkagePreference, NativeLibraryKind};
-use middle::dependency_format::Linkage;
+use rustc::session::config::{self, NoDebugInfo, OutputFilenames, Input, OutputType};
+use rustc::session::filesearch;
+use rustc::session::search_paths::PathKind;
+use rustc::session::Session;
+use rustc::middle::cstore::{self, LinkMeta, NativeLibrary, LibSource, LinkagePreference,
+                            NativeLibraryKind};
+use rustc::middle::dependency_format::Linkage;
 use CrateTranslation;
-use util::common::time;
-use util::fs::fix_windows_verbatim_for_gcc;
+use rustc::util::common::time;
+use rustc::util::fs::fix_windows_verbatim_for_gcc;
 use rustc::dep_graph::DepNode;
 use rustc::hir::def_id::CrateNum;
 use rustc::hir::svh::Svh;
index 61c57f00de70d5b00c229933d4ff5148767e963b..0b15886083a4e4d099613a43d86c06c88f86f74e 100644 (file)
 
 use back::archive;
 use back::symbol_export::{self, ExportedSymbols};
-use middle::dependency_format::Linkage;
+use rustc::middle::dependency_format::Linkage;
 use rustc::hir::def_id::{LOCAL_CRATE, CrateNum};
 use rustc_back::LinkerFlavor;
-use session::Session;
-use session::config::{self, CrateType, OptLevel, DebugInfoLevel};
+use rustc::session::Session;
+use rustc::session::config::{self, CrateType, OptLevel, DebugInfoLevel};
 use serialize::{json, Encoder};
 
 /// For all the linkers we support, and information they might
index 31f3415b1ecd026424e2de1baf2444e0e7edbe7f..97648888fa9b95bebc3362964c64c3598a2fac42 100644 (file)
@@ -52,7 +52,7 @@ mod platform {
     use std::fs;
     use std::path::{Path, PathBuf};
     use std::process::Command;
-    use session::Session;
+    use rustc::session::Session;
     use super::arch::{host_arch, Arch};
     use super::registry::LOCAL_MACHINE;
 
@@ -296,7 +296,7 @@ fn vc_lib_subdir(arch: &str) -> Option<&'static str> {
 mod platform {
     use std::path::PathBuf;
     use std::process::Command;
-    use session::Session;
+    use rustc::session::Session;
     pub fn link_exe_cmd(_sess: &Session) -> (Command, Option<PathBuf>) {
         (Command::new("link.exe"), None)
     }
index ddd86c4679934d79493a9d20e2755e6d0fd1b164..ef75de04045e5d64b5307b535cbe5e17386fccf8 100644 (file)
@@ -10,7 +10,7 @@
 
 use context::SharedCrateContext;
 use monomorphize::Instance;
-use util::nodemap::FxHashMap;
+use rustc::util::nodemap::FxHashMap;
 use rustc::hir::def_id::{DefId, CrateNum, LOCAL_CRATE};
 use rustc::session::config;
 use rustc::ty::TyCtxt;
index 1aed2932948020f87e990c1f675aa46c413d39f0..8e14335ceacb9950a2d8ffe9cddc52114db062bc 100644 (file)
@@ -348,16 +348,6 @@ fn push(&mut self, text: &str) {
     }
 }
 
-pub fn exported_name_from_type_and_prefix<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
-                                                    t: Ty<'tcx>,
-                                                    prefix: &str)
-                                                    -> String {
-    let hash = get_symbol_hash(tcx, None, t, None);
-    let mut buffer = SymbolPathBuffer::new();
-    buffer.push(prefix);
-    buffer.finish(hash)
-}
-
 // Name sanitation. LLVM will happily accept identifiers with weird names, but
 // gas doesn't!
 // gas accepts the following characters in symbols: a-z, A-Z, 0-9, ., _, $
index 3492403a1bf8e545e192c70ab7e526e1108f762d..6ed0cb0092f52d0be31e3d5fa522ac49c807c534 100644 (file)
 use back::link::{get_linker, remove};
 use back::symbol_export::ExportedSymbols;
 use rustc_incremental::{save_trans_partition, in_incr_comp_dir};
-use session::config::{OutputFilenames, OutputTypes, Passes, SomePasses, AllPasses, Sanitizer};
-use session::Session;
-use session::config::{self, OutputType};
+use rustc::session::config::{self, OutputFilenames, OutputType, OutputTypes, Passes, SomePasses,
+                             AllPasses, Sanitizer};
+use rustc::session::Session;
 use llvm;
 use llvm::{ModuleRef, TargetMachineRef, PassManagerRef, DiagnosticInfoRef, ContextRef};
 use llvm::SMDiagnosticRef;
 use {CrateTranslation, ModuleLlvm, ModuleSource, ModuleTranslation};
-use util::common::{time, time_depth, set_time_depth};
-use util::common::path2cstr;
-use util::fs::link_or_copy;
+use rustc::util::common::{time, time_depth, set_time_depth, path2cstr};
+use rustc::util::fs::link_or_copy;
 use errors::{self, Handler, Level, DiagnosticBuilder};
 use errors::emitter::Emitter;
 use syntax_pos::MultiSpan;
index 437ced85b2e4ad48ec97d7186ea54f5d50820764..1b3f0ba7ce5b6f61c7430d92a882cfb7cc7fa9f7 100644 (file)
 use llvm;
 use metadata;
 use rustc::hir::def_id::LOCAL_CRATE;
-use middle::lang_items::StartFnLangItem;
-use middle::cstore::EncodedMetadata;
+use rustc::middle::lang_items::StartFnLangItem;
+use rustc::middle::cstore::EncodedMetadata;
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::dep_graph::AssertDepGraphSafe;
 use rustc::middle::cstore::LinkMeta;
 use rustc::hir::map as hir_map;
 use rustc::util::common::time;
-use session::config::{self, NoDebugInfo};
+use rustc::session::config::{self, NoDebugInfo};
+use rustc::session::{self, DataTypeKind, Session};
 use rustc_incremental::IncrementalHashesMap;
-use session::{self, DataTypeKind, Session};
 use abi;
 use mir::lvalue::LvalueRef;
 use attributes;
@@ -71,7 +71,7 @@
 use type_::Type;
 use type_of;
 use value::Value;
-use util::nodemap::{NodeSet, FxHashMap, FxHashSet};
+use rustc::util::nodemap::{NodeSet, FxHashMap, FxHashSet};
 
 use libc::c_uint;
 use std::ffi::{CStr, CString};
index 5f8b79a994a554755bbd0b93cb336c0a5f8985a7..da74ed88eaf857d87e3530ccdf65f5360244fab9 100644 (file)
 use context::SharedCrateContext;
 use common::{def_ty, instance_ty};
 use monomorphize::{self, Instance};
-use util::nodemap::{FxHashSet, FxHashMap, DefIdMap};
+use rustc::util::nodemap::{FxHashSet, FxHashMap, DefIdMap};
 
 use trans_item::{TransItem, DefPathBasedNames, InstantiationMode};
 
@@ -502,7 +502,7 @@ fn visit_rvalue(&mut self, rvalue: &mir::Rvalue<'tcx>, location: Location) {
                     _ => bug!(),
                 }
             }
-            mir::Rvalue::Box(..) => {
+            mir::Rvalue::NullaryOp(mir::NullOp::Box, _) => {
                 let tcx = self.scx.tcx();
                 let exchange_malloc_fn_def_id = tcx
                     .lang_items
@@ -612,17 +612,7 @@ fn visit_instance_use<'a, 'tcx>(scx: &SharedCrateContext<'a, 'tcx>,
                 output.push(create_fn_trans_item(instance));
             }
         }
-        ty::InstanceDef::DropGlue(_, Some(ty)) => {
-            match ty.sty {
-                ty::TyArray(ety, _) |
-                ty::TySlice(ety)
-                    if is_direct_call =>
-                {
-                    // drop of arrays/slices is translated in-line.
-                    visit_drop_use(scx, ety, false, output);
-                }
-                _ => {}
-            };
+        ty::InstanceDef::DropGlue(_, Some(_)) => {
             output.push(create_fn_trans_item(instance));
         }
         ty::InstanceDef::ClosureOnceShim { .. } |
index efd4f13678502e3e556fa06050e5e9e61de8b36f..a6f3fb709a01b67f5cc4532f74e4d7809041c005 100644 (file)
@@ -17,7 +17,7 @@
 use llvm::{True, False, Bool, OperandBundleDef};
 use rustc::hir::def_id::DefId;
 use rustc::hir::map::DefPathData;
-use middle::lang_items::LangItem;
+use rustc::middle::lang_items::LangItem;
 use base;
 use builder::Builder;
 use consts;
@@ -191,15 +191,6 @@ pub fn bundle(&self) -> &OperandBundleDef {
     }
 }
 
-impl Clone for Funclet {
-    fn clone(&self) -> Funclet {
-        Funclet {
-            cleanuppad: self.cleanuppad,
-            operand: OperandBundleDef::new("funclet", &[self.cleanuppad]),
-        }
-    }
-}
-
 pub fn val_ty(v: ValueRef) -> Type {
     unsafe {
         Type::from_ref(llvm::LLVMTypeOf(v))
index 0dece586c930dabfd39226e4ec8e817161049536..c3b16c2d07d0733ddc43b0c403ab7dc426a38455 100644 (file)
 use rustc::ty::subst::Substs;
 use rustc::ty::{self, Ty, TyCtxt};
 use rustc::ty::layout::{LayoutTyper, TyLayout};
-use session::config::NoDebugInfo;
-use session::Session;
-use session::config;
-use util::nodemap::{NodeSet, DefIdMap, FxHashMap};
+use rustc::session::config::{self, NoDebugInfo};
+use rustc::session::Session;
+use rustc::util::nodemap::{NodeSet, DefIdMap, FxHashMap};
 
 use std::ffi::{CStr, CString};
 use std::cell::{Cell, RefCell};
index 4567ec8b452df58a258cd932092af89b9699c52a..14d3fa495530773173de9e870aa5eff813f9c2b9 100644 (file)
@@ -16,7 +16,7 @@
 use builder::Builder;
 use declare;
 use type_::Type;
-use session::config::NoDebugInfo;
+use rustc::session::config::NoDebugInfo;
 
 use std::ptr;
 use syntax::attr;
index 7d8b8161abe0282e2751b87ab95e20be3cb1c8e4..fea24e6da873e02420303f8c0089c8211073b378 100644 (file)
@@ -19,7 +19,6 @@
 use super::type_names::compute_debuginfo_type_name;
 use super::{CrateDebugContext};
 use context::SharedCrateContext;
-use session::Session;
 
 use llvm::{self, ValueRef};
 use llvm::debuginfo::{DIType, DIFile, DIScope, DIDescriptor,
@@ -37,8 +36,8 @@
 use type_::Type;
 use rustc::ty::{self, AdtKind, Ty};
 use rustc::ty::layout::{self, LayoutTyper};
-use session::config;
-use util::nodemap::FxHashMap;
+use rustc::session::{Session, config};
+use rustc::util::nodemap::FxHashMap;
 
 use libc::{c_uint, c_longlong};
 use std::ffi::CString;
index ebe42a2b9082d2214acbfdd89a7a817efebe992b..4d781d6f77de919f15d71832f8dd7e709da38e39 100644 (file)
@@ -32,8 +32,8 @@
 use monomorphize::Instance;
 use rustc::ty::{self, Ty};
 use rustc::mir;
-use session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
-use util::nodemap::{DefIdMap, FxHashMap, FxHashSet};
+use rustc::session::config::{self, FullDebugInfo, LimitedDebugInfo, NoDebugInfo};
+use rustc::util::nodemap::{DefIdMap, FxHashMap, FxHashSet};
 
 use libc::c_uint;
 use std::cell::{Cell, RefCell};
index 59876a7f2a201da688fe23eda5f397852fd9ceb0..fa400b54d2708ed98b4d2e7f0d7df0bc54617ec9 100644 (file)
@@ -76,6 +76,7 @@ pub fn size_and_align_of_dst<'a, 'tcx>(bcx: &Builder<'a, 'tcx>, t: Ty<'tcx>, inf
         let align = C_uint(bcx.ccx, align);
         return (size, align);
     }
+    assert!(!info.is_null());
     match t.sty {
         ty::TyAdt(def, substs) => {
             let ccx = bcx.ccx;
index 3ac0d88b90d7b7be0f6894313951a6b6595cf235..c111a3983e7ea272e0b7c0c9957e2d906776f775 100644 (file)
 extern crate rustc_errors as errors;
 extern crate serialize;
 
-pub use rustc::session;
-pub use rustc::middle;
-pub use rustc::lint;
-pub use rustc::util;
-
 pub use base::trans_crate;
 pub use back::symbol_names::provide;
 
 pub use llvm_util::{init, target_features, print_version, print_passes, print, enable_llvm_debug};
 
 pub mod back {
-    pub use rustc::hir::svh;
-
-    pub mod archive;
-    pub mod linker;
+    mod archive;
+    pub(crate) mod linker;
     pub mod link;
-    pub mod lto;
-    pub mod symbol_export;
-    pub mod symbol_names;
+    mod lto;
+    pub(crate) mod symbol_export;
+    pub(crate) mod symbol_names;
     pub mod write;
-    pub mod msvc;
-    pub mod rpath;
+    mod msvc;
+    mod rpath;
 }
 
-pub mod diagnostics;
+mod diagnostics;
 
 mod abi;
 mod adt;
@@ -171,8 +164,8 @@ pub struct CrateTranslation {
     pub crate_name: Symbol,
     pub modules: Vec<ModuleTranslation>,
     pub metadata_module: ModuleTranslation,
-    pub link: middle::cstore::LinkMeta,
-    pub metadata: middle::cstore::EncodedMetadata,
+    pub link: rustc::middle::cstore::LinkMeta,
+    pub metadata: rustc::middle::cstore::EncodedMetadata,
     pub exported_symbols: back::symbol_export::ExportedSymbols,
     pub no_builtins: bool,
     pub windows_subsystem: Option<String>,
index 96ef26d3f6f833aa87a2a56c2eef37ac01d8c74f..45afcf51b5203d18ae46287ad6484b2632771ce7 100644 (file)
@@ -197,6 +197,16 @@ pub enum CleanupKind {
     Internal { funclet: mir::BasicBlock }
 }
 
+impl CleanupKind {
+    pub fn funclet_bb(self, for_bb: mir::BasicBlock) -> Option<mir::BasicBlock> {
+        match self {
+            CleanupKind::NotCleanup => None,
+            CleanupKind::Funclet => Some(for_bb),
+            CleanupKind::Internal { funclet } => Some(funclet),
+        }
+    }
+}
+
 pub fn cleanup_kinds<'a, 'tcx>(mir: &mir::Mir<'tcx>) -> IndexVec<mir::BasicBlock, CleanupKind> {
     fn discover_masters<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                               mir: &mir::Mir<'tcx>) {
@@ -260,7 +270,9 @@ fn propagate<'tcx>(result: &mut IndexVec<mir::BasicBlock, CleanupKind>,
                         result[succ] = CleanupKind::Internal { funclet: funclet };
                     }
                     CleanupKind::Funclet => {
-                        set_successor(funclet, succ);
+                        if funclet != succ {
+                            set_successor(funclet, succ);
+                        }
                     }
                     CleanupKind::Internal { funclet: succ_funclet } => {
                         if funclet != succ_funclet {
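
The `funclet_bb` helper added above is what lets block.rs (further down) collapse its per-call-site `CleanupKind` matches into a single lookup. A self-contained illustration of the mapping, with `usize` standing in for `mir::BasicBlock` so it compiles on its own:

    #[derive(Copy, Clone)]
    enum CleanupKind { NotCleanup, Funclet, Internal { funclet: usize } }

    impl CleanupKind {
        fn funclet_bb(self, for_bb: usize) -> Option<usize> {
            match self {
                CleanupKind::NotCleanup => None,                     // ordinary block: no funclet
                CleanupKind::Funclet => Some(for_bb),                // the block heads its own funclet
                CleanupKind::Internal { funclet } => Some(funclet),  // belongs to another block's funclet
            }
        }
    }

    fn main() {
        assert_eq!(CleanupKind::NotCleanup.funclet_bb(7), None);
        assert_eq!(CleanupKind::Funclet.funclet_bb(7), Some(7));
        assert_eq!(CleanupKind::Internal { funclet: 2 }.funclet_bb(7), Some(2));
    }
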
index a3fa1279ffb4575866cb091e27afe236bbfb85ca..4926485a121256e95afca19ce19a057100384434 100644 (file)
 use base::{self, Lifetime};
 use callee;
 use builder::Builder;
-use common::{self, Funclet};
-use common::{C_bool, C_str_slice, C_struct, C_u32, C_uint, C_undef};
+use common::{self, C_bool, C_str_slice, C_struct, C_u32, C_undef};
 use consts;
 use machine::llalign_of_min;
 use meth;
 use monomorphize;
 use type_of;
-use tvec;
 use type_::Type;
 
-use rustc_data_structures::indexed_vec::IndexVec;
 use syntax::symbol::Symbol;
 
 use std::cmp;
 
 use super::{MirContext, LocalRef};
-use super::analyze::CleanupKind;
 use super::constant::Const;
 use super::lvalue::{Alignment, LvalueRef};
 use super::operand::OperandRef;
 use super::operand::OperandValue::{Pair, Ref, Immediate};
 
 impl<'a, 'tcx> MirContext<'a, 'tcx> {
-    pub fn trans_block(&mut self, bb: mir::BasicBlock,
-        funclets: &IndexVec<mir::BasicBlock, Option<Funclet>>) {
+    pub fn trans_block(&mut self, bb: mir::BasicBlock) {
         let mut bcx = self.get_builder(bb);
         let data = &self.mir[bb];
 
         debug!("trans_block({:?}={:?})", bb, data);
 
-        let funclet = match self.cleanup_kinds[bb] {
-            CleanupKind::Internal { funclet } => funclets[funclet].as_ref(),
-            _ => funclets[bb].as_ref(),
-        };
+        for statement in &data.statements {
+            bcx = self.trans_statement(bcx, statement);
+        }
+
+        self.trans_terminator(bcx, bb, data.terminator());
+    }
+
+    fn trans_terminator(&mut self,
+                        mut bcx: Builder<'a, 'tcx>,
+                        bb: mir::BasicBlock,
+                        terminator: &mir::Terminator<'tcx>)
+    {
+        debug!("trans_terminator: {:?}", terminator);
 
         // Create the cleanup bundle, if needed.
+        let tcx = bcx.tcx();
+        let span = terminator.source_info.span;
+        let funclet_bb = self.cleanup_kinds[bb].funclet_bb(bb);
+        let funclet = funclet_bb.and_then(|funclet_bb| self.funclets[funclet_bb].as_ref());
+
         let cleanup_pad = funclet.map(|lp| lp.cleanuppad());
         let cleanup_bundle = funclet.map(|l| l.bundle());
 
-        let funclet_br = |this: &Self, bcx: Builder, bb: mir::BasicBlock| {
-            let lltarget = this.blocks[bb];
-            if let Some(cp) = cleanup_pad {
-                match this.cleanup_kinds[bb] {
-                    CleanupKind::Funclet => {
-                        // micro-optimization: generate a `ret` rather than a jump
-                        // to a return block
-                        bcx.cleanup_ret(cp, Some(lltarget));
-                    }
-                    CleanupKind::Internal { .. } => bcx.br(lltarget),
-                    CleanupKind::NotCleanup => bug!("jump from cleanup bb to bb {:?}", bb)
+        let lltarget = |this: &mut Self, target: mir::BasicBlock| {
+            let lltarget = this.blocks[target];
+            let target_funclet = this.cleanup_kinds[target].funclet_bb(target);
+            match (funclet_bb, target_funclet) {
+                (None, None) => (lltarget, false),
+                (Some(f), Some(t_f))
+                    if f == t_f || !base::wants_msvc_seh(tcx.sess)
+                    => (lltarget, false),
+                (None, Some(_)) => {
+                    // jump *into* cleanup - need a landing pad if GNU
+                    (this.landing_pad_to(target), false)
+                }
+                (Some(_), None) => span_bug!(span, "{:?} - jump out of cleanup?", terminator),
+                (Some(_), Some(_)) => {
+                    (this.landing_pad_to(target), true)
                 }
-            } else {
-                bcx.br(lltarget);
             }
         };
 
         let llblock = |this: &mut Self, target: mir::BasicBlock| {
-            let lltarget = this.blocks[target];
+            let (lltarget, is_cleanupret) = lltarget(this, target);
+            if is_cleanupret {
+                // MSVC cross-funclet jump - need a trampoline
+
+                debug!("llblock: creating cleanup trampoline for {:?}", target);
+                let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
+                let trampoline = this.new_block(name);
+                trampoline.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
+                trampoline.llbb()
+            } else {
+                lltarget
+            }
+        };
 
-            if let Some(cp) = cleanup_pad {
-                match this.cleanup_kinds[target] {
-                    CleanupKind::Funclet => {
-                        // MSVC cross-funclet jump - need a trampoline
+        let funclet_br = |this: &mut Self, bcx: Builder, target: mir::BasicBlock| {
+            let (lltarget, is_cleanupret) = lltarget(this, target);
+            if is_cleanupret {
+                // micro-optimization: generate a `ret` rather than a jump
+                // to a trampoline.
+                bcx.cleanup_ret(cleanup_pad.unwrap(), Some(lltarget));
+            } else {
+                bcx.br(lltarget);
+            }
+        };
 
-                        debug!("llblock: creating cleanup trampoline for {:?}", target);
-                        let name = &format!("{:?}_cleanup_trampoline_{:?}", bb, target);
-                        let trampoline = this.new_block(name);
-                        trampoline.cleanup_ret(cp, Some(lltarget));
-                        trampoline.llbb()
-                    }
-                    CleanupKind::Internal { .. } => lltarget,
-                    CleanupKind::NotCleanup =>
-                        bug!("jump from cleanup bb {:?} to bb {:?}", bb, target)
+        let do_call = |
+            this: &mut Self,
+            bcx: Builder<'a, 'tcx>,
+            fn_ty: FnType<'tcx>,
+            fn_ptr: ValueRef,
+            llargs: &[ValueRef],
+            destination: Option<(ReturnDest, ty::Ty<'tcx>, mir::BasicBlock)>,
+            cleanup: Option<mir::BasicBlock>
+        | {
+            if let Some(cleanup) = cleanup {
+                let ret_bcx = if let Some((_, _, target)) = destination {
+                    this.blocks[target]
+                } else {
+                    this.unreachable_block()
+                };
+                let invokeret = bcx.invoke(fn_ptr,
+                                           &llargs,
+                                           ret_bcx,
+                                           llblock(this, cleanup),
+                                           cleanup_bundle);
+                fn_ty.apply_attrs_callsite(invokeret);
+
+                if let Some((ret_dest, ret_ty, target)) = destination {
+                    let ret_bcx = this.get_builder(target);
+                    this.set_debug_loc(&ret_bcx, terminator.source_info);
+                    let op = OperandRef {
+                        val: Immediate(invokeret),
+                        ty: ret_ty,
+                    };
+                    this.store_return(&ret_bcx, ret_dest, &fn_ty.ret, op);
                 }
             } else {
-                if let (CleanupKind::NotCleanup, CleanupKind::Funclet) =
-                    (this.cleanup_kinds[bb], this.cleanup_kinds[target])
-                {
-                    // jump *into* cleanup - need a landing pad if GNU
-                    this.landing_pad_to(target)
+                let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
+                fn_ty.apply_attrs_callsite(llret);
+
+                if let Some((ret_dest, ret_ty, target)) = destination {
+                    let op = OperandRef {
+                        val: Immediate(llret),
+                        ty: ret_ty,
+                    };
+                    this.store_return(&bcx, ret_dest, &fn_ty.ret, op);
+                    funclet_br(this, bcx, target);
                 } else {
-                    lltarget
+                    bcx.unreachable();
                 }
             }
         };
 
-        for statement in &data.statements {
-            bcx = self.trans_statement(bcx, statement);
-        }
-
-        let terminator = data.terminator();
-        debug!("trans_block: terminator: {:?}", terminator);
-
-        let span = terminator.source_info.span;
         self.set_debug_loc(&bcx, terminator.source_info);
         match terminator.kind {
             mir::TerminatorKind::Resume => {
@@ -219,52 +266,16 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock,
                 }
 
                 let lvalue = self.trans_lvalue(&bcx, location);
+                let fn_ty = FnType::of_instance(bcx.ccx, &drop_fn);
                 let (drop_fn, need_extra) = match ty.sty {
                     ty::TyDynamic(..) => (meth::DESTRUCTOR.get_fn(&bcx, lvalue.llextra),
                                           false),
-                    ty::TyArray(ety, _) | ty::TySlice(ety) => {
-                        // FIXME: handle panics
-                        let drop_fn = monomorphize::resolve_drop_in_place(
-                            bcx.ccx.shared(), ety);
-                        let drop_fn = callee::get_fn(bcx.ccx, drop_fn);
-                        let bcx = tvec::slice_for_each(
-                            &bcx,
-                            lvalue.project_index(&bcx, C_uint(bcx.ccx, 0u64)),
-                            ety,
-                            lvalue.len(bcx.ccx),
-                            |bcx, llval, loop_bb| {
-                                self.set_debug_loc(&bcx, terminator.source_info);
-                                if let Some(unwind) = unwind {
-                                    bcx.invoke(
-                                        drop_fn,
-                                        &[llval],
-                                        loop_bb,
-                                        llblock(self, unwind),
-                                        cleanup_bundle
-                                    );
-                                } else {
-                                    bcx.call(drop_fn, &[llval], cleanup_bundle);
-                                    bcx.br(loop_bb);
-                                }
-                            });
-                        funclet_br(self, bcx, target);
-                        return
-                    }
                     _ => (callee::get_fn(bcx.ccx, drop_fn), lvalue.has_extra())
                 };
                 let args = &[lvalue.llval, lvalue.llextra][..1 + need_extra as usize];
-                if let Some(unwind) = unwind {
-                    bcx.invoke(
-                        drop_fn,
-                        args,
-                        self.blocks[target],
-                        llblock(self, unwind),
-                        cleanup_bundle
-                    );
-                } else {
-                    bcx.call(drop_fn, args, cleanup_bundle);
-                    funclet_br(self, bcx, target);
-                }
+                do_call(self, bcx, fn_ty, drop_fn, args,
+                        Some((ReturnDest::Nothing, tcx.mk_nil(), target)),
+                        unwind);
             }
 
             mir::TerminatorKind::Assert { ref cond, expected, ref msg, target, cleanup } => {
@@ -371,26 +382,18 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock,
                 // Obtain the panic entry point.
                 let def_id = common::langcall(bcx.tcx(), Some(span), "", lang_item);
                 let instance = ty::Instance::mono(bcx.tcx(), def_id);
+                let fn_ty = FnType::of_instance(bcx.ccx, &instance);
                 let llfn = callee::get_fn(bcx.ccx, instance);
 
                 // Translate the actual panic invoke/call.
-                if let Some(unwind) = cleanup {
-                    bcx.invoke(llfn,
-                               &args,
-                               self.unreachable_block(),
-                               llblock(self, unwind),
-                               cleanup_bundle);
-                } else {
-                    bcx.call(llfn, &args, cleanup_bundle);
-                    bcx.unreachable();
-                }
+                do_call(self, bcx, fn_ty, llfn, &args, None, cleanup);
             }
 
             mir::TerminatorKind::DropAndReplace { .. } => {
-                bug!("undesugared DropAndReplace in trans: {:?}", data);
+                bug!("undesugared DropAndReplace in trans: {:?}", terminator);
             }
 
-            mir::TerminatorKind::Call { ref func, ref args, ref destination, ref cleanup } => {
+            mir::TerminatorKind::Call { ref func, ref args, ref destination, cleanup } => {
                 // Create the callee. This is a fn ptr or zero-sized and hence a kind of scalar.
                 let callee = self.trans_operand(&bcx, func);
 
@@ -543,43 +546,9 @@ pub fn trans_block(&mut self, bb: mir::BasicBlock,
                     _ => span_bug!(span, "no llfn for call"),
                 };
 
-                // Many different ways to call a function handled here
-                if let &Some(cleanup) = cleanup {
-                    let ret_bcx = if let Some((_, target)) = *destination {
-                        self.blocks[target]
-                    } else {
-                        self.unreachable_block()
-                    };
-                    let invokeret = bcx.invoke(fn_ptr,
-                                               &llargs,
-                                               ret_bcx,
-                                               llblock(self, cleanup),
-                                               cleanup_bundle);
-                    fn_ty.apply_attrs_callsite(invokeret);
-
-                    if let Some((_, target)) = *destination {
-                        let ret_bcx = self.get_builder(target);
-                        self.set_debug_loc(&ret_bcx, terminator.source_info);
-                        let op = OperandRef {
-                            val: Immediate(invokeret),
-                            ty: sig.output(),
-                        };
-                        self.store_return(&ret_bcx, ret_dest, &fn_ty.ret, op);
-                    }
-                } else {
-                    let llret = bcx.call(fn_ptr, &llargs, cleanup_bundle);
-                    fn_ty.apply_attrs_callsite(llret);
-                    if let Some((_, target)) = *destination {
-                        let op = OperandRef {
-                            val: Immediate(llret),
-                            ty: sig.output(),
-                        };
-                        self.store_return(&bcx, ret_dest, &fn_ty.ret, op);
-                        funclet_br(self, bcx, target);
-                    } else {
-                        bcx.unreachable();
-                    }
-                }
+                do_call(self, bcx, fn_ty, fn_ptr, &llargs,
+                        destination.as_ref().map(|&(_, target)| (ret_dest, sig.output(), target)),
+                        cleanup);
             }
         }
     }
@@ -774,7 +743,7 @@ fn landing_pad_to(&mut self, target_bb: mir::BasicBlock) -> BasicBlockRef {
 
     fn landing_pad_uncached(&mut self, target_bb: BasicBlockRef) -> BasicBlockRef {
         if base::wants_msvc_seh(self.ccx.sess()) {
-            return target_bb;
+            span_bug!(self.mir.span, "landing pad was not inserted?")
         }
 
         let bcx = self.new_block("cleanup");
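
The block.rs rewrite above folds the Drop, assert-panic, and Call paths into one `do_call` closure. Its central decision: if the terminator has a cleanup successor, emit an LLVM `invoke` whose unwind edge goes through `llblock` (which may build an MSVC cleanup trampoline), otherwise emit a plain `call` and either branch to the destination or mark the spot unreachable. A deliberately simplified, self-contained sketch of that decision; block names are plain strings here and the types are hypothetical, while the real code threads LLVM builders, funclet bundles, and `FnType` call-site attributes:

    #[derive(Debug, PartialEq)]
    enum Lowered<'a> {
        Invoke { normal: &'a str, unwind: &'a str }, // has a cleanup successor
        CallThenBranch { target: &'a str },          // no cleanup, but a return target
        CallThenUnreachable,                         // diverging call, no cleanup
    }

    fn lower_call<'a>(destination: Option<&'a str>, cleanup: Option<&'a str>) -> Lowered<'a> {
        match (cleanup, destination) {
            (Some(unwind), dest) => Lowered::Invoke {
                normal: dest.unwrap_or("unreachable"),
                unwind,
            },
            (None, Some(target)) => Lowered::CallThenBranch { target },
            (None, None) => Lowered::CallThenUnreachable,
        }
    }

    fn main() {
        assert_eq!(lower_call(Some("bb2"), Some("cleanup1")),
                   Lowered::Invoke { normal: "bb2", unwind: "cleanup1" });
        assert_eq!(lower_call(None, None), Lowered::CallThenUnreachable);
    }
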
index cd27ddda1b15bdb5151f2baa5806dd1a78b3727e..4967ef2f7908b2236ff821e6efbab6363b0e3cfd 100644 (file)
@@ -796,6 +796,12 @@ fn const_rvalue(&self, rvalue: &mir::Rvalue<'tcx>,
                 Const::new(llval, operand.ty)
             }
 
+            mir::Rvalue::NullaryOp(mir::NullOp::SizeOf, ty) => {
+                assert!(self.ccx.shared().type_is_sized(ty));
+                let llval = C_uint(self.ccx, self.ccx.size_of(ty));
+                Const::new(llval, tcx.types.usize)
+            }
+
             _ => span_bug!(span, "{:?} in constant", rvalue)
         };
 
@@ -870,6 +876,7 @@ pub fn const_scalar_binop(op: mir::BinOp,
                     llvm::LLVMConstICmp(cmp, lhs, rhs)
                 }
             }
+            mir::BinOp::Offset => unreachable!("BinOp::Offset in const-eval!")
         }
     }
 }
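
Several hunks in this commit (mir_stats, collector.rs, the constant evaluator above, and rvalue.rs below) replace the old `Rvalue::Box(ty)` with `Rvalue::NullaryOp(op, ty)`, matching on `NullOp::SizeOf` and `NullOp::Box`. The enum itself is defined outside the quoted hunks, presumably in src/librustc/mir/mod.rs, which this commit also touches; a sketch of the shape these matches imply:

    // Sketch based on the variants used in this diff; the authoritative definition
    // is part of the MIR in librustc, not this snippet.
    pub enum NullOp {
        /// Produces the size of the operand type as a usize
        /// (const-folded above when the type is Sized).
        SizeOf,
        /// Allocates a box for the operand type; this is what Rvalue::Box used to be.
        Box,
    }
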
index 19a556bf3f0f9b27af268a234b5a73a1ee343bcd..a7f12babb10f96c01f16b81862e1a2acb3931e81 100644 (file)
@@ -17,7 +17,7 @@
 use rustc::mir::tcx::LvalueTy;
 use rustc::ty::subst::Substs;
 use rustc::infer::TransNormalize;
-use session::config::FullDebugInfo;
+use rustc::session::config::FullDebugInfo;
 use base;
 use builder::Builder;
 use common::{self, CrateContext, Funclet};
@@ -69,6 +69,10 @@ pub struct MirContext<'a, 'tcx:'a> {
     /// The funclet status of each basic block
     cleanup_kinds: IndexVec<mir::BasicBlock, analyze::CleanupKind>,
 
+    /// When targeting MSVC, this stores the cleanup info for each funclet
+    /// BB. This is initialized as we compute the funclets' head block in RPO.
+    funclets: &'a IndexVec<mir::BasicBlock, Option<Funclet>>,
+
     /// This stores the landing-pad block for a given BB, computed lazily on GNU
     /// and eagerly on MSVC.
     landing_pads: IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,
@@ -202,8 +206,11 @@ pub fn trans_mir<'a, 'tcx: 'a>(
         debuginfo::create_function_debug_context(ccx, instance, sig, llfn, mir);
     let bcx = Builder::new_block(ccx, llfn, "start");
 
-    let cleanup_kinds = analyze::cleanup_kinds(&mir);
+    if mir.basic_blocks().iter().any(|bb| bb.is_cleanup) {
+        bcx.set_personality_fn(ccx.eh_personality());
+    }
 
+    let cleanup_kinds = analyze::cleanup_kinds(&mir);
     // Allocate a `Block` for every basic block, except
     // the start block, if nothing loops back to it.
     let reentrant_start_block = !mir.predecessors_for(mir::START_BLOCK).is_empty();
@@ -218,6 +225,7 @@ pub fn trans_mir<'a, 'tcx: 'a>(
 
     // Compute debuginfo scopes from MIR scopes.
     let scopes = debuginfo::create_mir_scopes(ccx, mir, &debug_context);
+    let (landing_pads, funclets) = create_funclets(&bcx, &cleanup_kinds, &block_bcxs);
 
     let mut mircx = MirContext {
         mir: mir,
@@ -228,7 +236,8 @@ pub fn trans_mir<'a, 'tcx: 'a>(
         blocks: block_bcxs,
         unreachable_block: None,
         cleanup_kinds: cleanup_kinds,
-        landing_pads: IndexVec::from_elem(None, mir.basic_blocks()),
+        landing_pads: landing_pads,
+        funclets: &funclets,
         scopes: scopes,
         locals: IndexVec::new(),
         debug_context: debug_context,
@@ -306,28 +315,13 @@ pub fn trans_mir<'a, 'tcx: 'a>(
     // emitting should be enabled.
     debuginfo::start_emitting_source_locations(&mircx.debug_context);
 
-    let funclets: IndexVec<mir::BasicBlock, Option<Funclet>> =
-    mircx.cleanup_kinds.iter_enumerated().map(|(bb, cleanup_kind)| {
-        if let CleanupKind::Funclet = *cleanup_kind {
-            let bcx = mircx.get_builder(bb);
-            unsafe {
-                llvm::LLVMSetPersonalityFn(mircx.llfn, mircx.ccx.eh_personality());
-            }
-            if base::wants_msvc_seh(ccx.sess()) {
-                return Some(Funclet::new(bcx.cleanup_pad(None, &[])));
-            }
-        }
-
-        None
-    }).collect();
-
     let rpo = traversal::reverse_postorder(&mir);
     let mut visited = BitVector::new(mir.basic_blocks().len());
 
     // Translate the body of each block using reverse postorder
     for (bb, _) in rpo {
         visited.insert(bb.index());
-        mircx.trans_block(bb, &funclets);
+        mircx.trans_block(bb);
     }
 
     // Remove blocks that haven't been visited, or have no
@@ -343,6 +337,26 @@ pub fn trans_mir<'a, 'tcx: 'a>(
     }
 }
 
+fn create_funclets<'a, 'tcx>(
+    bcx: &Builder<'a, 'tcx>,
+    cleanup_kinds: &IndexVec<mir::BasicBlock, CleanupKind>,
+    block_bcxs: &IndexVec<mir::BasicBlock, BasicBlockRef>)
+    -> (IndexVec<mir::BasicBlock, Option<BasicBlockRef>>,
+        IndexVec<mir::BasicBlock, Option<Funclet>>)
+{
+    block_bcxs.iter_enumerated().zip(cleanup_kinds).map(|((bb, &llbb), cleanup_kind)| {
+        match *cleanup_kind {
+            CleanupKind::Funclet if base::wants_msvc_seh(bcx.sess()) => {
+                let cleanup_bcx = bcx.build_sibling_block(&format!("funclet_{:?}", bb));
+                let cleanup = cleanup_bcx.cleanup_pad(None, &[]);
+                cleanup_bcx.br(llbb);
+                (Some(cleanup_bcx.llbb()), Some(Funclet::new(cleanup)))
+            }
+            _ => (None, None)
+        }
+    }).unzip()
+}
+
 /// Produce, for each argument, a `ValueRef` pointing at the
 /// argument's value. As arguments are lvalues, these are always
 /// indirect.
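
`create_funclets` above builds two parallel per-block tables (the eagerly created MSVC landing-pad blocks and their `Funclet` handles) in a single pass by mapping over the blocks and unzipping the pairs; on non-MSVC targets both entries stay `None` and landing pads are still created lazily. A self-contained illustration of the map-and-unzip pattern with plain Vecs; the names here are hypothetical:

    fn split_funclets(is_funclet_head: &[bool]) -> (Vec<Option<usize>>, Vec<Option<String>>) {
        is_funclet_head.iter().enumerate().map(|(bb, &head)| {
            if head {
                // real code: build a sibling block, emit cleanup_pad + br, keep both handles
                (Some(bb), Some(format!("funclet_{}", bb)))
            } else {
                (None, None)
            }
        }).unzip()
    }

    fn main() {
        let (pads, funclets) = split_funclets(&[false, true, false]);
        assert_eq!(pads, vec![None, Some(1), None]);
        assert_eq!(funclets, vec![None, Some("funclet_1".to_string()), None]);
    }
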
index 8b7c7d9d37232331b293266d57af0fb70696b5ef..a12d0fec1cdd06f31a0a00bb07562f4b06967933 100644 (file)
@@ -114,7 +114,7 @@ pub fn immediate(self) -> ValueRef {
 
     pub fn deref(self) -> LvalueRef<'tcx> {
         let projected_ty = self.ty.builtin_deref(true, ty::NoPreference)
-            .unwrap().ty;
+            .unwrap_or_else(|| bug!("deref of non-pointer {:?}", self)).ty;
         let (llptr, llextra) = match self.val {
             OperandValue::Immediate(llptr) => (llptr, ptr::null_mut()),
             OperandValue::Pair(llptr, llextra) => (llptr, llextra),
index 667075e6970e184c010e55ef7f9c6ab1de8f4ac8..61e537c9cc0c9488f5e1e85e5a646a2cd214b6c7 100644 (file)
@@ -14,7 +14,7 @@
 use rustc::ty::layout::{Layout, LayoutTyper};
 use rustc::mir::tcx::LvalueTy;
 use rustc::mir;
-use middle::lang_items::ExchangeMallocFnLangItem;
+use rustc::middle::lang_items::ExchangeMallocFnLangItem;
 
 use base;
 use builder::Builder;
@@ -432,7 +432,17 @@ pub fn trans_rvalue_operand(&mut self,
                 })
             }
 
-            mir::Rvalue::Box(content_ty) => {
+            mir::Rvalue::NullaryOp(mir::NullOp::SizeOf, ty) => {
+                assert!(bcx.ccx.shared().type_is_sized(ty));
+                let val = C_uint(bcx.ccx, bcx.ccx.size_of(ty));
+                let tcx = bcx.tcx();
+                (bcx, OperandRef {
+                    val: OperandValue::Immediate(val),
+                    ty: tcx.types.usize,
+                })
+            }
+
+            mir::Rvalue::NullaryOp(mir::NullOp::Box, content_ty) => {
                 let content_ty: Ty<'tcx> = self.monomorphize(&content_ty);
                 let llty = type_of::type_of(bcx.ccx, content_ty);
                 let llsize = machine::llsize_of(bcx.ccx, llty);
@@ -515,6 +525,7 @@ pub fn trans_scalar_binop(&mut self,
             mir::BinOp::BitOr => bcx.or(lhs, rhs),
             mir::BinOp::BitAnd => bcx.and(lhs, rhs),
             mir::BinOp::BitXor => bcx.xor(lhs, rhs),
+            mir::BinOp::Offset => bcx.inbounds_gep(lhs, &[rhs]),
             mir::BinOp::Shl => common::build_unchecked_lshift(bcx, lhs, rhs),
             mir::BinOp::Shr => common::build_unchecked_rshift(bcx, input_ty, lhs, rhs),
             mir::BinOp::Ne | mir::BinOp::Lt | mir::BinOp::Gt |
@@ -660,7 +671,7 @@ pub fn rvalue_creates_operand(&self, rvalue: &mir::Rvalue<'tcx>) -> bool {
             mir::Rvalue::CheckedBinaryOp(..) |
             mir::Rvalue::UnaryOp(..) |
             mir::Rvalue::Discriminant(..) |
-            mir::Rvalue::Box(..) |
+            mir::Rvalue::NullaryOp(..) |
             mir::Rvalue::Use(..) => // (*)
                 true,
             mir::Rvalue::Repeat(..) |
index 2c76cdeb48cdf7cda90d2699399560d1f5a19057..2fe463e92a8aad9540d4337b7da478622b40463a 100644 (file)
 use syntax::ast::NodeId;
 use syntax::symbol::{Symbol, InternedString};
 use trans_item::{TransItem, InstantiationMode};
-use util::nodemap::{FxHashMap, FxHashSet};
+use rustc::util::nodemap::{FxHashMap, FxHashSet};
 
 pub enum PartitioningStrategy {
     /// Generate one codegen unit per source-level module.
index 0c9d74df2485c30eb9aacdceb21642b6d06810c1..c9eb9807c41a51d5e50cb00c7aeae926eb9ea8e6 100644 (file)
@@ -22,6 +22,7 @@
 use rustc::traits;
 use rustc::ty::{self, Ty, TyCtxt, ToPredicate, TypeFoldable};
 use rustc::ty::wf::object_region_bounds;
+use rustc::lint::builtin::PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES;
 use rustc_back::slice;
 use require_c_abi_if_variadic;
 use util::common::{ErrorReported, FN_OUTPUT_NAME};
@@ -156,10 +157,7 @@ pub fn ast_path_substs_for_ty(&self,
         match item_segment.parameters {
             hir::AngleBracketedParameters(_) => {}
             hir::ParenthesizedParameters(..) => {
-                struct_span_err!(tcx.sess, span, E0214,
-                          "parenthesized parameters may only be used with a trait")
-                    .span_label(span, "only traits may use parentheses")
-                    .emit();
+                self.prohibit_parenthesized_params(item_segment, true);
 
                 return Substs::for_item(tcx, def_id, |_, _| {
                     tcx.types.re_static
@@ -370,6 +368,8 @@ pub fn instantiate_mono_trait_ref(&self,
         self_ty: Ty<'tcx>)
         -> ty::TraitRef<'tcx>
     {
+        self.prohibit_type_params(trait_ref.path.segments.split_last().unwrap().1);
+
         let trait_def_id = self.trait_def_id(trait_ref);
         self.ast_path_to_mono_trait_ref(trait_ref.path.span,
                                         trait_def_id,
@@ -402,6 +402,8 @@ pub fn instantiate_poly_trait_ref(&self,
 
         debug!("ast_path_to_poly_trait_ref({:?}, def_id={:?})", trait_ref, trait_def_id);
 
+        self.prohibit_type_params(trait_ref.path.segments.split_last().unwrap().1);
+
         let (substs, assoc_bindings) =
             self.create_substs_for_ast_trait_ref(trait_ref.path.span,
                                                  trait_def_id,
@@ -623,6 +625,13 @@ fn conv_object_ty_poly_trait_ref(&self,
                                                         dummy_self,
                                                         &mut projection_bounds);
 
+        for trait_bound in trait_bounds[1..].iter() {
+            // Sanity check for non-principal trait bounds
+            self.instantiate_poly_trait_ref(trait_bound,
+                                            dummy_self,
+                                            &mut vec![]);
+        }
+
         let (auto_traits, trait_bounds) = split_auto_traits(tcx, &trait_bounds[1..]);
 
         if !trait_bounds.is_empty() {
@@ -938,6 +947,10 @@ fn qpath_to_ty(&self,
 
     pub fn prohibit_type_params(&self, segments: &[hir::PathSegment]) {
         for segment in segments {
+            if let hir::ParenthesizedParameters(_) = segment.parameters {
+                self.prohibit_parenthesized_params(segment, false);
+                break;
+            }
             for typ in segment.parameters.types() {
                 struct_span_err!(self.tcx().sess, typ.span, E0109,
                                  "type parameters are not allowed on this type")
@@ -960,6 +973,21 @@ pub fn prohibit_type_params(&self, segments: &[hir::PathSegment]) {
         }
     }
 
+    pub fn prohibit_parenthesized_params(&self, segment: &hir::PathSegment, emit_error: bool) {
+        if let hir::ParenthesizedParameters(ref data) = segment.parameters {
+            if emit_error {
+                struct_span_err!(self.tcx().sess, data.span, E0214,
+                          "parenthesized parameters may only be used with a trait")
+                    .span_label(data.span, "only traits may use parentheses")
+                    .emit();
+            } else {
+                let msg = "parenthesized parameters may only be used with a trait".to_string();
+                self.tcx().sess.add_lint(PARENTHESIZED_PARAMS_IN_TYPES_AND_MODULES,
+                                         ast::CRATE_NODE_ID, data.span, msg);
+            }
+        }
+    }
+
     pub fn prohibit_projection(&self, span: Span) {
         let mut err = struct_span_err!(self.tcx().sess, span, E0229,
                                        "associated type bindings are not allowed here");
index c228fc6b24abde730be51c16c4ae0618176e0a79..883a0a9d88a1c083d94170415aaff3aa0004ce8a 100644 (file)
@@ -76,7 +76,6 @@
 use rustc::ty::subst::Subst;
 use errors::DiagnosticBuilder;
 use syntax::abi;
-use syntax::feature_gate;
 use syntax::ptr::P;
 use syntax_pos;
 
@@ -614,14 +613,6 @@ fn coerce_closure_to_fn(&self,
         let node_id_a = self.tcx.hir.as_local_node_id(def_id_a).unwrap();
         match b.sty {
             ty::TyFnPtr(_) if self.tcx.with_freevars(node_id_a, |v| v.is_empty()) => {
-                if !self.tcx.sess.features.borrow().closure_to_fn_coercion {
-                    feature_gate::emit_feature_err(&self.tcx.sess.parse_sess,
-                                                   "closure_to_fn_coercion",
-                                                   self.cause.span,
-                                                   feature_gate::GateIssue::Language,
-                                                   feature_gate::CLOSURE_TO_FN_COERCION);
-                    return self.unify_and(a, b, identity());
-                }
                 // We coerce the closure, which has fn type
                 //     `extern "rust-call" fn((arg0,arg1,...)) -> _`
                 // to
index 1ad3914854d342bff3b54aec232cb24103a1a463..b5c2780e9a7af30b9854137a0b62309bdcdadf6a 100644 (file)
@@ -4536,7 +4536,9 @@ fn check_path_parameter_count(&self,
                     (&data.lifetimes[..], &data.types[..], data.infer_types, &data.bindings[..])
                 }
                 Some(&hir::ParenthesizedParameters(_)) => {
-                    span_bug!(span, "parenthesized parameters cannot appear in ExprPath");
+                    AstConv::prohibit_parenthesized_params(self, &segment.as_ref().unwrap().0,
+                                                           false);
+                    (&[][..], &[][..], true, &[][..])
                 }
                 None => (&[][..], &[][..], true, &[][..])
             }
index cb1bd3e099d54c31ef6b468cf0cc619a1cb69e4a..fb3bcd31e21fcbaffe0dbcf0bca3cd3a9a34f315 100644 (file)
@@ -100,6 +100,7 @@ pub fn provide(providers: &mut Providers) {
         impl_trait_ref,
         impl_polarity,
         is_foreign_item,
+        is_default_impl,
         ..*providers
     };
 }
@@ -1545,3 +1546,14 @@ fn is_foreign_item<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
         _ => bug!("is_foreign_item applied to non-local def-id {:?}", def_id)
     }
 }
+
+fn is_default_impl<'a, 'tcx>(tcx: TyCtxt<'a, 'tcx, 'tcx>,
+                             def_id: DefId)
+                             -> bool {
+    match tcx.hir.get_if_local(def_id) {
+        Some(hir_map::NodeItem(&hir::Item { node: hir::ItemDefaultImpl(..), .. }))
+             => true,
+        Some(_) => false,
+        _ => bug!("is_default_impl applied to non-local def-id {:?}", def_id)
+    }
+}
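
The new `is_default_impl` query (registered in the providers list above and consumed by rustdoc further down in place of the cstore call) reports whether a DefId refers to a `hir::ItemDefaultImpl`. A hedged example of such an item, using the 2017-era opt-in built-in trait syntax:

    #![feature(optin_builtin_traits)]

    trait Scary {}
    impl Scary for .. {}   // a "default impl": is_default_impl would return true for its DefId

    fn main() {}
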
index f19eb194276112245b98826ad6f980dd744147e4..910d5d7402478a104a26f72c1a5a78c43fb19396 100644 (file)
@@ -4131,7 +4131,7 @@ fn not(self) -> bool {
 //  E0217, // ambiguous associated type, defined in multiple supertraits
 //  E0218, // no associated type defined
 //  E0219, // associated type defined in higher-ranked supertrait
-//  E0222, // Error code E0045 (variadic function must have C calling
+//  E0222, // Error code E0045 (variadic function must have C or cdecl calling
            // convention) duplicate
     E0224, // at least one non-builtin trait is required for an object type
     E0227, // ambiguous lifetime bound, explicit lifetime bound required
index 6f2c73b892567f343f174113bc6ee49b29d7029a..699b5f330d4577ac5dcd10a94b075910608fb49a 100644 (file)
@@ -141,11 +141,10 @@ fn require_c_abi_if_variadic(tcx: TyCtxt,
                              decl: &hir::FnDecl,
                              abi: Abi,
                              span: Span) {
-    if decl.variadic && abi != Abi::C {
+    if decl.variadic && !(abi == Abi::C || abi == Abi::Cdecl) {
         let mut err = struct_span_err!(tcx.sess, span, E0045,
-                  "variadic function must have C calling convention");
-        err.span_label(span, "variadics require C calling conventions")
-            .emit();
+                  "variadic function must have C or cdecl calling convention");
+        err.span_label(span, "variadics require C or cdecl calling convention").emit();
     }
 }
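
With the relaxed check above, C-variadic foreign declarations are accepted under either the C or the cdecl ABI; any other ABI still gets E0045, as exercised by the updated compile-fail test at the end of this diff. Illustrative only:

    extern "cdecl" {
        fn now_allowed(fmt: *const u8, ...);   // cdecl may be variadic after this change
    }

    extern "Rust" {
        fn still_rejected(x: u8, ...);   //~ ERROR E0045: variadic function must have C or cdecl calling convention
    }

    fn main() {}
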
 
index 5ea3eaa88d7726b6acea1174e48e53e77cb800d2..8f7add14d0a058a70679082c925318eae8a57121 100644 (file)
@@ -290,7 +290,7 @@ pub fn build_impl(cx: &DocContext, did: DefId, ret: &mut Vec<clean::Item>) {
     }
 
     // If this is a defaulted impl, then bail out early here
-    if tcx.sess.cstore.is_default_impl(did) {
+    if tcx.is_default_impl(did) {
         return ret.push(clean::Item {
             inner: clean::DefaultImplItem(clean::DefaultImpl {
                 // FIXME: this should be decoded
index d32c3ec5f46b1f063bbcfa26ebea609d7b158178..bbe5bd4a10cf17c4fe026f92bd9aae0c66053f70 100644 (file)
@@ -103,18 +103,11 @@ fn read_file(&self, path: &Path) -> io::Result<String> {
 //
 
 pub struct CodeMap {
-    // The `files` field should not be visible outside of libsyntax so that we
-    // can do proper dependency tracking.
     pub(super) files: RefCell<Vec<Rc<FileMap>>>,
     file_loader: Box<FileLoader>,
     // This is used to apply the file path remapping as specified via
     // -Zremap-path-prefix to all FileMaps allocated within this CodeMap.
     path_mapping: FilePathMapping,
-    // The CodeMap will invoke this callback whenever a specific FileMap is
-    // accessed. The callback starts out as a no-op but when the dependency
-    // graph becomes available later during the compilation process, it is
-    // be replaced with something that notifies the dep-tracking system.
-    dep_tracking_callback: RefCell<Box<Fn(&FileMap)>>,
 }
 
 impl CodeMap {
@@ -123,7 +116,6 @@ pub fn new(path_mapping: FilePathMapping) -> CodeMap {
             files: RefCell::new(Vec::new()),
             file_loader: Box::new(RealFileLoader),
             path_mapping: path_mapping,
-            dep_tracking_callback: RefCell::new(Box::new(|_| {})),
         }
     }
 
@@ -134,7 +126,6 @@ pub fn with_file_loader(file_loader: Box<FileLoader>,
             files: RefCell::new(Vec::new()),
             file_loader: file_loader,
             path_mapping: path_mapping,
-            dep_tracking_callback: RefCell::new(Box::new(|_| {})),
         }
     }
 
@@ -142,10 +133,6 @@ pub fn path_mapping(&self) -> &FilePathMapping {
         &self.path_mapping
     }
 
-    pub fn set_dep_tracking_callback(&self, cb: Box<Fn(&FileMap)>) {
-        *self.dep_tracking_callback.borrow_mut() = cb;
-    }
-
     pub fn file_exists(&self, path: &Path) -> bool {
         self.file_loader.file_exists(path)
     }
@@ -156,15 +143,6 @@ pub fn load_file(&self, path: &Path) -> io::Result<Rc<FileMap>> {
     }
 
     pub fn files(&self) -> Ref<Vec<Rc<FileMap>>> {
-        let files = self.files.borrow();
-        for file in files.iter() {
-            (self.dep_tracking_callback.borrow())(file);
-        }
-        files
-    }
-
-    /// Only use this if you do your own dependency tracking!
-    pub fn files_untracked(&self) -> Ref<Vec<Rc<FileMap>>> {
         self.files.borrow()
     }
 
@@ -311,8 +289,6 @@ fn lookup_line(&self, pos: BytePos) -> Result<FileMapAndLine, Rc<FileMap>> {
         let files = self.files.borrow();
         let f = (*files)[idx].clone();
 
-        (self.dep_tracking_callback.borrow())(&f);
-
         match f.lookup_line(pos) {
             Some(line) => Ok(FileMapAndLine { fm: f, line: line }),
             None => Err(f)
@@ -502,7 +478,6 @@ pub fn def_span(&self, sp: Span) -> Span {
     pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
         for fm in self.files.borrow().iter() {
             if filename == fm.name {
-               (self.dep_tracking_callback.borrow())(fm);
                 return Some(fm.clone());
             }
         }
@@ -513,7 +488,6 @@ pub fn get_filemap(&self, filename: &str) -> Option<Rc<FileMap>> {
     pub fn lookup_byte_offset(&self, bpos: BytePos) -> FileMapAndBytePos {
         let idx = self.lookup_filemap_idx(bpos);
         let fm = (*self.files.borrow())[idx].clone();
-        (self.dep_tracking_callback.borrow())(&fm);
         let offset = bpos - fm.start_pos;
         FileMapAndBytePos {fm: fm, pos: offset}
     }
@@ -524,8 +498,6 @@ pub fn bytepos_to_file_charpos(&self, bpos: BytePos) -> CharPos {
         let files = self.files.borrow();
         let map = &(*files)[idx];
 
-        (self.dep_tracking_callback.borrow())(map);
-
         // The number of extra bytes due to multibyte chars in the FileMap
         let mut total_extra_bytes = 0;
 
index a9252d0818e38039a1152589515cc821b1a156d3..15042e529e51a8c2970c7a92041ac8697306dba4 100644 (file)
@@ -84,6 +84,12 @@ fn expand<'cx>(&self,
     }
 }
 
+fn trace_macros_note(cx: &mut ExtCtxt, sp: Span, message: String) {
+    let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
+    let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
+    values.push(message);
+}
+
 /// Given `lhses` and `rhses`, this is the new macro we create
 fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                           sp: Span,
@@ -93,9 +99,7 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                           rhses: &[quoted::TokenTree])
                           -> Box<MacResult+'cx> {
     if cx.trace_macros() {
-        let sp = sp.macro_backtrace().last().map(|trace| trace.call_site).unwrap_or(sp);
-        let mut values: &mut Vec<String> = cx.expansions.entry(sp).or_insert_with(Vec::new);
-        values.push(format!("expands to `{}! {{ {} }}`", name, arg));
+        trace_macros_note(cx, sp, format!("expanding `{}! {{ {} }}`", name, arg));
     }
 
     // Which arm's failure should we report? (the one furthest along)
@@ -117,6 +121,11 @@ fn generic_extension<'cx>(cx: &'cx mut ExtCtxt,
                 };
                 // rhs has holes ( `$id` and `$(...)` that need filled)
                 let tts = transcribe(&cx.parse_sess.span_diagnostic, Some(named_matches), rhs);
+
+                if cx.trace_macros() {
+                    trace_macros_note(cx, sp, format!("to `{}`", tts));
+                }
+
                 let directory = Directory {
                     path: cx.current_expansion.module.directory.clone(),
                     ownership: cx.current_expansion.directory_ownership,
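
The `trace_macros_note` refactor above means macro tracing now records two notes per invocation: one built from the "expanding `{}! {{ {} }}`" format at match time and one from "to `{}`" after transcription. A hedged usage example (nightly only, since `trace_macros` is feature-gated):

    #![feature(trace_macros)]

    macro_rules! double {
        ($e:expr) => { $e + $e }
    }

    fn main() {
        trace_macros!(true);
        // tracing notes look roughly like: expanding `double! { 21 }`  /  to `21 + 21`
        let _x = double!(21);
        trace_macros!(false);
    }
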
index c119fad1b736e599d7d1a969df5a9356000d529b..b2f52d11db2b111b82f6073ec5e17fd5527a4745 100644 (file)
@@ -321,9 +321,6 @@ pub fn new() -> Features {
     // `extern "msp430-interrupt" fn()`
     (active, abi_msp430_interrupt, "1.16.0", Some(38487)),
 
-    // Coerces non capturing closures to function pointers
-    (active, closure_to_fn_coercion, "1.17.0", Some(39817)),
-
     // Used to identify crates that contain sanitizer runtimes
     // rustc internal
     (active, sanitizer_runtime, "1.17.0", None),
@@ -427,6 +424,8 @@ pub fn new() -> Features {
     (accepted, loop_break_value, "1.19.0", Some(37339)),
     // Permits numeric fields in struct expressions and patterns.
     (accepted, relaxed_adts, "1.19.0", Some(35626)),
+    // Coerces non capturing closures to function pointers
+    (accepted, closure_to_fn_coercion, "1.19.0", Some(39817)),
 );
 
 // If you change this, please modify src/doc/unstable-book as well. You must
@@ -665,12 +664,6 @@ pub fn is_builtin_attr(attr: &ast::Attribute) -> bool {
                                            "rustc_attrs",
                                            "internal rustc attributes will never be stable",
                                            cfg_fn!(rustc_attrs))),
-    ("rustc_move_fragments", Normal, Gated(Stability::Unstable,
-                                           "rustc_attrs",
-                                           "the `#[rustc_move_fragments]` attribute \
-                                            is just used for rustc unit tests \
-                                            and will never be stable",
-                                           cfg_fn!(rustc_attrs))),
     ("rustc_mir", Whitelisted, Gated(Stability::Unstable,
                                      "rustc_attrs",
                                      "the `#[rustc_mir]` attribute \
@@ -1026,9 +1019,6 @@ pub fn feature_err<'a>(sess: &'a ParseSess, feature: &str, span: Span, issue: Ga
 pub const EXPLAIN_PLACEMENT_IN: &'static str =
     "placement-in expression syntax is experimental and subject to change.";
 
-pub const CLOSURE_TO_FN_COERCION: &'static str =
-    "non-capturing closure to fn coercion is experimental";
-
 struct PostExpansionVisitor<'a> {
     context: &'a Context<'a>,
 }
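For illustration, a small sketch (not taken from this commit) of what accepting `closure_to_fn_coercion` means in practice: non-capturing closures now coerce to `fn` pointers without any feature attribute, which is why the feature-gate test and the `#![feature(closure_to_fn_coercion)]` lines are deleted below.

    // No feature attribute needed once the coercion is in the accepted list.
    const FOO: fn(u8) -> u8 = |v: u8| { v };

    fn main() {
        let bar: fn(&mut u32) = |x| *x += 1; // non-capturing closure as a fn pointer
        let mut n: u32 = 0;
        bar(&mut n);
        assert_eq!(n, 1);
        assert_eq!(FOO(7), 7);
    }
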
index c24867224ea864cd94733550001037fe7dee83fa..838c180c70b75c539f6f0c9f42031b77dc8d5855 100644 (file)
@@ -781,11 +781,15 @@ extern "C" void LLVMRustWriteTypeToString(LLVMTypeRef Ty, RustStringRef Str) {
 extern "C" void LLVMRustWriteValueToString(LLVMValueRef V,
                                            RustStringRef Str) {
   RawRustStringOstream OS(Str);
-  OS << "(";
-  unwrap<llvm::Value>(V)->getType()->print(OS);
-  OS << ":";
-  unwrap<llvm::Value>(V)->print(OS);
-  OS << ")";
+  if (!V) {
+    OS << "(null)";
+  } else {
+    OS << "(";
+    unwrap<llvm::Value>(V)->getType()->print(OS);
+    OS << ":";
+    unwrap<llvm::Value>(V)->print(OS);
+    OS << ")";
+  }
 }
 
 extern "C" bool LLVMRustLinkInExternalBitcode(LLVMModuleRef DstRef, char *BC,
index a3fea8e0db299b2a810f6b1790221e340d6b691d..3f098861eb60c08587ccfc4af967f11a256ca4c9 100644 (file)
@@ -9,7 +9,7 @@
 // except according to those terms.
 
 extern "Rust" { fn foo(x: u8, ...); }   //~ ERROR E0045
-                                        //~| NOTE variadics require C calling conventions
+                                        //~| NOTE variadics require C or cdecl calling convention
 
 fn main() {
 }
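For illustration, a sketch (not part of the diff) of the declaration shape the reworded E0045 message still accepts: variadic foreign functions remain fine under the "C" ABI, and now explicitly under "cdecl" as well, while other ABIs such as "stdcall" are rejected, as the updated variadic-ffi tests further down show.

    // Accepted: a variadic foreign function declared with the "C" ABI.
    extern "C" {
        fn printf(fmt: *const u8, ...) -> i32;
    }

    fn main() {
        // Declaring it is enough for this sketch; calling it would need an `unsafe` block.
    }
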
diff --git a/src/test/compile-fail/feature-gate-closure_to_fn_coercion.rs b/src/test/compile-fail/feature-gate-closure_to_fn_coercion.rs
deleted file mode 100644 (file)
index d074a35..0000000
+++ /dev/null
@@ -1,45 +0,0 @@
-// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// ignore-stage0: new feature, remove this when SNAP
-// revisions: a b
-
-#[cfg(a)]
-mod a {
-    const FOO: fn(u8) -> u8 = |v: u8| { v };
-    //[a]~^ ERROR non-capturing closure to fn coercion is experimental
-    //[a]~^^ ERROR mismatched types
-
-    const BAR: [fn(&mut u32); 1] = [
-        |v: &mut u32| *v += 1,
-    //[a]~^ ERROR non-capturing closure to fn coercion is experimental
-    //[a]~^^ ERROR mismatched types
-    ];
-}
-
-#[cfg(b)]
-mod b {
-    fn func_specific() -> (fn() -> u32) {
-        || return 42
-        //[b]~^ ERROR non-capturing closure to fn coercion is experimental
-        //[b]~^^ ERROR mismatched types
-    }
-    fn foo() {
-        // Items
-        assert_eq!(func_specific()(), 42);
-        let foo: fn(u8) -> u8 = |v: u8| { v };
-        //[b]~^ ERROR non-capturing closure to fn coercion is experimental
-        //[b]~^^ ERROR mismatched types
-    }
-
-}
-
-
-
index bb5b70829a163a29a3426043f01e0644e131f878..8cfd3e020c69a47da7d3eeb1cf1eb8be261a62fa 100644 (file)
@@ -14,7 +14,6 @@
 
 #[rustc_variance] //~ ERROR the `#[rustc_variance]` attribute is just used for rustc unit tests and will never be stable
 #[rustc_error] //~ ERROR the `#[rustc_error]` attribute is just used for rustc unit tests and will never be stable
-#[rustc_move_fragments] //~ ERROR the `#[rustc_move_fragments]` attribute is just used for rustc unit tests and will never be stable
 #[rustc_foo]
 //~^ ERROR unless otherwise specified, attributes with the prefix `rustc_` are reserved for internal compiler diagnostics
 
index 2ad804fc8ced7e3c92964c1825f0553697c9521f..eb5c6076440e9c0eee559bf3dec87f1e558ad77d 100644 (file)
             //~| ERROR E0191
             //~| NOTE missing associated type `Output` value
             Sub;
-            //~^ ERROR E0225
+            //~^ ERROR E0393
+            //~| NOTE missing reference to `RHS`
+            //~| NOTE because of the default `Self` reference, type parameters must be specified on object types
+            //~| ERROR E0225
             //~| NOTE non-Send/Sync additional trait
 
 fn main() { }
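For illustration, a deliberately rejected sketch (not from this commit) of the situation the new E0393 note describes: `Sub`'s `RHS` parameter defaults to `Self`, and an object type has no single `Self` to fall back on, so the parameter must be written out explicitly.

    use std::ops::Sub;

    // error[E0393]: the type parameter `RHS` must be explicitly specified
    // (plus E0191 for the missing `Output` binding), mirroring the test above.
    fn rejected(_: &Sub) {}

    fn main() {}
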
diff --git a/src/test/compile-fail/issue-32995-2.rs b/src/test/compile-fail/issue-32995-2.rs
new file mode 100644 (file)
index 0000000..cb68d52
--- /dev/null
@@ -0,0 +1,36 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(parenthesized_params_in_types_and_modules)]
+//~^ NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+#![allow(dead_code, unused_variables)]
+#![feature(conservative_impl_trait)]
+
+fn main() {
+    { fn f<X: ::std::marker()::Send>() {} }
+    //~^ ERROR parenthesized parameters may only be used with a trait
+    //~| WARN previously accepted
+    //~| NOTE issue #42238
+
+    { fn f() -> impl ::std::marker()::Send { } }
+    //~^ ERROR parenthesized parameters may only be used with a trait
+    //~| WARN previously accepted
+    //~| NOTE issue #42238
+}
+
+#[derive(Clone)]
+struct X;
+
+impl ::std::marker()::Copy for X {}
+//~^ ERROR parenthesized parameters may only be used with a trait
+//~| WARN previously accepted
+//~| NOTE issue #42238
diff --git a/src/test/compile-fail/issue-32995.rs b/src/test/compile-fail/issue-32995.rs
new file mode 100644 (file)
index 0000000..f2ed8bf
--- /dev/null
@@ -0,0 +1,62 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![deny(parenthesized_params_in_types_and_modules)]
+//~^ NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+//~| NOTE lint level defined here
+#![allow(dead_code, unused_variables)]
+
+fn main() {
+    let x: usize() = 1;
+    //~^ ERROR parenthesized parameters may only be used with a trait
+    //~| WARN previously accepted
+    //~| NOTE issue #42238
+
+    let b: ::std::boxed()::Box<_> = Box::new(1);
+    //~^ ERROR parenthesized parameters may only be used with a trait
+    //~| WARN previously accepted
+    //~| NOTE issue #42238
+
+    macro_rules! pathexpr {
+        ($p:path) => { $p }
+    }
+
+    let p = pathexpr!(::std::str()::from_utf8)(b"foo").unwrap();
+    //~^ ERROR parenthesized parameters may only be used with a trait
+    //~| WARN previously accepted
+    //~| NOTE issue #42238
+
+    let p = pathexpr!(::std::str::from_utf8())(b"foo").unwrap();
+    //~^ ERROR parenthesized parameters may only be used with a trait
+    //~| WARN previously accepted
+    //~| NOTE issue #42238
+
+    let o : Box<::std::marker()::Send> = Box::new(1);
+    //~^ ERROR parenthesized parameters may only be used with a trait
+    //~| WARN previously accepted
+    //~| NOTE issue #42238
+
+    let o : Box<Send + ::std::marker()::Sync> = Box::new(1);
+    //~^ ERROR parenthesized parameters may only be used with a trait
+    //~| WARN previously accepted
+    //~| NOTE issue #42238
+}
+
+fn foo<X:Default>() {
+    let d : X() = Default::default();
+    //~^ ERROR parenthesized parameters may only be used with a trait
+    //~| WARN previously accepted
+    //~| NOTE issue #42238
+}
diff --git a/src/test/compile-fail/issue-39974.rs b/src/test/compile-fail/issue-39974.rs
new file mode 100644 (file)
index 0000000..6f6b775
--- /dev/null
@@ -0,0 +1,21 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+const LENGTH: f64 = 2;
+
+struct Thing {
+    f: [[f64; 2]; LENGTH],
+    //~^ ERROR mismatched types
+    //~| expected usize, found f64
+}
+
+fn main() {
+    let _t = Thing { f: [[0.0, 0.0], [0.0, 0.0], [0.0, 0.0], [0.0, 0.0]] };
+}
index 3ccee0f12becb63e1f84cf4bd89be3109253a3c5..7daf4bcbaa44b970e0adab3e08d01ff0f3eb8f20 100644 (file)
@@ -8,8 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-#![feature(closure_to_fn_coercion)]
-
 fn main() {
     let bar: fn(&mut u32) = |_| {};
 
diff --git a/src/test/compile-fail/issue-40350.rs b/src/test/compile-fail/issue-40350.rs
new file mode 100644 (file)
index 0000000..39249ee
--- /dev/null
@@ -0,0 +1,24 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+#![feature(rustc_attrs)]
+#![allow(warnings)]
+
+enum E {
+    A = {
+        enum F { B }
+        0
+    }
+}
+
+#[rustc_error]
+fn main() {}
+//~^ ERROR compilation successful
+
diff --git a/src/test/compile-fail/move-fragments-1.rs b/src/test/compile-fail/move-fragments-1.rs
deleted file mode 100644 (file)
index 0219f5b..0000000
+++ /dev/null
@@ -1,58 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// These are all fairly trivial cases: unused variables or direct
-// drops of substructure.
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-#[rustc_move_fragments]
-pub fn test_noop() {
-}
-
-#[rustc_move_fragments]
-pub fn test_take(_x: D) {
-    //~^ ERROR                  assigned_leaf_path: `$(local _x)`
-}
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_take_struct(_p: Pair<D, D>) {
-    //~^ ERROR                  assigned_leaf_path: `$(local _p)`
-}
-
-#[rustc_move_fragments]
-pub fn test_drop_struct_part(p: Pair<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                     moved_leaf_path: `$(local p).x`
-    //~| ERROR                    unmoved_fragment: `$(local p).y`
-    drop(p.x);
-}
-
-#[rustc_move_fragments]
-pub fn test_drop_tuple_part(p: (D, D)) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                     moved_leaf_path: `$(local p).#0`
-    //~| ERROR                    unmoved_fragment: `$(local p).#1`
-    drop(p.0);
-}
-
-pub fn main() { }
diff --git a/src/test/compile-fail/move-fragments-2.rs b/src/test/compile-fail/move-fragments-2.rs
deleted file mode 100644 (file)
index 15c28ec..0000000
+++ /dev/null
@@ -1,87 +0,0 @@
-// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// These are checking that enums are tracked; note that their output
-// paths include "downcasts" of the path to a particular enum.
-
-#![feature(rustc_attrs)]
-
-use self::Lonely::{Zero, One, Two};
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub enum Lonely<X,Y> { Zero, One(X), Two(X, Y) }
-
-#[rustc_move_fragments]
-pub fn test_match_partial(p: Lonely<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                  assigned_leaf_path: `($(local p) as Lonely::Zero)`
-    match p {
-        Zero => {}
-        _ => {}
-    }
-}
-
-#[rustc_move_fragments]
-pub fn test_match_full(p: Lonely<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                  assigned_leaf_path: `($(local p) as Lonely::Zero)`
-    //~| ERROR                  assigned_leaf_path: `($(local p) as Lonely::One)`
-    //~| ERROR                  assigned_leaf_path: `($(local p) as Lonely::Two)`
-    match p {
-        Zero => {}
-        One(..) => {}
-        Two(..) => {}
-    }
-}
-
-#[rustc_move_fragments]
-pub fn test_match_bind_one(p: Lonely<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                  assigned_leaf_path: `($(local p) as Lonely::Zero)`
-    //~| ERROR                 parent_of_fragments: `($(local p) as Lonely::One)`
-    //~| ERROR                     moved_leaf_path: `($(local p) as Lonely::One).#0`
-    //~| ERROR                  assigned_leaf_path: `($(local p) as Lonely::Two)`
-    //~| ERROR                  assigned_leaf_path: `$(local data)`
-    match p {
-        Zero => {}
-        One(data) => {}
-        Two(..) => {}
-    }
-}
-
-#[rustc_move_fragments]
-pub fn test_match_bind_many(p: Lonely<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                  assigned_leaf_path: `($(local p) as Lonely::Zero)`
-    //~| ERROR                 parent_of_fragments: `($(local p) as Lonely::One)`
-    //~| ERROR                     moved_leaf_path: `($(local p) as Lonely::One).#0`
-    //~| ERROR                  assigned_leaf_path: `$(local data)`
-    //~| ERROR                 parent_of_fragments: `($(local p) as Lonely::Two)`
-    //~| ERROR                     moved_leaf_path: `($(local p) as Lonely::Two).#0`
-    //~| ERROR                     moved_leaf_path: `($(local p) as Lonely::Two).#1`
-    //~| ERROR                  assigned_leaf_path: `$(local left)`
-    //~| ERROR                  assigned_leaf_path: `$(local right)`
-    match p {
-        Zero => {}
-        One(data) => {}
-        Two(left, right) => {}
-    }
-}
-
-pub fn main() { }
diff --git a/src/test/compile-fail/move-fragments-3.rs b/src/test/compile-fail/move-fragments-3.rs
deleted file mode 100644 (file)
index a115233..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2014-2015 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// This checks the handling of `_` within variants, especially when mixed
-// with bindings.
-
-#![feature(rustc_attrs)]
-
-use self::Lonely::{Zero, One, Two};
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub enum Lonely<X,Y> { Zero, One(X), Two(X, Y) }
-
-#[rustc_move_fragments]
-pub fn test_match_bind_and_underscore(p: Lonely<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                  assigned_leaf_path: `($(local p) as Lonely::Zero)`
-    //~| ERROR                  assigned_leaf_path: `($(local p) as Lonely::One)`
-    //~| ERROR                 parent_of_fragments: `($(local p) as Lonely::Two)`
-    //~| ERROR                     moved_leaf_path: `($(local p) as Lonely::Two).#0`
-    //~| ERROR                    unmoved_fragment: `($(local p) as Lonely::Two).#1`
-    //~| ERROR                  assigned_leaf_path: `$(local left)`
-
-    match p {
-        Zero => {}
-
-        One(_) => {}       // <-- does not fragment `($(local p) as One)` ...
-
-        Two(left, _) => {} // <-- ... *does* fragment `($(local p) as Two)`.
-    }
-}
-
-pub fn main() { }
diff --git a/src/test/compile-fail/move-fragments-4.rs b/src/test/compile-fail/move-fragments-4.rs
deleted file mode 100644 (file)
index 191e23a..0000000
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// This checks that a move of deep structure is properly tracked. (An
-// early draft of the code did not properly traverse up through all of
-// the parents of the leaf fragment.)
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_move_substructure(pppp: Pair<Pair<Pair<Pair<D,D>, D>, D>, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local pppp)`
-    //~| ERROR                 parent_of_fragments: `$(local pppp).x`
-    //~| ERROR                 parent_of_fragments: `$(local pppp).x.x`
-    //~| ERROR                    unmoved_fragment: `$(local pppp).x.x.x`
-    //~| ERROR                     moved_leaf_path: `$(local pppp).x.x.y`
-    //~| ERROR                    unmoved_fragment: `$(local pppp).x.y`
-    //~| ERROR                    unmoved_fragment: `$(local pppp).y`
-    drop(pppp.x.x.y);
-}
-
-pub fn main() { }
diff --git a/src/test/compile-fail/move-fragments-5.rs b/src/test/compile-fail/move-fragments-5.rs
deleted file mode 100644 (file)
index 38a385e..0000000
+++ /dev/null
@@ -1,94 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// This is the first test that checks moving into local variables.
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_move_field_to_local(p: Pair<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                     moved_leaf_path: `$(local p).x`
-    //~| ERROR                    unmoved_fragment: `$(local p).y`
-    //~| ERROR                  assigned_leaf_path: `$(local _x)`
-    let _x = p.x;
-}
-
-#[rustc_move_fragments]
-pub fn test_move_field_to_local_to_local(p: Pair<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                     moved_leaf_path: `$(local p).x`
-    //~| ERROR                    unmoved_fragment: `$(local p).y`
-    //~| ERROR                  assigned_leaf_path: `$(local _x)`
-    //~| ERROR                     moved_leaf_path: `$(local _x)`
-    //~| ERROR                  assigned_leaf_path: `$(local _y)`
-    let _x = p.x;
-    let _y = _x;
-}
-
-// In the following fn's `test_move_field_to_local_delayed` and
-// `test_uninitialized_local` , the instrumentation reports that `_x`
-// is moved. This is unlike `test_move_field_to_local`, where `_x` is
-// just reported as an assigned_leaf_path. Presumably because this is
-// how we represent that it did not have an initializing expression at
-// the binding site.
-
-#[rustc_move_fragments]
-pub fn test_uninitialized_local(_p: Pair<D, D>) {
-    //~^ ERROR                  assigned_leaf_path: `$(local _p)`
-    //~| ERROR                     moved_leaf_path: `$(local _x)`
-    let _x: D;
-}
-
-#[rustc_move_fragments]
-pub fn test_move_field_to_local_delayed(p: Pair<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                     moved_leaf_path: `$(local p).x`
-    //~| ERROR                    unmoved_fragment: `$(local p).y`
-    //~| ERROR                  assigned_leaf_path: `$(local _x)`
-    //~| ERROR                     moved_leaf_path: `$(local _x)`
-    let _x;
-    _x = p.x;
-}
-
-#[rustc_move_fragments]
-pub fn test_move_field_mut_to_local(mut p: Pair<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local mut p)`
-    //~| ERROR                     moved_leaf_path: `$(local mut p).x`
-    //~| ERROR                    unmoved_fragment: `$(local mut p).y`
-    //~| ERROR                  assigned_leaf_path: `$(local _x)`
-    let _x = p.x;
-}
-
-#[rustc_move_fragments]
-pub fn test_move_field_to_local_to_local_mut(p: Pair<D, D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                     moved_leaf_path: `$(local p).x`
-    //~| ERROR                    unmoved_fragment: `$(local p).y`
-    //~| ERROR                  assigned_leaf_path: `$(local mut _x)`
-    //~| ERROR                     moved_leaf_path: `$(local mut _x)`
-    //~| ERROR                  assigned_leaf_path: `$(local _y)`
-    let mut _x = p.x;
-    let _y = _x;
-}
-
-pub fn main() {}
diff --git a/src/test/compile-fail/move-fragments-6.rs b/src/test/compile-fail/move-fragments-6.rs
deleted file mode 100644 (file)
index 122727c..0000000
+++ /dev/null
@@ -1,61 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// Test that moving into a field (i.e. overwriting it) fragments the
-// receiver.
-
-#![feature(rustc_attrs)]
-
-use std::mem::drop;
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_overwrite_uninit_field<Z>(z: Z) {
-    //~^ ERROR                 parent_of_fragments: `$(local mut p)`
-    //~| ERROR                  assigned_leaf_path: `$(local z)`
-    //~| ERROR                     moved_leaf_path: `$(local z)`
-    //~| ERROR                  assigned_leaf_path: `$(local mut p).x`
-    //~| ERROR                    unmoved_fragment: `$(local mut p).y`
-
-    let mut p: Pair<Z,Z>;
-    p.x = z;
-}
-
-#[rustc_move_fragments]
-pub fn test_overwrite_moved_field<Z>(mut p: Pair<Z,Z>, z: Z) {
-    //~^ ERROR                 parent_of_fragments: `$(local mut p)`
-    //~| ERROR                  assigned_leaf_path: `$(local z)`
-    //~| ERROR                     moved_leaf_path: `$(local z)`
-    //~| ERROR                  assigned_leaf_path: `$(local mut p).y`
-    //~| ERROR                    unmoved_fragment: `$(local mut p).x`
-
-    drop(p);
-    p.y = z;
-}
-
-#[rustc_move_fragments]
-pub fn test_overwrite_same_field<Z>(mut p: Pair<Z,Z>) {
-    //~^ ERROR                 parent_of_fragments: `$(local mut p)`
-    //~| ERROR                     moved_leaf_path: `$(local mut p).x`
-    //~| ERROR                  assigned_leaf_path: `$(local mut p).x`
-    //~| ERROR                    unmoved_fragment: `$(local mut p).y`
-
-    p.x = p.x;
-}
-
-pub fn main() { }
diff --git a/src/test/compile-fail/move-fragments-7.rs b/src/test/compile-fail/move-fragments-7.rs
deleted file mode 100644 (file)
index a2a3720..0000000
+++ /dev/null
@@ -1,48 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// Test that moving a Box<T> fragments its containing structure, for
-// both moving out of the structure (i.e. reading `*p.x`) and writing
-// into the container (i.e. writing `*p.x`).
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_deref_box_field(p: Pair<Box<D>, Box<D>>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                 parent_of_fragments: `$(local p).x`
-    //~| ERROR                     moved_leaf_path: `$(local p).x.*`
-    //~| ERROR                    unmoved_fragment: `$(local p).y`
-    //~| ERROR                  assigned_leaf_path: `$(local i)`
-    let i : D = *p.x;
-}
-
-#[rustc_move_fragments]
-pub fn test_overwrite_deref_box_field(mut p: Pair<Box<D>, Box<D>>) {
-    //~^ ERROR                 parent_of_fragments: `$(local mut p)`
-    //~| ERROR                 parent_of_fragments: `$(local mut p).x`
-    //~| ERROR                  assigned_leaf_path: `$(local mut p).x.*`
-    //~| ERROR                    unmoved_fragment: `$(local mut p).y`
-    *p.x = D { d: 3 };
-}
-
-pub fn main() { }
diff --git a/src/test/compile-fail/move-fragments-8.rs b/src/test/compile-fail/move-fragments-8.rs
deleted file mode 100644 (file)
index e57268d..0000000
+++ /dev/null
@@ -1,41 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test that we correctly compute the move fragments for a fn.
-//
-// Note that the code below is not actually incorrect; the
-// `rustc_move_fragments` attribute is a hack that uses the error
-// reporting mechanisms as a channel for communicating from the
-// internals of the compiler.
-
-// Test that assigning into a `&T` within structured container does
-// *not* fragment its containing structure.
-//
-// Compare against the `Box<T>` handling in move-fragments-7.rs. Note
-// also that in this case we cannot do a move out of `&T`, so we only
-// test writing `*p.x` here.
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-pub struct Pair<X,Y> { x: X, y: Y }
-
-#[rustc_move_fragments]
-pub fn test_overwrite_deref_ampersand_field<'a>(p: Pair<&'a mut D, &'a D>) {
-    //~^ ERROR                 parent_of_fragments: `$(local p)`
-    //~| ERROR                 parent_of_fragments: `$(local p).x`
-    //~| ERROR                  assigned_leaf_path: `$(local p).x.*`
-    //~| ERROR                    unmoved_fragment: `$(local p).y`
-    *p.x = D { d: 3 };
-}
-
-pub fn main() { }
diff --git a/src/test/compile-fail/move-fragments-9.rs b/src/test/compile-fail/move-fragments-9.rs
deleted file mode 100644 (file)
index 350f416..0000000
+++ /dev/null
@@ -1,49 +0,0 @@
-// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
-// file at the top-level directory of this distribution and at
-// http://rust-lang.org/COPYRIGHT.
-//
-// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
-// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
-// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
-// option. This file may not be copied, modified, or distributed
-// except according to those terms.
-
-// Test moving array structures, e.g. `[T; 3]` as well as moving
-// elements in and out of such arrays.
-//
-// Note also that the `test_move_array_then_overwrite` tests represent
-// cases that we probably should make illegal.
-
-#![feature(rustc_attrs)]
-
-pub struct D { d: isize }
-impl Drop for D { fn drop(&mut self) { } }
-
-#[rustc_move_fragments]
-pub fn test_move_array_via_return(a: [D; 3]) -> [D; 3] {
-    //~^ ERROR                  assigned_leaf_path: `$(local a)`
-    //~| ERROR                     moved_leaf_path: `$(local a)`
-    return a;
-}
-
-#[rustc_move_fragments]
-pub fn test_move_array_into_recv(a: [D; 3], recv: &mut [D; 3]) {
-    //~^ ERROR                 parent_of_fragments: `$(local recv)`
-    //~| ERROR                  assigned_leaf_path: `$(local a)`
-    //~| ERROR                     moved_leaf_path: `$(local a)`
-    //~| ERROR                  assigned_leaf_path: `$(local recv).*`
-    *recv = a;
-}
-
-#[rustc_move_fragments]
-pub fn test_overwrite_array_elem(mut a: [D; 3], i: usize, d: D) {
-    //~^ ERROR                 parent_of_fragments: `$(local mut a)`
-    //~| ERROR                  assigned_leaf_path: `$(local i)`
-    //~| ERROR                  assigned_leaf_path: `$(local d)`
-    //~| ERROR                     moved_leaf_path: `$(local d)`
-    //~| ERROR                  assigned_leaf_path: `$(local mut a).[]`
-    //~| ERROR                    unmoved_fragment: `$(allbutone $(local mut a).[])`
-    a[i] = d;
-}
-
-pub fn main() { }
index afcad9d8f96141121e596ac223125d8bd9d29b5c..ec5669f639277f6db2623791bf8b4ac7c47df91b 100644 (file)
@@ -8,8 +8,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-fn baz(f: extern "cdecl" fn(usize, ...)) {
-    //~^ ERROR: variadic function must have C calling convention
+// ignore-arm stdcall isn't supported
+
+fn baz(f: extern "stdcall" fn(usize, ...)) {
+    //~^ ERROR: variadic function must have C or cdecl calling convention
     f(22, 44);
 }
 
index af2b552e20f14ecfa812bae98a6d08006f8c96bb..125177efc53c79d102e62c519462536f9354fd69 100644 (file)
@@ -8,8 +8,10 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-extern "cdecl" {
-    fn printf(_: *const u8, ...); //~ ERROR: variadic function must have C calling convention
+// ignore-arm stdcall isn't supported
+
+extern "stdcall" {
+    fn printf(_: *const u8, ...); //~ ERROR: variadic function must have C or cdecl calling
 }
 
 extern {
index ff5fd634714497bf53a38f11bc1aa323844a5a8c..1099aeb921763383da47b8feec0ac81d3889d61a 100644 (file)
@@ -9,6 +9,7 @@
 // except according to those terms.
 
 // no-prefer-dynamic
+// compile-flags: -Z query-dep-graph
 
 #![crate_type="rlib"]
 
index e1dba1317703d6c448b969d1327a3884b8cc113c..2ae434071f2c5f1dfeceb1e640839cba847fd7b4 100644 (file)
@@ -8,6 +8,8 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
+// compile-flags: -Z query-dep-graph
+
 #![crate_type="rlib"]
 
 #[cfg(rpass1)]
diff --git a/src/test/mir-opt/issue-41888.rs b/src/test/mir-opt/issue-41888.rs
new file mode 100644 (file)
index 0000000..ea4d7d3
--- /dev/null
@@ -0,0 +1,186 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// check that we clear the "ADT master drop flag" even when there are
+// no fields to be dropped.
+
+fn main() {
+    let e;
+    if cond() {
+        e = E::F(K);
+        if let E::F(_k) = e {
+            // older versions of rustc used to not clear the
+            // drop flag for `e` in this path.
+        }
+    }
+}
+
+fn cond() -> bool { false }
+
+struct K;
+
+enum E {
+    F(K),
+    G(Box<E>)
+}
+
+// END RUST SOURCE
+// fn main() -> () {
+//     let mut _0: ();
+//     scope 1 {
+//         let _1: E; // `e`
+//         scope 2 {
+//             let _6: K;
+//         }
+//     }
+//     let mut _2: bool;
+//     let mut _3: ();
+//     let mut _4: E;
+//     let mut _5: K;
+//     let mut _7: isize;
+//     let mut _8: bool; // drop flag for `e`
+//     let mut _9: bool;
+//     let mut _10: bool;
+//     let mut _11: isize;
+//     let mut _12: isize;
+//
+//     bb0: {
+//         _8 = const false;
+//         _10 = const false;
+//         _9 = const false;
+//         StorageLive(_1);
+//         StorageLive(_2);
+//         _2 = const cond() -> [return: bb3, unwind: bb2];
+//     }
+//
+//     bb1: {
+//         resume;
+//     }
+//
+//     bb2: {
+//         goto -> bb1;
+//     }
+//
+//     bb3: {
+//         switchInt(_2) -> [0u8: bb5, otherwise: bb4];
+//     }
+//
+//     bb4: {
+//         StorageLive(_4);
+//         StorageLive(_5);
+//         _5 = K::{{constructor}};
+//         _4 = E::F(_5,);
+//         StorageDead(_5);
+//         goto -> bb15;
+//     }
+//
+//     bb5: {
+//         _0 = ();
+//         goto -> bb12;
+//     }
+//
+//     bb6: {
+//         goto -> bb2;
+//     }
+//
+//     bb7: {
+//         goto -> bb8;
+//     }
+//
+//     bb8: {
+//         StorageDead(_4);
+//         _7 = discriminant(_1);
+//         switchInt(_7) -> [0isize: bb10, otherwise: bb9];
+//     }
+//
+//     bb9: {
+//         _0 = ();
+//         goto -> bb11;
+//     }
+//
+//     bb10: {
+//         StorageLive(_6);
+//         _10 = const false;
+//         _6 = ((_1 as F).0: K);
+//         _0 = ();
+//         goto -> bb11;
+//     }
+//
+//     bb11: {
+//         StorageDead(_6);
+//         goto -> bb12;
+//     }
+//
+//     bb12: {
+//         StorageDead(_2);
+//         goto -> bb22;
+//     }
+//
+//     bb13: {
+//         StorageDead(_1);
+//         return;
+//     }
+//
+//     bb14: {
+//         _8 = const true;
+//         _9 = const true;
+//         _10 = const true;
+//         _1 = _4;
+//         goto -> bb6;
+//     }
+//
+//     bb15: {
+//         _8 = const true;
+//         _9 = const true;
+//         _10 = const true;
+//         _1 = _4;
+//         goto -> bb7;
+//     }
+//
+//     bb16: {
+//         _8 = const false; // clear the drop flag - must always be reached
+//         goto -> bb13;
+//     }
+//
+//     bb17: {
+//         _8 = const false;
+//         goto -> bb1;
+//     }
+//
+//     bb18: {
+//         goto -> bb17;
+//     }
+//
+//     bb19: {
+//         drop(_1) -> [return: bb16, unwind: bb17];
+//     }
+//
+//     bb20: {
+//         drop(_1) -> bb17;
+//     }
+//
+//     bb21: {
+//         _11 = discriminant(_1);
+//         switchInt(_11) -> [0isize: bb16, otherwise: bb19];
+//     }
+//
+//     bb22: {
+//         switchInt(_8) -> [0u8: bb16, otherwise: bb21];
+//     }
+//
+//     bb23: {
+//         _12 = discriminant(_1);
+//         switchInt(_12) -> [0isize: bb18, otherwise: bb20];
+//     }
+//
+//     bb24: {
+//         switchInt(_8) -> [0u8: bb17, otherwise: bb23];
+//     }
+// }
index 13d1d6aa13900cf63193f7ba824c9dfa484c99f2..7fb26bdc9360d83569460ef8b85838bf62c353aa 100644 (file)
@@ -8,10 +8,6 @@
 // option. This file may not be copied, modified, or distributed
 // except according to those terms.
 
-// ignore-stage0: new feature, remove this when SNAP
-
-#![feature(closure_to_fn_coercion)]
-
 const FOO: fn(u8) -> u8 = |v: u8| { v };
 
 const BAR: [fn(&mut u32); 5] = [
index 7214ebfaf07033b67648876e7b82dddca2c6799f..41da3089c884e1a723fc8b738004f3c905608cb5 100644 (file)
@@ -9,7 +9,6 @@
 // except according to those terms.
 // Ensure that we deduce expected argument types when a `fn()` type is expected (#41755)
 
-#![feature(closure_to_fn_coercion)]
 fn foo(f: fn(Vec<u32>) -> usize) { }
 
 fn main() {
index a2cca206409263021ef770f98dc70f2f362ea5d5..6725a0c547f1dfaeb2e680c29774a68ee830c31b 100644 (file)
@@ -90,6 +90,22 @@ fn dynamic_drop(a: &Allocator, c: bool) {
     };
 }
 
+struct TwoPtrs<'a>(Ptr<'a>, Ptr<'a>);
+fn struct_dynamic_drop(a: &Allocator, c0: bool, c1: bool, c: bool) {
+    for i in 0..2 {
+        let x;
+        let y;
+        if (c0 && i == 0) || (c1 && i == 1) {
+            x = (a.alloc(), a.alloc(), a.alloc());
+            y = TwoPtrs(a.alloc(), a.alloc());
+            if c {
+                drop(x.1);
+                drop(y.0);
+            }
+        }
+    }
+}
+
 fn assignment2(a: &Allocator, c0: bool, c1: bool) {
     let mut _v = a.alloc();
     let mut _w = a.alloc();
@@ -125,6 +141,14 @@ fn union1(a: &Allocator) {
     }
 }
 
+fn array_simple(a: &Allocator) {
+    let _x = [a.alloc(), a.alloc(), a.alloc(), a.alloc()];
+}
+
+fn vec_simple(a: &Allocator) {
+    let _x = vec![a.alloc(), a.alloc(), a.alloc(), a.alloc()];
+}
+
 fn run_test<F>(mut f: F)
     where F: FnMut(&Allocator)
 {
@@ -171,5 +195,17 @@ fn main() {
     run_test(|a| assignment1(a, false));
     run_test(|a| assignment1(a, true));
 
+    run_test(|a| array_simple(a));
+    run_test(|a| vec_simple(a));
+
+    run_test(|a| struct_dynamic_drop(a, false, false, false));
+    run_test(|a| struct_dynamic_drop(a, false, false, true));
+    run_test(|a| struct_dynamic_drop(a, false, true, false));
+    run_test(|a| struct_dynamic_drop(a, false, true, true));
+    run_test(|a| struct_dynamic_drop(a, true, false, false));
+    run_test(|a| struct_dynamic_drop(a, true, false, true));
+    run_test(|a| struct_dynamic_drop(a, true, true, false));
+    run_test(|a| struct_dynamic_drop(a, true, true, true));
+
     run_test_nopanic(|a| union1(a));
 }
diff --git a/src/test/run-pass/issue-41888.rs b/src/test/run-pass/issue-41888.rs
new file mode 100644 (file)
index 0000000..e145cde
--- /dev/null
@@ -0,0 +1,43 @@
+// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+fn main() { let _ = g(Some(E::F(K))); }
+
+type R = Result<(), ()>;
+struct K;
+
+enum E {
+    F(K), // must not be built-in type
+    #[allow(dead_code)]
+    G(Box<E>, Box<E>),
+}
+
+fn translate(x: R) -> R { x }
+
+fn g(mut status: Option<E>) -> R {
+    loop {
+        match status {
+            Some(infix_or_postfix) => match infix_or_postfix {
+                E::F(_op) => { // <- must be captured by value
+                    match Ok(()) {
+                        Err(err) => return Err(err),
+                        Ok(_) => {},
+                    };
+                }
+                _ => (),
+            },
+            _ => match translate(Err(())) {
+                Err(err) => return Err(err),
+                Ok(_) => {},
+            }
+        }
+        status = None;
+    }
+}
diff --git a/src/test/run-pass/issue-42210.rs b/src/test/run-pass/issue-42210.rs
new file mode 100644 (file)
index 0000000..ecdf78c
--- /dev/null
@@ -0,0 +1,29 @@
+// Copyright 2014 The Rust Project Developers. See the COPYRIGHT
+// file at the top-level directory of this distribution and at
+// http://rust-lang.org/COPYRIGHT.
+//
+// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
+// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
+// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
+// option. This file may not be copied, modified, or distributed
+// except according to those terms.
+
+// Regression test for #42210.
+
+// compile-flags: -g
+
+trait Foo {
+    fn foo() { }
+}
+
+struct Bar;
+
+trait Baz {
+}
+
+impl Foo for (Bar, Baz) { }
+
+
+fn main() {
+    <(Bar, Baz) as Foo>::foo()
+}
index 09117a4ca740452e0870a1adbd7c369f545e1ec0..6cf3b0bd35d5edab3182967d2b863d26526e62f7 100644 (file)
@@ -4,6 +4,9 @@ note: trace_macro
 14 |     println!("Hello, World!");
    |     ^^^^^^^^^^^^^^^^^^^^^^^^^^
    |
-   = note: expands to `println! { "Hello, World!" }`
-   = note: expands to `print! { concat ! ( "Hello, World!" , "\n" ) }`
+   = note: expanding `println! { "Hello, World!" }`
+   = note: to `print ! ( concat ! ( "Hello, World!" , "\n" ) )`
+   = note: expanding `print! { concat ! ( "Hello, World!" , "\n" ) }`
+   = note: to `$crate :: io :: _print ( format_args ! ( concat ! ( "Hello, World!" , "\n" ) )
+           )`
 
index 9fcdbb44fec18d10c086b6aba8143bc06a199761..82733b01471a2c62bb1cec966d888c52ff118914 160000 (submodule)
@@ -1 +1 @@
-Subproject commit 9fcdbb44fec18d10c086b6aba8143bc06a199761
+Subproject commit 82733b01471a2c62bb1cec966d888c52ff118914